mailru: backend for mail.ru

Ivan Andreev 2019-09-09 21:56:16 +01:00 committed by Nick Craig-Wood
parent bdcd0b4c64
commit ba1daea072
14 changed files with 3270 additions and 0 deletions


@@ -41,6 +41,7 @@ Rclone *("rsync for cloud storage")* is a command line program to sync files and
* Jottacloud [:page_facing_up:](https://rclone.org/jottacloud/)
* IBM COS S3 [:page_facing_up:](https://rclone.org/s3/#ibm-cos-s3)
* Koofr [:page_facing_up:](https://rclone.org/koofr/)
* Mail.ru Cloud [:page_facing_up:](https://rclone.org/mailru/)
* Memset Memstore [:page_facing_up:](https://rclone.org/swift/)
* Mega [:page_facing_up:](https://rclone.org/mega/)
* Microsoft Azure Blob Storage [:page_facing_up:](https://rclone.org/azureblob/)


@@ -20,6 +20,7 @@ import (
	_ "github.com/rclone/rclone/backend/jottacloud"
	_ "github.com/rclone/rclone/backend/koofr"
	_ "github.com/rclone/rclone/backend/local"
	_ "github.com/rclone/rclone/backend/mailru"
	_ "github.com/rclone/rclone/backend/mega"
	_ "github.com/rclone/rclone/backend/onedrive"
	_ "github.com/rclone/rclone/backend/opendrive"

backend/mailru/api/bin.go Normal file

@@ -0,0 +1,107 @@
package api

// BIN protocol constants
const (
	BinContentType    = "application/x-www-form-urlencoded"
	TreeIDLength      = 12
	DunnoNodeIDLength = 16
)

// Operations in binary protocol
const (
	OperationAddFile           = 103 // 0x67
	OperationRename            = 105 // 0x69
	OperationCreateFolder      = 106 // 0x6A
	OperationFolderList        = 117 // 0x75
	OperationSharedFoldersList = 121 // 0x79
	// TODO investigate opcodes below
	Operation154MaybeItemInfo = 154 // 0x9A
	Operation102MaybeAbout    = 102 // 0x66
	Operation104MaybeDelete   = 104 // 0x68
)

// CreateDir protocol constants
const (
	MkdirResultOK                  = 0
	MkdirResultSourceNotExists     = 1
	MkdirResultAlreadyExists       = 4
	MkdirResultExistsDifferentCase = 9
	MkdirResultInvalidName         = 10
	MkdirResultFailed254           = 254
)

// Move result codes
const (
	MoveResultOK              = 0
	MoveResultSourceNotExists = 1
	MoveResultFailed002       = 2
	MoveResultAlreadyExists   = 4
	MoveResultFailed005       = 5
	MoveResultFailed254       = 254
)

// AddFile result codes
const (
	AddResultOK          = 0
	AddResultError01     = 1
	AddResultDunno04     = 4
	AddResultWrongPath   = 5
	AddResultNoFreeSpace = 7
	AddResultDunno09     = 9
	AddResultInvalidName = 10
	AddResultNotModified = 12
	AddResultFailedA     = 253
	AddResultFailedB     = 254
)

// List request options
const (
	ListOptTotalSpace  = 1
	ListOptDelete      = 2
	ListOptFingerprint = 4
	ListOptUnknown8    = 8
	ListOptUnknown16   = 16
	ListOptFolderSize  = 32
	ListOptUsedSpace   = 64
	ListOptUnknown128  = 128
	ListOptUnknown256  = 256
)

// ListOptDefaults ...
const ListOptDefaults = ListOptUnknown128 | ListOptUnknown256 | ListOptFolderSize | ListOptTotalSpace | ListOptUsedSpace

// List parse flags
const (
	ListParseDone      = 0
	ListParseReadItem  = 1
	ListParsePin       = 2
	ListParsePinUpper  = 3
	ListParseUnknown15 = 15
)

// List operation results
const (
	ListResultOK              = 0
	ListResultNotExists       = 1
	ListResultDunno02         = 2
	ListResultDunno03         = 3
	ListResultAlreadyExists04 = 4
	ListResultDunno05         = 5
	ListResultDunno06         = 6
	ListResultDunno07         = 7
	ListResultDunno08         = 8
	ListResultAlreadyExists09 = 9
	ListResultDunno10         = 10
	ListResultDunno11         = 11
	ListResultDunno12         = 12
	ListResultFailedB         = 253
	ListResultFailedA         = 254
)

// Directory item types
const (
	ListItemMountPoint   = 0
	ListItemFile         = 1
	ListItemFolder       = 2
	ListItemSharedFolder = 3
)
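
To give a sense of how these result codes might be consumed, here is a hypothetical helper (illustrative only, not taken from the backend itself) that maps the mkdir codes onto Go errors:

```go
package main

import (
	"fmt"

	"github.com/rclone/rclone/backend/mailru/api"
)

// interpretMkdirResult is a hypothetical mapping of the MkdirResult* codes
// above onto Go errors; the real backend interprets these codes its own way.
func interpretMkdirResult(code int) error {
	switch code {
	case api.MkdirResultOK:
		return nil
	case api.MkdirResultSourceNotExists:
		return fmt.Errorf("mkdir: source does not exist (code %d)", code)
	case api.MkdirResultAlreadyExists, api.MkdirResultExistsDifferentCase:
		return fmt.Errorf("mkdir: directory already exists (code %d)", code)
	case api.MkdirResultInvalidName:
		return fmt.Errorf("mkdir: invalid name (code %d)", code)
	default:
		return fmt.Errorf("mkdir: failed with code %d", code)
	}
}

func main() {
	fmt.Println(interpretMkdirResult(api.MkdirResultAlreadyExists))
}
```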


@@ -0,0 +1,225 @@
package api
// BIN protocol helpers
import (
"bufio"
"bytes"
"encoding/binary"
"io"
"log"
"time"
"github.com/pkg/errors"
"github.com/rclone/rclone/lib/readers"
)
// protocol errors
var (
ErrorPrematureEOF = errors.New("Premature EOF")
ErrorInvalidLength = errors.New("Invalid length")
ErrorZeroTerminate = errors.New("String must end with zero")
)
// BinWriter is a binary protocol writer
type BinWriter struct {
b *bytes.Buffer // growing byte buffer
a []byte // temporary buffer for next varint
}
// NewBinWriter creates a binary protocol helper
func NewBinWriter() *BinWriter {
return &BinWriter{
b: new(bytes.Buffer),
a: make([]byte, binary.MaxVarintLen64),
}
}
// Bytes returns binary data
func (w *BinWriter) Bytes() []byte {
return w.b.Bytes()
}
// Reader returns io.Reader with binary data
func (w *BinWriter) Reader() io.Reader {
return bytes.NewReader(w.b.Bytes())
}
// WritePu16 writes a short as unsigned varint
func (w *BinWriter) WritePu16(val int) {
if val < 0 || val > 65535 {
log.Fatalf("Invalid UInt16 %v", val)
}
w.WritePu64(int64(val))
}
// WritePu32 writes a signed long as unsigned varint
func (w *BinWriter) WritePu32(val int64) {
if val < 0 || val > 4294967295 {
log.Fatalf("Invalid UInt32 %v", val)
}
w.WritePu64(val)
}
// WritePu64 writes a non-negative int64 as an unsigned varint
func (w *BinWriter) WritePu64(val int64) {
if val < 0 {
log.Fatalf("Invalid UInt64 %v", val)
}
w.b.Write(w.a[:binary.PutUvarint(w.a, uint64(val))])
}
// WriteString writes a zero-terminated string
func (w *BinWriter) WriteString(str string) {
buf := []byte(str)
w.WritePu64(int64(len(buf) + 1))
w.b.Write(buf)
w.b.WriteByte(0)
}
// Write writes a byte buffer
func (w *BinWriter) Write(buf []byte) {
w.b.Write(buf)
}
// WriteWithLength writes a byte buffer prepended with its length as varint
func (w *BinWriter) WriteWithLength(buf []byte) {
w.WritePu64(int64(len(buf)))
w.b.Write(buf)
}
// BinReader is a binary protocol reader helper
type BinReader struct {
b *bufio.Reader
count *readers.CountingReader
err error // keeps the first error encountered
}
// NewBinReader creates a binary protocol reader helper
func NewBinReader(reader io.Reader) *BinReader {
r := &BinReader{}
r.count = readers.NewCountingReader(reader)
r.b = bufio.NewReader(r.count)
return r
}
// Count returns number of bytes read
func (r *BinReader) Count() uint64 {
return r.count.BytesRead()
}
// Error returns first encountered error or nil
func (r *BinReader) Error() error {
return r.err
}
// check() keeps the first error encountered in a stream
func (r *BinReader) check(err error) bool {
if err == nil {
return true
}
if r.err == nil {
// keep the first error
r.err = err
}
if err != io.EOF {
log.Fatalf("Error parsing response: %v", err)
}
return false
}
// ReadByteAsInt reads a single byte as an int, returns -1 for EOF or errors
func (r *BinReader) ReadByteAsInt() int {
if octet, err := r.b.ReadByte(); r.check(err) {
return int(octet)
}
return -1
}
// ReadByteAsShort reads a single byte as an int16, returns -1 for EOF or errors
func (r *BinReader) ReadByteAsShort() int16 {
if octet, err := r.b.ReadByte(); r.check(err) {
return int16(octet)
}
return -1
}
// ReadIntSpl reads two bytes as little-endian uint16, returns -1 for EOF or errors
func (r *BinReader) ReadIntSpl() int {
var val uint16
if r.check(binary.Read(r.b, binary.LittleEndian, &val)) {
return int(val)
}
return -1
}
// ReadULong reads an unsigned varint, returns the uint64 equivalent of -1 for EOF or errors
func (r *BinReader) ReadULong() uint64 {
if val, err := binary.ReadUvarint(r.b); r.check(err) {
return val
}
return 0xffffffffffffffff
}
// ReadPu32 reads an unsigned varint as int64, returns -1 for EOF or errors
func (r *BinReader) ReadPu32() int64 {
if val, err := binary.ReadUvarint(r.b); r.check(err) {
return int64(val)
}
return -1
}
// ReadNBytes reads given number of bytes, returns invalid data for EOF or errors
func (r *BinReader) ReadNBytes(len int) []byte {
buf := make([]byte, len)
n, err := r.b.Read(buf)
if r.check(err) {
return buf
}
if n != len {
r.check(ErrorPrematureEOF)
}
return buf
}
// ReadBytesByLength reads buffer length and its bytes
func (r *BinReader) ReadBytesByLength() []byte {
len := r.ReadPu32()
if len < 0 {
r.check(ErrorInvalidLength)
return []byte{}
}
return r.ReadNBytes(int(len))
}
// ReadString reads a zero-terminated string with length
func (r *BinReader) ReadString() string {
len := int(r.ReadPu32())
if len < 1 {
r.check(ErrorInvalidLength)
return ""
}
buf := make([]byte, len-1)
n, err := r.b.Read(buf)
if !r.check(err) {
return ""
}
if n != len-1 {
r.check(ErrorPrematureEOF)
return ""
}
zeroByte, err := r.b.ReadByte()
if !r.check(err) {
return ""
}
if zeroByte != 0 {
r.check(ErrorZeroTerminate)
return ""
}
return string(buf)
}
// ReadDate reads a Unix encoded time
func (r *BinReader) ReadDate() time.Time {
return time.Unix(r.ReadPu32(), 0)
}
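
For a sense of how these helpers compose, here is a small round-trip sketch that uses only the functions defined above; the field layout is illustrative and is not the framing the backend actually sends:

```go
package main

import (
	"fmt"

	"github.com/rclone/rclone/backend/mailru/api"
)

func main() {
	// Write an opcode and a path the way the helpers encode them:
	// unsigned varints and length-prefixed, zero-terminated strings.
	w := api.NewBinWriter()
	w.WritePu16(api.OperationFolderList)
	w.WriteString("/backups")

	// Read the same fields back.
	r := api.NewBinReader(w.Reader())
	fmt.Println(r.ReadPu32())   // 117
	fmt.Println(r.ReadString()) // "/backups"
	fmt.Println(r.Error())      // <nil>
}
```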

backend/mailru/api/m1.go Normal file

@@ -0,0 +1,248 @@
package api
import (
"fmt"
)
// M1 protocol constants and structures
const (
APIServerURL = "https://cloud.mail.ru"
PublicLinkURL = "https://cloud.mail.ru/public/"
DispatchServerURL = "https://dispatcher.cloud.mail.ru"
OAuthURL = "https://o2.mail.ru/token"
OAuthClientID = "cloud-win"
)
// ServerErrorResponse represents an erroneous API response.
type ServerErrorResponse struct {
Message string `json:"body"`
Time int64 `json:"time"`
Status int `json:"status"`
}
func (e *ServerErrorResponse) Error() string {
return fmt.Sprintf("server error %d (%s)", e.Status, e.Message)
}
// FileErrorResponse represents an erroneous API response for a file
type FileErrorResponse struct {
Body struct {
Home struct {
Value string `json:"value"`
Error string `json:"error"`
} `json:"home"`
} `json:"body"`
Status int `json:"status"`
Account string `json:"email,omitempty"`
Time int64 `json:"time,omitempty"`
Message string // non-json, calculated field
}
func (e *FileErrorResponse) Error() string {
return fmt.Sprintf("file error %d (%s)", e.Status, e.Body.Home.Error)
}
// UserInfoResponse contains account metadata
type UserInfoResponse struct {
Body struct {
AccountType string `json:"account_type"`
AccountVerified bool `json:"account_verified"`
Cloud struct {
Beta struct {
Allowed bool `json:"allowed"`
Asked bool `json:"asked"`
} `json:"beta"`
Billing struct {
ActiveCostID string `json:"active_cost_id"`
ActiveRateID string `json:"active_rate_id"`
AutoProlong bool `json:"auto_prolong"`
Basequota int64 `json:"basequota"`
Enabled bool `json:"enabled"`
Expires int `json:"expires"`
Prolong bool `json:"prolong"`
Promocodes struct {
} `json:"promocodes"`
Subscription []interface{} `json:"subscription"`
Version string `json:"version"`
} `json:"billing"`
Bonuses struct {
CameraUpload bool `json:"camera_upload"`
Complete bool `json:"complete"`
Desktop bool `json:"desktop"`
Feedback bool `json:"feedback"`
Links bool `json:"links"`
Mobile bool `json:"mobile"`
Registration bool `json:"registration"`
} `json:"bonuses"`
Enable struct {
Sharing bool `json:"sharing"`
} `json:"enable"`
FileSizeLimit int64 `json:"file_size_limit"`
Space struct {
BytesTotal int64 `json:"bytes_total"`
BytesUsed int `json:"bytes_used"`
Overquota bool `json:"overquota"`
} `json:"space"`
} `json:"cloud"`
Cloudflags struct {
Exists bool `json:"exists"`
} `json:"cloudflags"`
Domain string `json:"domain"`
Login string `json:"login"`
Newbie bool `json:"newbie"`
UI struct {
ExpandLoader bool `json:"expand_loader"`
Kind string `json:"kind"`
Sidebar bool `json:"sidebar"`
Sort struct {
Order string `json:"order"`
Type string `json:"type"`
} `json:"sort"`
Thumbs bool `json:"thumbs"`
} `json:"ui"`
} `json:"body"`
Email string `json:"email"`
Status int `json:"status"`
Time int64 `json:"time"`
}
// ListItem ...
type ListItem struct {
Count struct {
Folders int `json:"folders"`
Files int `json:"files"`
} `json:"count,omitempty"`
Kind string `json:"kind"`
Type string `json:"type"`
Name string `json:"name"`
Home string `json:"home"`
Size int64 `json:"size"`
Mtime int64 `json:"mtime,omitempty"`
Hash string `json:"hash,omitempty"`
VirusScan string `json:"virus_scan,omitempty"`
Tree string `json:"tree,omitempty"`
Grev int `json:"grev,omitempty"`
Rev int `json:"rev,omitempty"`
}
// ItemInfoResponse ...
type ItemInfoResponse struct {
Email string `json:"email"`
Body ListItem `json:"body"`
Time int64 `json:"time"`
Status int `json:"status"`
}
// FolderInfoResponse ...
type FolderInfoResponse struct {
Body struct {
Count struct {
Folders int `json:"folders"`
Files int `json:"files"`
} `json:"count"`
Tree string `json:"tree"`
Name string `json:"name"`
Grev int `json:"grev"`
Size int64 `json:"size"`
Sort struct {
Order string `json:"order"`
Type string `json:"type"`
} `json:"sort"`
Kind string `json:"kind"`
Rev int `json:"rev"`
Type string `json:"type"`
Home string `json:"home"`
List []ListItem `json:"list"`
} `json:"body,omitempty"`
Time int64 `json:"time"`
Status int `json:"status"`
Email string `json:"email"`
}
// ShardInfoResponse ...
type ShardInfoResponse struct {
Email string `json:"email"`
Body struct {
Video []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"video"`
ViewDirect []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"view_direct"`
WeblinkView []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"weblink_view"`
WeblinkVideo []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"weblink_video"`
WeblinkGet []struct {
Count int `json:"count"`
URL string `json:"url"`
} `json:"weblink_get"`
Stock []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"stock"`
WeblinkThumbnails []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"weblink_thumbnails"`
PublicUpload []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"public_upload"`
Auth []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"auth"`
Web []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"web"`
View []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"view"`
Upload []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"upload"`
Get []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"get"`
Thumbnails []struct {
Count string `json:"count"`
URL string `json:"url"`
} `json:"thumbnails"`
} `json:"body"`
Time int64 `json:"time"`
Status int `json:"status"`
}
// CleanupResponse ...
type CleanupResponse struct {
Email string `json:"email"`
Time int64 `json:"time"`
StatusStr string `json:"status"`
}
// GenericResponse ...
type GenericResponse struct {
Email string `json:"email"`
Time int64 `json:"time"`
Status int `json:"status"`
// ignore other fields
}
// GenericBodyResponse ...
type GenericBodyResponse struct {
Email string `json:"email"`
Body string `json:"body"`
Time int64 `json:"time"`
Status int `json:"status"`
}
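
As a quick illustration of how these structures map onto the JSON metadata format, the snippet below unmarshals a hand-written fragment (not a real server response) into a ListItem:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/rclone/rclone/backend/mailru/api"
)

func main() {
	// Made-up metadata fragment, shaped after the ListItem fields above.
	raw := []byte(`{"kind":"file","type":"file","name":"report.pdf","home":"/docs/report.pdf","size":12345,"mtime":1567290000}`)

	var item api.ListItem
	if err := json.Unmarshal(raw, &item); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s: %s (%d bytes)\n", item.Kind, item.Home, item.Size)
}
```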

backend/mailru/mailru.go Normal file

File diff suppressed because it is too large


@@ -0,0 +1,18 @@
// Test Mailru filesystem interface
package mailru_test

import (
	"testing"

	"github.com/rclone/rclone/backend/mailru"
	"github.com/rclone/rclone/fstest/fstests"
)

// TestIntegration runs integration tests against the remote
func TestIntegration(t *testing.T) {
	fstests.Run(t, &fstests.Opt{
		RemoteName:               "TestMailru:",
		NilObject:                (*mailru.Object)(nil),
		SkipBadWindowsCharacters: true,
	})
}


@@ -41,6 +41,7 @@ docs = [
"hubic.md",
"jottacloud.md",
"koofr.md",
"mailru.md",
"mega.md",
"azureblob.md",
"onedrive.md",


@@ -30,6 +30,7 @@ Rclone is a command line program to sync files and directories to and from:
* {{< provider name="Jottacloud" home="https://www.jottacloud.com/en/" config="/jottacloud/" >}}
* {{< provider name="IBM COS S3" home="http://www.ibm.com/cloud/object-storage" config="/s3/#ibm-cos-s3" >}}
* {{< provider name="Koofr" home="https://koofr.eu/" config="/koofr/" >}}
* {{< provider name="Mail.ru Cloud" home="https://cloud.mail.ru/" config="/mailru/" >}}
* {{< provider name="Memset Memstore" home="https://www.memset.com/cloud/storage/" config="/swift/" >}}
* {{< provider name="Mega" home="https://mega.nz/" config="/mega/" >}}
* {{< provider name="Microsoft Azure Blob Storage" home="https://azure.microsoft.com/en-us/services/storage/blobs/" config="/azureblob/" >}}


@@ -37,6 +37,7 @@ See the following for detailed instructions for
* [Hubic](/hubic/)
* [Jottacloud](/jottacloud/)
* [Koofr](/koofr/)
* [Mail.ru Cloud](/mailru/)
* [Mega](/mega/)
* [Microsoft Azure Blob Storage](/azureblob/)
* [Microsoft OneDrive](/onedrive/)

docs/content/mailru.md Normal file

@@ -0,0 +1,280 @@
---
title: "Mailru"
description: "Mail.ru Cloud"
date: "2019-08-04"
---
<i class="fas fa-at"></i> Mail.ru Cloud
----------------------------------------
[Mail.ru Cloud](https://cloud.mail.ru/) is a cloud storage service provided by the Russian internet company [Mail.Ru Group](https://mail.ru). The official desktop client is [Disk-O:](https://disk-o.cloud/), available only on Windows. (Please note that the official sites are in Russian.)
### Features highlights ###
- Paths may be as deep as required, eg `remote:directory/subdirectory`
- Files have a `last modified time` property, directories don't
- Deleted files are by default moved to the trash
- Files and directories can be shared via public links
- Partial uploads or streaming are not supported, file size must be known before upload
- Maximum file size is limited to 2G for a free account, unlimited for paid accounts
- Storage keeps hash for all files and performs transparent deduplication,
the hash algorithm is a modified SHA1
- If a particular file is already present in storage, one can quickly submit file hash
instead of long file upload (this optimization is supported by rclone)
### Configuration ###
Here is an example of making a mailru configuration. First create a Mail.ru Cloud
account and choose a tariff, then run
rclone config
This will guide you through an interactive setup process:
```
No remotes found - make a new one
n) New remote
s) Set configuration password
q) Quit config
n/s/q> n
name> remote
Type of storage to configure.
Enter a string value. Press Enter for the default ("").
Choose a number from below, or type in your own value
[snip]
XX / Mail.ru Cloud
\ "mailru"
[snip]
Storage> mailru
User name (usually email)
Enter a string value. Press Enter for the default ("").
user> username@mail.ru
Password
y) Yes type in my own password
g) Generate random password
y/g> y
Enter the password:
password:
Confirm the password:
password:
Skip full upload if there is another file with same data hash.
This feature is called "speedup" or "put by hash". It is especially efficient
in case of generally available files like popular books, video or audio clips
[snip]
Enter a boolean value (true or false). Press Enter for the default ("true").
Choose a number from below, or type in your own value
1 / Enable
\ "true"
2 / Disable
\ "false"
speedup_enable> 1
Edit advanced config? (y/n)
y) Yes
n) No
y/n> n
Remote config
--------------------
[remote]
type = mailru
user = username@mail.ru
pass = *** ENCRYPTED ***
speedup_enable = true
--------------------
y) Yes this is OK
e) Edit this remote
d) Delete this remote
y/e/d> y
```
Configuration of this backend does not require a local web browser.
You can use the configured backend as shown below:
See top level directories
rclone lsd remote:
Make a new directory
rclone mkdir remote:directory
List the contents of a directory
rclone ls remote:directory
Sync `/home/local/directory` to the remote path, deleting any
excess files in the path.
rclone sync /home/local/directory remote:directory
### Modified time ###
Files support a modification time attribute with up to 1 second precision.
Directories do not have a modification time, which is shown as "Jan 1 1970".
### Hash checksums ###
Hash sums use a custom Mail.ru algorithm based on SHA1.
If the file size is less than or equal to the SHA1 digest size (20 bytes),
its hash is simply its data right-padded with zero bytes.
Hash sum of a larger file is computed as a SHA1 sum of the file data
bytes concatenated with a decimal representation of the data length.
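
For illustration, a minimal sketch of this scheme, written from the description above rather than from the backend's actual hash implementation, could look like this:

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"strconv"
)

// mailruHashSketch follows the description above: payloads up to 20 bytes
// are returned right-padded with zeros, larger payloads are hashed as
// SHA1(data || decimal length). The real implementation may differ in details.
func mailruHashSketch(data []byte) []byte {
	if len(data) <= sha1.Size { // sha1.Size == 20
		padded := make([]byte, sha1.Size)
		copy(padded, data)
		return padded
	}
	h := sha1.New()
	_, _ = h.Write(data)
	_, _ = h.Write([]byte(strconv.Itoa(len(data))))
	return h.Sum(nil)
}

func main() {
	fmt.Printf("%x\n", mailruHashSketch([]byte("short")))
}
```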
### Emptying Trash ###
Removing a file or directory actually moves it to the trash, which is not
visible to rclone but can be seen in a web browser. The trashed file
still occupies part of total quota. If you wish to empty your trash
and free some quota, you can use the `rclone cleanup remote:` command,
which will permanently delete all your trashed files.
This command does not take any path arguments.
### Quota information ###
To view your current quota you can use the `rclone about remote:`
command which will display your usage limit (quota) and the current usage.
### Limitations ###
File size limits depend on your account. A single file size is limited to 2G
for a free account and unlimited for paid tariffs. Please refer to the Mail.ru
site for the total uploaded size limits.
Note that Mailru is case insensitive so you can't have a file called
"Hello.doc" and one called "hello.doc".
<!--- autogenerated options start - DO NOT EDIT, instead edit fs.RegInfo in backend/mailru/mailru.go then run make backenddocs -->
### Standard Options
Here are the standard options specific to mailru (Mail.ru Cloud).
#### --mailru-user
User name (usually email)
- Config: user
- Env Var: RCLONE_MAILRU_USER
- Type: string
- Default: ""
#### --mailru-pass
Password
- Config: pass
- Env Var: RCLONE_MAILRU_PASS
- Type: string
- Default: ""
#### --mailru-speedup-enable
Skip full upload if there is another file with same data hash.
This feature is called "speedup" or "put by hash". It is especially efficient
in case of generally available files like popular books, video or audio clips,
because files are searched by hash in all accounts of all mailru users.
Please note that rclone may need local memory and disk space to calculate
content hash in advance and decide whether full upload is required.
Also, if rclone does not know file size in advance (e.g. in case of
streaming or partial uploads), it will not even try this optimization.
- Config: speedup_enable
- Env Var: RCLONE_MAILRU_SPEEDUP_ENABLE
- Type: bool
- Default: true
- Examples:
- "true"
- Enable
- "false"
- Disable
### Advanced Options
Here are the advanced options specific to mailru (Mail.ru Cloud).
#### --mailru-speedup-file-patterns
Comma separated list of file name patterns eligible for speedup (put by hash).
Patterns are case insensitive and can contain `*` or `?` meta characters.
- Config: speedup_file_patterns
- Env Var: RCLONE_MAILRU_SPEEDUP_FILE_PATTERNS
- Type: string
- Default: "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf"
- Examples:
- `""`
- Empty list completely disables speedup (put by hash).
- `"*"`
- All files will be attempted for speedup.
- `"*.mkv,*.avi,*.mp4,*.mp3"`
- Only common audio/video files will be tried for put by hash.
- `"*.zip,*.gz,*.rar,*.pdf"`
- Only common archives or PDF books will be tried for speedup.
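
For illustration, case-insensitive matching of a file name against such a pattern list could be done along these lines (a sketch only; the backend's actual matcher may differ):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// matchesAnyPattern reports whether name matches one of the comma separated
// patterns, ignoring case. Patterns may contain * and ? meta characters.
func matchesAnyPattern(patterns, name string) bool {
	for _, p := range strings.Split(patterns, ",") {
		p = strings.TrimSpace(p)
		if p == "" {
			continue
		}
		if ok, err := path.Match(strings.ToLower(p), strings.ToLower(name)); err == nil && ok {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(matchesAnyPattern("*.mkv,*.avi,*.mp4,*.mp3", "Holiday.MKV")) // true
}
```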
#### --mailru-speedup-max-disk
This option allows you to disable speedup (put by hash) for large files
(because preliminary hashing can exhaust your RAM or disk space).
- Config: speedup_max_disk
- Env Var: RCLONE_MAILRU_SPEEDUP_MAX_DISK
- Type: SizeSuffix
- Default: 3G
- Examples:
- "0"
- Completely disable speedup (put by hash).
- "1G"
- Files larger than 1Gb will be uploaded directly.
- "3G"
- Choose this option if you have less than 3Gb free on local disk.
#### --mailru-speedup-max-memory
Files larger than the size given below will always be hashed on disk.
- Config: speedup_max_memory
- Env Var: RCLONE_MAILRU_SPEEDUP_MAX_MEMORY
- Type: SizeSuffix
- Default: 32M
- Examples:
- "0"
- Preliminary hashing will always be done in a temporary disk location.
- "32M"
- Do not dedicate more than 32Mb RAM for preliminary hashing.
- "256M"
- You have at most 256Mb RAM free for hash calculations.
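
Taken together, the speedup options documented above interact roughly as in the sketch below (illustrative names and logic only, not the backend's actual code):

```go
package main

import "fmt"

// decideSpeedup sketches how the documented options combine. sizeKnown is
// false for streaming or partial uploads; nameMatches reflects
// --mailru-speedup-file-patterns. All names here are illustrative.
func decideSpeedup(enabled, sizeKnown, nameMatches bool, size, maxDisk, maxMemory int64) (try, hashInMemory bool) {
	if !enabled || !sizeKnown || !nameMatches {
		return false, false // speedup disabled or size unknown: upload directly
	}
	if maxDisk == 0 || size > maxDisk {
		return false, false // too large for preliminary hashing
	}
	// Small files are hashed in RAM, larger ones via a temporary disk location.
	return true, maxMemory > 0 && size <= maxMemory
}

func main() {
	try, inMemory := decideSpeedup(true, true, true, 100<<20, 3<<30, 32<<20)
	fmt.Println(try, inMemory) // true false: attempted, hashed on disk
}
```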
#### --mailru-check-hash
What should copy do if file checksum is mismatched or invalid
- Config: check_hash
- Env Var: RCLONE_MAILRU_CHECK_HASH
- Type: bool
- Default: true
- Examples:
- "true"
- Fail with error.
- "false"
- Ignore and continue.
#### --mailru-user-agent
HTTP user agent used internally by client.
Defaults to "rclone/VERSION" or "--user-agent" provided on command line.
- Config: user_agent
- Env Var: RCLONE_MAILRU_USER_AGENT
- Type: string
- Default: ""
#### --mailru-quirks
Comma separated list of internal maintenance flags. This option is intended
for development purposes. Should not be used by an ordinary user.
- Config: quirks
- Env Var: RCLONE_MAILRU_QUIRKS
- Type: string
- Default: ""
<!--- autogenerated options stop -->


@@ -31,6 +31,7 @@ Here is an overview of the major features of each cloud storage system.
| Hubic | MD5 | Yes | No | No | R/W |
| Jottacloud | MD5 | Yes | Yes | No | R/W |
| Koofr | MD5 | No | Yes | No | - |
| Mail.ru Cloud | Mailru ‡‡‡ | Yes | Yes | No | - |
| Mega | - | No | No | Yes | - |
| Microsoft Azure Blob Storage | MD5 | Yes | No | No | R/W |
| Microsoft OneDrive | SHA1 ‡‡ | Yes | Yes | No | R |
@@ -70,6 +71,8 @@ or `sha1sum` as well as `echo` are in the remote's PATH.
for business and SharePoint server support Microsoft's own
[QuickXorHash](https://docs.microsoft.com/en-us/onedrive/developer/code-snippets/quickxorhash).

‡‡‡ Mail.ru uses its own modified SHA1 hash

### ModTime ###

The cloud storage system supports setting modification times on
@@ -148,6 +151,7 @@ operations more efficient.
| HTTP | No | No | No | No | No | No | No | No [#2178](https://github.com/rclone/rclone/issues/2178) | No | Yes |
| Hubic | Yes † | Yes | No | No | No | Yes | Yes | No [#2178](https://github.com/rclone/rclone/issues/2178) | Yes | No |
| Jottacloud | Yes | Yes | Yes | Yes | No | Yes | No | Yes | Yes | Yes |
| Mail.ru Cloud | Yes | Yes | Yes | Yes | Yes | No | No | Yes | Yes | Yes |
| Mega | Yes | No | Yes | Yes | Yes | No | No | No [#2178](https://github.com/rclone/rclone/issues/2178) | Yes | Yes |
| Microsoft Azure Blob Storage | Yes | Yes | No | No | No | Yes | No | No [#2178](https://github.com/rclone/rclone/issues/2178) | No | No |
| Microsoft OneDrive | Yes | Yes | Yes | Yes | No [#575](https://github.com/rclone/rclone/issues/575) | No | No | Yes | Yes | Yes |


@@ -73,6 +73,7 @@
<li><a href="/hubic/"><i class="fa fa-space-shuttle"></i> Hubic</a></li>
<li><a href="/jottacloud/"><i class="fa fa-cloud"></i> Jottacloud</a></li>
<li><a href="/koofr/"><i class="fa fa-suitcase"></i> Koofr</a></li>
<li><a href="/mailru/"><i class="fa fa-at"></i> Mail.ru Cloud</a></li>
<li><a href="/mega/"><i class="fa fa-archive"></i> Mega</a></li>
<li><a href="/azureblob/"><i class="fab fa-windows"></i> Microsoft Azure Blob Storage</a></li>
<li><a href="/onedrive/"><i class="fab fa-windows"></i> Microsoft OneDrive</a></li>


@@ -130,3 +130,7 @@ backends:
remote: "TestPutio:"
subdir: false
fastlist: false
- backend: "mailru"
remote: "TestMailru:"
subdir: false
fastlist: false