sftp: add --sftp-concurrency to improve high latency transfers

See: https://forum.rclone.org/t/increasing-sftp-transfer-speed/29928
This commit is contained in:
Nick Craig-Wood 2022-03-28 12:57:34 +01:00
parent 95e0934755
commit 78120d40d9
1 changed file with 12 additions and 0 deletions

View File

@@ -292,6 +292,16 @@ a large file, try lowering this number.
`,
Default: 32 * fs.Kibi,
Advanced: true,
}, {
Name: "concurrency",
Help: `The maximum number of outstanding requests for one file
This controls the maximum number of outstanding requests for one file.
Increasing it will increase throughput on high latency links at the
cost of using more memory.
`,
Default: 64,
Advanced: true,
}},
}
fs.Register(fsi)
@@ -325,6 +335,7 @@ type Options struct {
DisableConcurrentWrites bool `config:"disable_concurrent_writes"`
IdleTimeout fs.Duration `config:"idle_timeout"`
ChunkSize fs.SizeSuffix `config:"chunk_size"`
Concurrency int `config:"concurrency"`
}
// Fs stores the interface to the remote SFTP files
@@ -503,6 +514,7 @@ func (f *Fs) newSftpClient(conn *ssh.Client, opts ...sftp.ClientOption) (*sftp.C
sftp.UseConcurrentReads(!f.opt.DisableConcurrentReads),
sftp.UseConcurrentWrites(!f.opt.DisableConcurrentWrites),
sftp.MaxPacketUnchecked(int(f.opt.ChunkSize)),
sftp.MaxConcurrentRequestsPerFile(f.opt.Concurrency),
)
return sftp.NewClientPipe(pr, pw, opts...)
}