drive: fix transfer of big files using up lots of memory - fixes #5

This was done by adding a seekWrapper type which wraps an io.Reader
with a basic Seek implementation so that
code.google.com/p/google-api-go-client/googleapi can detect the length.
Without this, the getReaderSize function reads the entire file into
memory to find its length.
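
For context, here is a sketch of how a Seek-based length probe of this
sort typically works (an illustration only, not the actual googleapi
code; readerSize is a made-up name):

package sketch

import (
	"bytes"
	"io"
	"io/ioutil"
	"os"
)

// readerSize reports the length of r. If r can Seek, the length is
// measured as the distance from the current offset to the end, with
// no data read; otherwise the whole stream has to be buffered.
func readerSize(r io.Reader) (io.Reader, int64, error) {
	if s, ok := r.(io.Seeker); ok {
		pos, err := s.Seek(0, os.SEEK_CUR) // current offset
		if err != nil {
			return nil, 0, err
		}
		end, err := s.Seek(0, os.SEEK_END) // offset of the end
		if err != nil {
			return nil, 0, err
		}
		if _, err := s.Seek(pos, os.SEEK_SET); err != nil { // rewind
			return nil, 0, err
		}
		return r, end - pos, nil
	}
	// No Seek available: buffer everything in memory - the behaviour
	// this commit avoids for big files.
	buf, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, 0, err
	}
	return bytes.NewReader(buf), int64(len(buf)), nil
}

Against the seekWrapper added below, SEEK_CUR answers 0 and SEEK_END
answers the known size, so such a probe computes size - 0 without
touching the data.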
Nick Craig-Wood 2014-07-04 17:17:21 +01:00
parent d4817399ff
commit 91cfbd4146
1 changed file with 27 additions and 0 deletions


@@ -22,6 +22,7 @@ import (
 	"log"
 	"mime"
 	"net/http"
+	"os"
 	"path"
 	"strings"
 	"sync"
@@ -689,6 +690,30 @@ func (f *FsDrive) ListDir() fs.DirChan {
 	return out
 }
 
+// seekWrapper wraps an io.Reader with a basic Seek for
+// code.google.com/p/google-api-go-client/googleapi
+// to detect the length (see getReaderSize function)
+type seekWrapper struct {
+	in   io.Reader
+	size int64
+}
+
+// Read bytes from the object - see io.Reader
+func (file *seekWrapper) Read(p []byte) (n int, err error) {
+	return file.in.Read(p)
+}
+
+// Seek - minimal implementation so googleapi can detect the length
+func (file *seekWrapper) Seek(offset int64, whence int) (int64, error) {
+	switch whence {
+	case os.SEEK_CUR:
+		return 0, nil
+	case os.SEEK_END:
+		return file.size, nil
+	}
+	return 0, nil
+}
+
 // Put the object
 //
 // This assumes that the object doesn't already exist - if you
@@ -725,6 +750,7 @@ func (f *FsDrive) Put(in io.Reader, remote string, modTime time.Time, size int64
 	}
 
 	// Make the API request to upload metadata and file data.
+	in = &seekWrapper{in: in, size: size}
 	info, err = f.svc.Files.Insert(info).Media(in).Do()
 	if err != nil {
 		return o, fmt.Errorf("Upload failed: %s", err)
@@ -926,6 +952,7 @@ func (o *FsObjectDrive) Update(in io.Reader, modTime time.Time, size int64) error
 	}
 
 	// Make the API request to upload metadata and file data.
+	in = &seekWrapper{in: in, size: size}
 	info, err := o.drive.svc.Files.Update(info.Id, info).SetModifiedDate(true).Media(in).Do()
 	if err != nil {
 		return fmt.Errorf("Update failed: %s", err)
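
As a closing illustration (hypothetical names, not part of the commit,
and assuming the seekWrapper type from the diff above is in scope),
this is what the wrapper buys when the source cannot really seek, such
as a network stream whose size is already known:

package sketch

import (
	"io"
	"os"
)

// uploadFrom shows how a reader of known size would be wrapped before
// being handed to a length-probing upload API.
func uploadFrom(src io.Reader, size int64) {
	var in io.Reader = &seekWrapper{in: src, size: size}

	// A probe like getReaderSize sees an io.Seeker and asks two
	// questions, both answered from the size field alone.
	s := in.(io.Seeker)
	pos, _ := s.Seek(0, os.SEEK_CUR) // always 0
	end, _ := s.Seek(0, os.SEEK_END) // always size
	_ = end - pos                    // == size, nothing buffered

	// ... .Media(in) would now stream src directly ...
}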