Document workaround for ACD maximum file size.

Document workaround for ACD maximum file size and display a warning in verbose mode before upload starts.

Fixes #215.
klauspost 2016-01-04 13:23:33 +01:00 committed by Nick Craig-Wood
parent 3c31d711b3
commit 4ce2a84df0
2 changed files with 16 additions and 0 deletions

@@ -39,6 +39,7 @@ const (
 	statusAvailable = "AVAILABLE"
 	timeFormat      = time.RFC3339 // 2014-03-07T22:31:12.173Z
 	minSleep        = 20 * time.Millisecond
+	warnFileSize    = 50 << 30 // Display warning for files larger than this size
 )
 
 // Globals
@@ -440,6 +441,9 @@ func (f *Fs) Put(in io.Reader, remote string, modTime time.Time, size int64) (fs
 	if err != nil {
 		return nil, err
 	}
+	if size > warnFileSize {
+		fs.Debug(f, "Warning: file %q may fail because it is too big. Use --max-size=%dGB to skip large files.", remote, warnFileSize>>30)
+	}
 	folder := acd.FolderFromId(directoryID, o.fs.c.Nodes)
 	var info *acd.File
 	var resp *http.Response
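
For reference, `50 << 30` expresses 50 GiB in bytes, and the matching `warnFileSize>>30` in the log message converts that byte count back to whole gigabytes for display. Below is a minimal standalone sketch of the same threshold check, outside rclone (the 60 GiB file size is a made-up example):

```go
package main

import "fmt"

// warnFileSize mirrors the constant added above: 50 GiB in bytes (50 * 2^30).
const warnFileSize = 50 << 30

func main() {
	size := int64(60 << 30) // hypothetical upload size: 60 GiB

	// Same comparison as in the diff; >>30 turns bytes back into whole GiB.
	if size > warnFileSize {
		fmt.Printf("Warning: file may fail because it is too big. Use --max-size=%dGB to skip large files.\n", warnFileSize>>30)
	}
}
```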

@@ -102,3 +102,15 @@ Amazon cloud drive has rate limiting so you may notice errors in the
 sync (429 errors). rclone will automatically retry the sync up to 3
 times by default (see `--retries` flag) which should hopefully work
 around this problem.
+
+Amazon cloud drive has an internal limit on the size of files that can
+be uploaded to the service. This limit is not officially published,
+but all files larger than it will fail to upload.
+
+At the time of writing (Jan 2016) the limit is in the region of 50GB
+per file, so larger files are likely to fail.
+
+Unfortunately there is no way for rclone to see that such a failure is
+because of file size, so it will retry the operation as it would any
+other failure. To avoid this problem, use the `--max-size=50GB` option
+to limit the maximum size of uploaded files.
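
As a concrete usage sketch of the documented workaround (the source and destination paths here are hypothetical):

    rclone sync --max-size=50GB /path/to/source remote:dest

Files over 50GB are then excluded before the upload starts, so they never hit the unpublished server-side limit or the retry loop described above.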