diff --git a/backend/b2/b2.go b/backend/b2/b2.go
index 590314250..f45ac858f 100644
--- a/backend/b2/b2.go
+++ b/backend/b2/b2.go
@@ -120,20 +120,26 @@ these chunks are buffered in memory and there might a maximum of
 minimim size.`,
 			Default:  fs.SizeSuffix(defaultChunkSize),
 			Advanced: true,
+		}, {
+			Name:     "disable_checksum",
+			Help:     `Disable checksums for large (> upload cutoff) files`,
+			Default:  false,
+			Advanced: true,
 		}},
 	})
 }
 
 // Options defines the configuration for this backend
 type Options struct {
-	Account      string        `config:"account"`
-	Key          string        `config:"key"`
-	Endpoint     string        `config:"endpoint"`
-	TestMode     string        `config:"test_mode"`
-	Versions     bool          `config:"versions"`
-	HardDelete   bool          `config:"hard_delete"`
-	UploadCutoff fs.SizeSuffix `config:"upload_cutoff"`
-	ChunkSize    fs.SizeSuffix `config:"chunk_size"`
+	Account         string        `config:"account"`
+	Key             string        `config:"key"`
+	Endpoint        string        `config:"endpoint"`
+	TestMode        string        `config:"test_mode"`
+	Versions        bool          `config:"versions"`
+	HardDelete      bool          `config:"hard_delete"`
+	UploadCutoff    fs.SizeSuffix `config:"upload_cutoff"`
+	ChunkSize       fs.SizeSuffix `config:"chunk_size"`
+	DisableCheckSum bool          `config:"disable_checksum"`
 }
 
 // Fs represents a remote b2 server
diff --git a/backend/b2/upload.go b/backend/b2/upload.go
index 76b2f32f5..0ac60f2c7 100644
--- a/backend/b2/upload.go
+++ b/backend/b2/upload.go
@@ -116,8 +116,10 @@ func (f *Fs) newLargeUpload(o *Object, in io.Reader, src fs.ObjectInfo) (up *lar
 		},
 	}
 	// Set the SHA1 if known
-	if calculatedSha1, err := src.Hash(hash.SHA1); err == nil && calculatedSha1 != "" {
-		request.Info[sha1Key] = calculatedSha1
+	if !o.fs.opt.DisableCheckSum {
+		if calculatedSha1, err := src.Hash(hash.SHA1); err == nil && calculatedSha1 != "" {
+			request.Info[sha1Key] = calculatedSha1
+		}
 	}
 	var response api.StartLargeFileResponse
 	err = f.pacer.Call(func() (bool, error) {
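
For reviewers: the only behavioural change here is that the whole-file SHA-1 attached to the start-large-file request is skipped when the new option is set; per-chunk handling is untouched by this diff. Below is a minimal, self-contained sketch of the same gating pattern. The `options` struct, the `startLargeFileInfo` helper, and the `large_file_sha1` key name are illustrative assumptions for the example, not rclone's actual internals.

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

// options mirrors the shape of the new DisableCheckSum flag added in this diff.
type options struct {
	DisableCheckSum bool
}

// sha1Key is an illustrative metadata key name for this sketch.
const sha1Key = "large_file_sha1"

// startLargeFileInfo builds the metadata map for a large upload, attaching the
// SHA-1 of the source only when checksums are enabled, mirroring the gating in
// newLargeUpload above (which asks the source for an already-known hash rather
// than reading the data itself).
func startLargeFileInfo(opt options, src io.Reader) (map[string]string, error) {
	info := make(map[string]string)
	if opt.DisableCheckSum {
		return info, nil
	}
	h := sha1.New()
	if _, err := io.Copy(h, src); err != nil {
		return nil, err
	}
	info[sha1Key] = hex.EncodeToString(h.Sum(nil))
	return info, nil
}

func main() {
	withSum, _ := startLargeFileInfo(options{DisableCheckSum: false}, strings.NewReader("example file contents"))
	fmt.Println("checksums enabled: ", withSum)

	withoutSum, _ := startLargeFileInfo(options{DisableCheckSum: true}, strings.NewReader("example file contents"))
	fmt.Println("checksums disabled:", withoutSum)
}
```

Given the `config:"disable_checksum"` tag above, the option should be settable as `disable_checksum = true` in the remote's config section in the usual way. The apparent churn in the Options struct is just gofmt re-aligning the field columns after the longer DisableCheckSum name was added.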