bisync: add to integration tests - fixes #7665

This change officially adds bisync to the nightly integration tests for all
backends.

This will be part of giving us the confidence to take bisync out of beta.

A number of fixes have been added to account for features that differ between
backends -- for example, hash types / modtime support, empty directories,
unicode normalization, and unimportant differences in log output. More such
fixes will likely be needed once the tests start running against the full set
of remotes.

Additionally, bisync's extremely sensitive tests revealed a few bugs in other
backends that existing tests had not previously caught. Fixes for those issues
have been submitted in the following separate PRs (bisync test failures are
expected until they are merged):

- #7670 memory: fix deadlock in operations.Purge
- #7688 memory: fix incorrect list entries when rooted at subdirectory
- #7690 memory: fix dst mutating src after server-side copy
- #7692 dropbox: fix chunked uploads when size <= chunkSize

Relatedly, workarounds have been put in place for the following backend
limitations that are unsolvable for the time being:

- #3262 drive is sometimes aware of trashed files/folders when it shouldn't be
- #6199 dropbox can't handle emojis and certain other characters
- #4590 onedrive API has a longstanding bug for conflictBehavior=replace in server-side copy/move
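In the test suite these are worked around in the new checkPreReqs prerequisites (see the diff below), mainly by disabling the affected optional features so that bisync falls back to regular transfers. A minimal sketch of that pattern, using rclone's fs.Features API but simplified from the actual test code:

package prereqs

import (
	"strings"

	"github.com/rclone/rclone/fs"
)

// disableBrokenFeatures mirrors the workaround pattern from checkPreReqs:
// backends with known server-side copy/move bugs have those optional
// features disabled so the tests exercise regular uploads instead.
func disableBrokenFeatures(f fs.Fs) {
	if strings.HasPrefix(f.String(), "Dropbox") {
		f.Features().Disable("Copy") // #6199: character/emoji handling
	}
	if strings.HasPrefix(f.String(), "OneDrive") {
		f.Features().Disable("Copy") // #4590: conflictBehavior=replace bug
		f.Features().Disable("Move")
	}
}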
nielash 2024-03-08 17:53:33 -05:00
commit 2bebbfaded (parent fecce67ac6)
56 changed files with 59314 additions and 176 deletions

View File

@ -0,0 +1,130 @@
package bisync_test
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/rclone/rclone/cmd/bisync/bilib"
"github.com/rclone/rclone/fs"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
)
const configFile = "../../fstest/test_all/config.yaml"
// Config describes the config for this program
type Config struct {
Tests []Test
Backends []Backend
}
// Test describes an integration test to run with `go test`
type Test struct {
Path string // path to the source directory
FastList bool // if it is possible to add -fast-list to tests
Short bool // if it is possible to run the test with -short
AddBackend bool // set if Path needs the current backend appending
NoRetries bool // set if no retries should be performed
NoBinary bool // set to not build a binary in advance
LocalOnly bool // if set only run with the local backend
}
// Backend describes a backend test
//
// FIXME make bucket-based remotes set sub-dir automatically???
type Backend struct {
Backend string // name of the backend directory
Remote string // name of the test remote
FastList bool // set to test with -fast-list
Short bool // set to test with -short
OneOnly bool // set to run only one backend test at once
MaxFile string // file size limit
CleanUp bool // when running clean, run cleanup first
Ignore []string // test names to ignore the failure of
Tests []string // paths of tests to run, blank for all
ListRetries int // -list-retries if > 0
ExtraTime float64 // factor to multiply the timeout by
}
func parseConfig() (*Config, error) {
d, err := os.ReadFile(configFile)
if err != nil {
return nil, fmt.Errorf("failed to read config file: %w", err)
}
config := &Config{}
err = yaml.Unmarshal(d, &config)
if err != nil {
return nil, fmt.Errorf("failed to parse config file: %w", err)
}
return config, nil
}
const debugFormat = ` {
"name": %q,
"type": "go",
"request": "launch",
"mode": "test",
"program": "./cmd/bisync",
"args": ["-remote", %q, "-remote2", %q, "-case", %q, "-no-cleanup"]
},
`
const docFormat = `{
"version": "0.2.0",
"configurations": [
%s
]
}`
// generates a launch.json file for debugging in VS Code.
// note: just copy the ones you need into your real launch.json file, as VS Code will crash if there are too many!
func (b *bisyncTest) generateDebuggers() {
config, err := parseConfig()
if err != nil {
fs.Errorf(config, "failed to parse config: %v", err)
}
testList := []string{}
for _, testCase := range b.listDir(b.dataRoot) {
if strings.HasPrefix(testCase, "test_") {
// if dir is empty, skip it (can happen due to gitignored files/dirs when checking out branch)
if len(b.listDir(filepath.Join(b.dataRoot, testCase))) == 0 {
continue
}
testList = append(testList, testCase)
}
}
variations := []string{"LocalRemote", "RemoteLocal", "RemoteRemote"}
debuggers := ""
for _, backend := range config.Backends {
if backend.Remote == "" {
backend.Remote = "local"
}
for _, testcase := range testList {
for _, variation := range variations {
if variation != "RemoteRemote" && backend.Remote == "local" {
continue
}
name := fmt.Sprintf("Test %s %s %s", backend.Remote, testcase, variation)
switch variation {
case "LocalRemote":
debuggers += fmt.Sprintf(debugFormat, name, "local", backend.Remote, testcase)
case "RemoteLocal":
debuggers += fmt.Sprintf(debugFormat, name, backend.Remote, "local", testcase)
case "RemoteRemote":
debuggers += fmt.Sprintf(debugFormat, name, backend.Remote, backend.Remote, testcase)
}
}
}
}
out := fmt.Sprintf(docFormat, debuggers)
outpath := "./testdata/bisync_vscode_debuggers_launch.json"
err = os.WriteFile(outpath, []byte(out), bilib.PermSecure)
assert.NoError(b.t, err, "writing golden file %s", outpath)
}
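As a quick illustration of what generateDebuggers emits, the hypothetical standalone snippet below renders a single launch.json entry using the same debugFormat template, for an assumed TestDrive: remote and test_basic case ("RemoteLocal" variation, i.e. -remote is the cloud remote and -remote2 is local):

package main

import "fmt"

const debugFormat = ` {
	"name": %q,
	"type": "go",
	"request": "launch",
	"mode": "test",
	"program": "./cmd/bisync",
	"args": ["-remote", %q, "-remote2", %q, "-case", %q, "-no-cleanup"]
},
`

func main() {
	// Same naming scheme as generateDebuggers: "Test <remote> <testcase> <variation>"
	name := fmt.Sprintf("Test %s %s %s", "TestDrive:", "test_basic", "RemoteLocal")
	entry := fmt.Sprintf(debugFormat, name, "TestDrive:", "local", "test_basic")
	fmt.Print(entry) // paste into the "configurations" array of launch.json
}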

View File

@ -61,6 +61,15 @@ const (
var initDate = time.Date(2000, time.January, 1, 0, 0, 0, 0, bisync.TZ)
/* Useful Command Shortcuts */
// go test ./cmd/bisync -remote local -race
// go test ./cmd/bisync -remote local -golden
// go test ./cmd/bisync -remote local -case extended_filenames
// go run ./fstest/test_all -run '^TestBisync.*$' -timeout 3h -verbose -maxtries 5
// go run ./fstest/test_all -remotes local,TestCrypt:,TestDrive:,TestOneDrive:,TestOneDriveBusiness:,TestDropbox:,TestCryptDrive:,TestOpenDrive:,TestChunker:,:memory:,TestCryptNoEncryption:,TestCombine:DirA,TestFTPRclone:,TestWebdavRclone:,TestS3Rclone:,TestSFTPRclone:,TestSFTPRcloneSSH: -run '^TestBisync.*$' -timeout 3h -verbose -maxtries 5
// go test -timeout 3h -run '^TestBisync.*$' github.com/rclone/rclone/cmd/bisync -remote TestDrive:Bisync -v
// go test -timeout 3h -run '^TestBisyncRemoteRemote/basic$' github.com/rclone/rclone/cmd/bisync -remote TestDropbox:Bisync -v
// logReplacements make modern test logs comparable with golden dir.
// It is a string slice of even length with this structure:
//
@ -77,7 +86,8 @@ var logReplacements = []string{
`^DEBUG : .*$`, dropMe,
// ignore dropbox info messages
`^NOTICE: too_many_(requests|write_operations)/\.*: Too many requests or write operations.*$`, dropMe,
- `^NOTICE: Dropbox root .*?: Forced to upload files to set modification times on this backend.$`, dropMe,
+ `^NOTICE: .*?: Forced to upload files to set modification times on this backend.$`, dropMe,
+ `^INFO : .*? Committing uploads - please wait...$`, dropMe,
`^INFO : .*?: src and dst identical but can't set mod time without deleting and re-uploading$`, dropMe,
// ignore crypt info messages
`^INFO : .*?: Crypt detected! Using cryptcheck instead of check. \(Use --size-only or --ignore-checksum to disable\)$`, dropMe,
@ -89,6 +99,7 @@ var logReplacements = []string{
`^.*?"SlowHashDetected":.*?$`, dropMe,
`^.*? for same-side diffs on .*?$`, dropMe,
`^.*?Downloading hashes.*?$`, dropMe,
+ `^.*?Can't compare hashes, so using check --download.*?$`, dropMe,
// ignore timestamps in directory time updates
`^(INFO : .*?: (Made directory with|Set directory) (metadata|modification time)).*$`, dropMe,
// ignore sizes in directory time updates
@ -127,7 +138,7 @@ var logHoppers = []string{
`(?:INFO |NOTICE): .*: Fixed case by renaming to: .*`,
// order of files re-checked prior to a conflict rename
- `ERROR : .*: md5 differ.*`,
+ `ERROR : .*: {hashtype} differ.*`,
// Directory modification time setting can happen in any order
`INFO : .*: (Set directory modification time|Made directory with metadata).*`,
@ -197,12 +208,35 @@ type bisyncTest struct {
debug bool
stopAt int
TestFn bisync.TestFunc
+ ignoreModtime bool // ignore modtimes when comparing final listings, for backends without support
}
var color = bisync.Color
// Path1 is remote, Path2 is local
func TestBisyncRemoteLocal(t *testing.T) {
if *fstest.RemoteName == *argRemote2 {
t.Skip("path1 and path2 are the same remote")
}
testBisync(t, *fstest.RemoteName, *argRemote2)
}
// Path1 is local, Path2 is remote
func TestBisyncLocalRemote(t *testing.T) {
if *fstest.RemoteName == *argRemote2 {
t.Skip("path1 and path2 are the same remote")
}
testBisync(t, *argRemote2, *fstest.RemoteName)
}
// Path1 and Path2 are both different directories on remote
// (useful for testing server-side copy/move)
func TestBisyncRemoteRemote(t *testing.T) {
testBisync(t, *fstest.RemoteName, *fstest.RemoteName)
}
// TestBisync is a test engine for bisync test cases.
- func TestBisync(t *testing.T) {
+ func testBisync(t *testing.T, path1, path2 string) {
ctx := context.Background()
fstest.Initialise()
@ -215,7 +249,8 @@ func TestBisync(t *testing.T) {
ci.RefreshTimes = true
}
bisync.Colors = true
- time.Local, _ = time.LoadLocation("America/New_York")
+ time.Local = bisync.TZ
+ ci.FsCacheExpireDuration = 5 * time.Hour
baseDir, err := os.Getwd()
require.NoError(t, err, "get current directory")
@ -234,8 +269,8 @@ func TestBisync(t *testing.T) {
logDir: filepath.Join(tempDir, "logs"),
logPath: filepath.Join(workDir, logFileName),
// global flags
- argRemote1: *fstest.RemoteName,
- argRemote2: *argRemote2,
+ argRemote1: path1,
+ argRemote2: path2,
noCompare: *argNoCompare,
noCleanup: *argNoCleanup,
golden: *argGolden,
@ -333,10 +368,11 @@ func (b *bisyncTest) runTestCase(ctx context.Context, t *testing.T, testCase str
// Prepare initial content
b.cleanupCase(ctx)
- initFs, err := fs.NewFs(ctx, b.initDir)
+ initFs, err := cache.Get(ctx, b.initDir)
require.NoError(b.t, err)
- require.NoError(b.t, sync.CopyDir(ctx, b.fs1, initFs, true), "setting up path1")
- require.NoError(b.t, sync.CopyDir(ctx, b.fs2, initFs, true), "setting up path2")
+ ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
+ require.NoError(b.t, sync.CopyDir(ctxNoDsStore, b.fs1, initFs, true), "setting up path1")
+ require.NoError(b.t, sync.CopyDir(ctxNoDsStore, b.fs2, initFs, true), "setting up path2")
// Create log file
b.mkdir(b.workDir)
@ -443,11 +479,11 @@ func (b *bisyncTest) runTestCase(ctx context.Context, t *testing.T, testCase str
// if a local path is provided, it's ignored (the test will run under system temp)
func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string) (f, parent fs.Fs, path, canon string) {
var err error
- if bilib.IsLocalPath(remote) {
+ if bilib.IsLocalPath(remote) && !strings.HasPrefix(remote, ":") {
if remote != "" && remote != "local" {
b.t.Fatalf(`Missing ":" in remote %q. Use "local" to test with local filesystem.`, remote)
}
- parent, err = fs.NewFs(ctx, b.tempDir)
+ parent, err = cache.Get(ctx, b.tempDir)
require.NoError(b.t, err, "parsing %s", b.tempDir)
path = filepath.Join(b.tempDir, b.testCase)
@ -459,7 +495,7 @@ func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string)
remote += "/"
}
remote += b.randName
- parent, err = fs.NewFs(ctx, remote)
+ parent, err = cache.Get(ctx, remote)
require.NoError(b.t, err, "parsing %s", remote)
path = remote + "/" + b.testCase
@ -467,13 +503,9 @@ func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string)
path += "/" + subdir
}
- f, err = fs.NewFs(ctx, path)
+ f, err = cache.Get(ctx, path)
require.NoError(b.t, err, "parsing %s/%s", remote, subdir)
path = bilib.FsPath(f) // Make it canonical
- if f.Precision() == fs.ModTimeNotSupported {
- b.t.Skipf("modification time support is missing on %s", subdir)
- }
return
}
@ -509,12 +541,12 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
for i := 0; i < 50; i++ {
dst := "file" + fmt.Sprint(i) + ".txt"
- err := b.copyFile(ctx, src, b.path2, dst)
+ err := b.copyFile(ctx, src, bilib.StripHexString(b.path2), dst)
if err != nil {
fs.Errorf(src, "error copying file: %v", err)
}
dst = "file" + fmt.Sprint(100-i) + ".txt"
- err = b.copyFile(ctx, src, b.path1, dst)
+ err = b.copyFile(ctx, src, bilib.StripHexString(b.path1), dst)
if err != nil {
fs.Errorf(dst, "error copying file: %v", err)
}
@ -534,18 +566,21 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
return b.saveTestListings(args[1], false)
case "purge-children":
b.checkArgs(args, 1, 1)
- if fsrc, err = fs.NewFs(ctx, args[1]); err != nil {
- return err
- }
- err = purgeChildren(ctx, fsrc, "")
- if err != nil {
- return err
- }
- return
+ dir := ""
+ if strings.HasPrefix(args[1], bilib.StripHexString(b.path1)) {
+ fsrc = b.fs1
+ dir = strings.TrimPrefix(args[1], bilib.StripHexString(b.path1))
+ } else if strings.HasPrefix(args[1], bilib.StripHexString(b.path2)) {
+ fsrc = b.fs2
+ dir = strings.TrimPrefix(args[1], bilib.StripHexString(b.path2))
+ } else {
+ return fmt.Errorf("error parsing arg: %q (path1: %q, path2: %q)", args[1], b.path1, b.path2)
+ }
+ return purgeChildren(ctx, fsrc, dir)
case "delete-file":
b.checkArgs(args, 1, 1)
dir, file := filepath.Split(args[1])
- if fsrc, err = fs.NewFs(ctx, dir); err != nil {
+ if fsrc, err = cache.Get(ctx, dir); err != nil {
return err
}
var obj fs.Object
@ -555,14 +590,14 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
return operations.DeleteFile(ctx, obj)
case "delete-glob":
b.checkArgs(args, 2, 2)
- if fsrc, err = fs.NewFs(ctx, args[1]); err != nil {
+ if fsrc, err = cache.Get(ctx, args[1]); err != nil {
return err
}
return deleteFiles(ctx, fsrc, args[2])
case "touch-glob":
b.checkArgs(args, 3, 3)
date, src, glob := args[1], args[2], args[3]
- if fsrc, err = fs.NewFs(ctx, src); err != nil {
+ if fsrc, err = cache.Get(ctx, src); err != nil {
return err
}
_, err = touchFiles(ctx, date, fsrc, src, glob)
@ -571,7 +606,7 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
b.checkArgs(args, 3, 3)
date, src, dst := args[1], args[2], args[3]
dir, file := filepath.Split(src)
- if fsrc, err = fs.NewFs(ctx, dir); err != nil {
+ if fsrc, err = cache.Get(ctx, dir); err != nil {
return err
}
if _, err = touchFiles(ctx, date, fsrc, dir, file); err != nil {
@ -604,9 +639,11 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
}
switch args[0] {
case "copy-dir":
- err = sync.CopyDir(ctx, fdst, fsrc, true)
+ ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
+ err = sync.CopyDir(ctxNoDsStore, fdst, fsrc, true)
case "sync-dir":
- err = sync.Sync(ctx, fdst, fsrc, true)
+ ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
+ err = sync.Sync(ctxNoDsStore, fdst, fsrc, true)
}
return err
case "list-dirs":
@ -646,7 +683,7 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
remoteName = "/"
}
- fsrc, err = fs.NewFs(ctx, remoteName)
+ fsrc, err = cache.Get(ctx, remoteName)
if err != nil {
return err
}
@ -665,21 +702,34 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
fixDirname := func(old, new string) {
if new != old {
- oldName, err := fs.NewFs(ctx, old)
+ oldName, err := cache.Get(ctx, old)
if err != nil {
- fs.Logf(old, "error getting Fs: %v", err)
+ fs.Errorf(old, "error getting Fs: %v", err)
+ return
}
fs.Debugf(nil, "Attempting to move %s to %s", oldName.Root(), new)
// Create random name to temporarily move dir to
tmpDirName := strings.TrimSuffix(new, slash) + "-rclone-move-" + random.String(8)
var tmpDirFs fs.Fs
- tmpDirFs, _ = fs.NewFs(ctx, tmpDirName)
+ tmpDirFs, err = cache.Get(ctx, tmpDirName)
+ if err != nil {
+ fs.Errorf(tmpDirName, "error creating temp dir for move: %v", err)
+ }
+ if tmpDirFs == nil {
+ return
+ }
err = sync.MoveDir(ctx, tmpDirFs, oldName, true, true)
if err != nil {
fs.Debugf(oldName, "error attempting to move folder: %v", err)
}
// now move the temp dir to real name
- fsrc, _ = fs.NewFs(ctx, new)
+ fsrc, err = cache.Get(ctx, new)
+ if err != nil {
+ fs.Errorf(new, "error creating fsrc dir for move: %v", err)
+ }
+ if fsrc == nil {
+ return
+ }
err = sync.MoveDir(ctx, fsrc, tmpDirFs, true, true)
if err != nil {
fs.Debugf(tmpDirFs, "error attempting to move folder to %s: %v", fsrc.Root(), err)
@ -709,7 +759,11 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
fs.Debugf(nil, "HASH old: %s new: %s equal: %v", stringToHash(old), stringToHash(new), stringToHash(old) == stringToHash(new))
if ok && new != old {
fs.Debugf(new, "attempting to rename %s to %s", old, new)
- err = operations.MoveFile(ctx, fsrc, fsrc, new, old)
+ srcObj, err := fsrc.NewObject(ctx, old)
+ if err != nil {
+ fs.Errorf(old, "errorfinding srcObj - %v", err)
+ }
+ _, err = operations.MoveCaseInsensitive(ctx, fsrc, fsrc, new, old, false, srcObj)
if err != nil {
fs.Errorf(new, "error trying to rename %s to %s - %v", old, new, err)
}
@ -777,6 +831,95 @@ func (b *bisyncTest) checkArgs(args []string, min, max int) {
}
}
func (b *bisyncTest) checkPreReqs(ctx context.Context, opt *bisync.Options) (context.Context, *bisync.Options) {
// check pre-requisites
if b.testCase == "backupdir" && !(b.fs1.Features().IsLocal && b.fs2.Features().IsLocal) {
b.t.Skip("backupdir test currently only works on local (it uses the workdir)")
}
if b.testCase == "volatile" && !(b.fs1.Features().IsLocal && b.fs2.Features().IsLocal) {
b.t.Skip("skipping 'volatile' test on non-local as it requires uploading 100 files")
}
if strings.HasPrefix(b.fs1.String(), "Dropbox") || strings.HasPrefix(b.fs2.String(), "Dropbox") {
fs.GetConfig(ctx).RefreshTimes = true // https://rclone.org/bisync/#notes-about-testing
}
if strings.HasPrefix(b.fs1.String(), "Dropbox") {
b.fs1.Features().Disable("Copy") // https://github.com/rclone/rclone/issues/6199#issuecomment-1570366202
}
if strings.HasPrefix(b.fs2.String(), "Dropbox") {
b.fs2.Features().Disable("Copy") // https://github.com/rclone/rclone/issues/6199#issuecomment-1570366202
}
if strings.HasPrefix(b.fs1.String(), "OneDrive") {
b.fs1.Features().Disable("Copy") // API has longstanding bug for conflictBehavior=replace https://github.com/rclone/rclone/issues/4590
b.fs1.Features().Disable("Move")
}
if strings.HasPrefix(b.fs2.String(), "OneDrive") {
b.fs2.Features().Disable("Copy") // API has longstanding bug for conflictBehavior=replace https://github.com/rclone/rclone/issues/4590
b.fs2.Features().Disable("Move")
}
if (!b.fs1.Features().CanHaveEmptyDirectories || !b.fs2.Features().CanHaveEmptyDirectories) && (b.testCase == "createemptysrcdirs" || b.testCase == "rmdirs") {
b.t.Skip("skipping test as remote does not support empty dirs")
}
if b.fs1.Precision() == fs.ModTimeNotSupported || b.fs2.Precision() == fs.ModTimeNotSupported {
if b.testCase != "nomodtime" {
b.t.Skip("skipping test as at least one remote does not support setting modtime")
}
b.ignoreModtime = true
}
// test if modtimes are writeable
testSetModtime := func(f fs.Fs) {
in := bytes.NewBufferString("modtime_write_test")
objinfo := object.NewStaticObjectInfo("modtime_write_test", initDate, int64(len("modtime_write_test")), true, nil, nil)
obj, err := f.Put(ctx, in, objinfo)
require.NoError(b.t, err)
err = obj.SetModTime(ctx, initDate)
if err == fs.ErrorCantSetModTime {
if b.testCase != "nomodtime" {
b.t.Skip("skipping test as at least one remote does not support setting modtime")
}
}
err = obj.Remove(ctx)
require.NoError(b.t, err)
}
testSetModtime(b.fs1)
testSetModtime(b.fs2)
if b.testCase == "normalization" || b.testCase == "extended_char_paths" || b.testCase == "extended_filenames" {
// test whether remote is capable of running test
const chars = "ě_{chr:81}{chr:fe}{spc}áñhࢺ_測試Русский_ěáñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö"
testfilename1 := splitLine(norm.NFD.String(norm.NFC.String(chars)))[0]
testfilename2 := splitLine(norm.NFC.String(norm.NFD.String(chars)))[0]
preTest := func(f fs.Fs, testfilename string) string {
in := bytes.NewBufferString(testfilename)
objinfo := object.NewStaticObjectInfo(testfilename, initDate, int64(len(testfilename)), true, nil, nil)
obj, err := f.Put(ctx, in, objinfo)
if err != nil {
b.t.Skipf("Fs is incapable of running test, skipping: %s (expected: \n%s (%s) actual: \n%s (%v))\n (fs: %s) \n", b.testCase, testfilename, detectEncoding(testfilename), "upload failed", err, f)
}
entries, err := f.List(ctx, "")
assert.NoError(b.t, err)
if entries.Len() == 1 && entries[0].Remote() != testfilename {
diffStr, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{A: []string{testfilename}, B: []string{entries[0].Remote()}})
// we can still deal with this as long as both remotes auto-convert the same way.
b.t.Logf("Warning: this remote seems to auto-convert special characters (testcase: %s) (expected: \n%s (%s) actual: \n%s (%s))\n (fs: %s) \n%v", b.testCase, testfilename, detectEncoding(testfilename), entries[0].Remote(), detectEncoding(entries[0].Remote()), f, diffStr)
}
err = obj.Remove(ctx)
require.NoError(b.t, err)
return entries[0].Remote()
}
got1 := preTest(b.fs1, testfilename1)
got1 += preTest(b.fs1, testfilename2)
if b.fs1.Name() != b.fs2.Name() {
got2 := preTest(b.fs2, testfilename1)
got2 += preTest(b.fs2, testfilename2)
if got1 != got2 {
diffStr, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{A: []string{got1}, B: []string{got2}})
b.t.Skipf("Fs is incapable of running test as the paths produce different results, skipping: %s (path1: \n%s (%s) path2: \n%s (%s))\n (fs1: %s fs2: %s) \n%v", b.testCase, got1, detectEncoding(got1), got2, got2, b.fs1, b.fs2, diffStr)
}
}
}
return ctx, opt
}
func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
opt := &bisync.Options{
Workdir: b.workDir,
@ -787,12 +930,13 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
CheckSync: bisync.CheckSyncTrue,
TestFn: b.TestFn,
}
+ ctx, opt = b.checkPreReqs(ctx, opt)
octx, ci := fs.AddConfig(ctx)
fs1, fs2 := b.fs1, b.fs2
addSubdir := func(path, subdir string) fs.Fs {
remote := path + subdir
- f, err := fs.NewFs(ctx, remote)
+ f, err := cache.Get(ctx, remote)
require.NoError(b.t, err, "parsing remote %q", remote)
return f
}
@ -838,9 +982,13 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
case "compare-all":
opt.CompareFlag = "size,modtime,checksum"
opt.Compare.DownloadHash = true // allows us to test crypt and the like
+ case "nomodtime":
+ ci.CheckSum = true
+ opt.CompareFlag = "size,checksum"
+ opt.Compare.DownloadHash = true // allows us to test crypt and the like
case "subdir":
- fs1 = addSubdir(b.path1, val)
- fs2 = addSubdir(b.path2, val)
+ fs1 = addSubdir(bilib.StripHexString(b.path1), val)
+ fs2 = addSubdir(bilib.StripHexString(b.path2), val)
case "backupdir1":
opt.BackupDir1 = val
case "backupdir2":
@ -872,6 +1020,10 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
// set all dirs to a fixed date for test stability, as they are considered as of v1.66.
jamDirTimes := func(f fs.Fs) {
+ if f.Features().DirSetModTime == nil && f.Features().MkdirMetadata == nil {
+ fs.Debugf(f, "Skipping jamDirTimes as remote does not support DirSetModTime or MkdirMetadata")
+ return
+ }
err := walk.ListR(ctx, f, "", true, -1, walk.ListDirs, func(entries fs.DirEntries) error {
var err error
entries.ForDir(func(dir fs.Directory) {
@ -929,10 +1081,11 @@ func (b *bisyncTest) saveTestListings(prefix string, keepSource bool) (err error
}
func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err error) {
+ fs.Debugf(nil, "copyFile %q to %q as %q", src, dst, asName)
var fsrc, fdst fs.Fs
var srcPath, srcFile, dstPath, dstFile string
- switch fsrc, err = fs.NewFs(ctx, src); err {
+ switch fsrc, err = fs.NewFs(ctx, src); err { // intentionally using NewFs here to avoid dircaching the parent
case fs.ErrorIsFile:
// ok
case nil:
@ -955,7 +1108,7 @@ func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err
if dstFile != "" {
dstPath = dst // force directory
}
- if fdst, err = fs.NewFs(ctx, dstPath); err != nil {
+ if fdst, err = fs.NewFs(ctx, dstPath); err != nil { // intentionally using NewFs here to avoid dircaching the parent
return err
}
@ -969,12 +1122,13 @@ func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err
if err := fi.AddFile(srcFile); err != nil {
return err
}
+ fs.Debugf(nil, "operations.CopyFile %q to %q as %q", srcFile, fdst.String(), dstFile)
return operations.CopyFile(fctx, fdst, fsrc, dstFile, srcFile)
}
// listSubdirs is equivalent to `rclone lsf -R [--dirs-only]`
func (b *bisyncTest) listSubdirs(ctx context.Context, remote string, DirsOnly bool) error {
- f, err := fs.NewFs(ctx, remote)
+ f, err := cache.Get(ctx, remote)
if err != nil {
return err
}
@ -1036,6 +1190,9 @@ func deleteFiles(ctx context.Context, f fs.Fs, glob string) error {
// Note: `rclone touch` can touch only single file, doesn't support filters.
func touchFiles(ctx context.Context, dateStr string, f fs.Fs, dir, glob string) ([]string, error) {
files := []string{}
+ if f.Precision() == fs.ModTimeNotSupported {
+ return files, nil
+ }
date, err := time.ParseInLocation(touchDateFormat, dateStr, bisync.TZ)
if err != nil {
@ -1065,14 +1222,19 @@ func touchFiles(ctx context.Context, dateStr string, f fs.Fs, dir, glob string)
fs.Debugf(obj, "Set modification time %s", dateStr)
err := obj.SetModTime(ctx, date)
- if err == fs.ErrorCantSetModTimeWithoutDelete {
+ if err == fs.ErrorCantSetModTimeWithoutDelete || err == fs.ErrorCantSetModTime {
// Workaround for dropbox, similar to --refresh-times
err = nil
buf := new(bytes.Buffer)
size := obj.Size()
separator := ""
if size > 0 {
- err = operations.Cat(ctx, f, buf, 0, size, []byte(separator))
+ filterCtx, fi := filter.AddConfig(ctx)
+ err = fi.AddFile(remote) // limit Cat to only this file, not all files in dir
+ if err != nil {
+ return files, err
+ }
+ err = operations.Cat(filterCtx, f, buf, 0, size, []byte(separator))
}
info := object.NewStaticObjectInfo(remote, date, size, true, nil, f)
if err == nil {
@ -1186,6 +1348,7 @@ func (b *bisyncTest) compareResults() int {
// Golden results will have adapted file names and contain
// generic strings instead of local or cloud paths.
func (b *bisyncTest) storeGolden() {
+ b.generateDebuggers()
// Perform consistency checks
files := b.listDir(b.workDir)
require.NotEmpty(b.t, files, "nothing to store in golden dir")
@ -1271,8 +1434,9 @@ func (b *bisyncTest) mangleResult(dir, file string, golden bool) string {
// First replace filenames with whitespace
// some backends (such as crypt) log them on multiple lines due to encoding differences, while others (local) do not
wsrep := []string{
- "subdir with" + eol + "white space.txt/file2 with" + eol + "white space.txt",
- "subdir with white space.txt/file2 with white space.txt",
+ "subdir with" + eol + "white space.txt/file2 with" + eol + "white space.txt", "subdir with white space.txt/file2 with white space.txt",
+ "with\nwhite space", "with white space",
+ "with\u0090white space", "with white space",
}
whitespaceJoiner := strings.NewReplacer(wsrep...)
s := whitespaceJoiner.Replace(string(buf))
@ -1404,7 +1568,6 @@ func (b *bisyncTest) mangleListing(text string, golden bool, file string) string
lineRegex := regexp.MustCompile(`^(\S) +(-?\d+) (\S+) (\S+) (\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d{9}[+-]\d{4}) (".+")$`)
const timeFormat = "2006-01-02T15:04:05.000000000-0700"
const lineFormat = "%s %8d %s %s %s %q\n"
- TZ := time.UTC
fields := lineRegex.FindStringSubmatch(strings.TrimSuffix(lines[i], "\n"))
if fields != nil {
sizeVal, sizeErr := strconv.ParseInt(fields[2], 10, 64)
@ -1412,11 +1575,11 @@ func (b *bisyncTest) mangleListing(text string, golden bool, file string) string
// account for filename encoding differences by normalizing to OS encoding
fields[6] = normalizeEncoding(fields[6])
timeStr := fields[5]
- if f.Precision() == fs.ModTimeNotSupported {
+ if f.Precision() == fs.ModTimeNotSupported || b.ignoreModtime {
lines[i] = fmt.Sprintf(lineFormat, fields[1], sizeVal, fields[3], fields[4], "-", fields[6])
continue
}
- timeVal, timeErr := time.ParseInLocation(timeFormat, timeStr, TZ)
+ timeVal, timeErr := time.ParseInLocation(timeFormat, timeStr, bisync.TZ)
if timeErr == nil {
timeRound := timeVal.Round(f.Precision() * 2)
lines[i] = fmt.Sprintf(lineFormat, fields[1], sizeVal, fields[3], fields[4], timeRound, fields[6])
@ -1453,8 +1616,8 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
"{datadir/}", b.dataDir + slash,
"{testdir/}", b.testDir + slash,
"{workdir/}", b.workDir + slash,
- "{path1/}", b.path1,
- "{path2/}", b.path2,
+ "{path1/}", bilib.StripHexString(b.path1),
+ "{path2/}", bilib.StripHexString(b.path2),
"{session}", b.sessionName,
"{/}", slash,
}
@ -1469,6 +1632,8 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
b.fs2.String(), "{path2String}",
b.path1, "{path1/}",
b.path2, "{path2/}",
+ bilib.StripHexString(b.path1), "{path1/}",
+ bilib.StripHexString(b.path2), "{path2/}",
"//?/" + strings.TrimSuffix(strings.Replace(b.path1, slash, "/", -1), "/"), "{path1}", // fix windows-specific issue
"//?/" + strings.TrimSuffix(strings.Replace(b.path2, slash, "/", -1), "/"), "{path2}",
strings.TrimSuffix(b.path1, slash), "{path1}", // ensure it's still recognized without trailing slash
@ -1476,6 +1641,10 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
b.workDir, "{workdir}",
b.sessionName, "{session}",
}
// convert all hash types to "{hashtype}"
for _, ht := range hash.Supported().Array() {
rep = append(rep, ht.String(), "{hashtype}")
}
if fixSlash {
prep := []string{}
for i := 0; i < len(rep); i += 2 {
@ -1611,3 +1780,26 @@ func stringToHash(s string) string {
}
return sum
}
func detectEncoding(s string) string {
if norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "BOTH"
}
if !norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "NFD"
}
if norm.NFC.IsNormalString(s) && !norm.NFD.IsNormalString(s) {
return "NFC"
}
return "OTHER"
}
// filters out those pesky macOS .DS_Store files, which are forbidden on Dropbox and just generally annoying
func ctxNoDsStore(ctx context.Context, t *testing.T) (context.Context, *filter.Filter) {
ctxNoDsStore, fi := filter.AddConfig(ctx)
err := fi.AddRule("- .DS_Store")
require.NoError(t, err)
err = fi.AddRule("+ **")
require.NoError(t, err)
return ctxNoDsStore, fi
}
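A note on the log-mangling machinery above: logReplacements is an even-length slice of (regex pattern, replacement) pairs, with a sentinel replacement (dropMe) for lines that should be dropped from the comparison entirely, and newReplacer additionally maps every supported hash name to {hashtype} so the goldens stay backend-agnostic. A rough, hypothetical illustration of how such a pair list might be applied to one log line (not the actual implementation):

package logmangle

import "regexp"

// dropMe stands in for the sentinel replacement value assumed to mean
// "omit this line from the mangled log entirely".
const dropMe = "DROP"

// applyReplacements walks an even-length (pattern, replacement) slice and
// either rewrites the line or reports that it should be dropped.
func applyReplacements(line string, reps []string) (string, bool) {
	for i := 0; i+1 < len(reps); i += 2 {
		re := regexp.MustCompile(reps[i])
		if !re.MatchString(line) {
			continue
		}
		if reps[i+1] == dropMe {
			return "", false
		}
		line = re.ReplaceAllString(line, reps[i+1])
	}
	return line, true
}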

File diff suppressed because it is too large

View File

@ -94,7 +94,7 @@ INFO : Path2: 7 changes:  1 new,  3 modified, 
INFO : (Modified:  3 newer,  0 older,  3 larger,  0 smaller)
INFO : Applying changes
INFO : Checking potential conflicts...
- ERROR : file5.txt: md5 differ
+ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found

View File

@ -94,7 +94,7 @@ INFO : Path2: 7 changes:  1 new,  3 modified, 
INFO : (Modified:  3 newer,  0 older,  3 larger,  0 smaller)
INFO : Applying changes
INFO : Checking potential conflicts...
- ERROR : file5.txt: md5 differ
+ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found

View File

@ -4,7 +4,7 @@
(02) : test EXCLUDE - OTHER TESTS
(03) : copy-file {datadir/}exclude-other-filtersfile.txt {workdir/}
- (04) : test resync to get the filters file md5 built.
+ (04) : test resync to get the filters file {hashtype} built.
(05) : bisync resync filters-file={workdir/}exclude-other-filtersfile.txt
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings:
@ -18,7 +18,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}exclude-other-filtersfile.txt
- INFO : Storing filters file hash to {workdir/}exclude-other-filtersfile.txt.md5
+ INFO : Storing filters file hash to {workdir/}exclude-other-filtersfile.txt.{hashtype}
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2
@ -133,7 +133,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}include-other-filtersfile.txt
- INFO : Storing filters file hash to {workdir/}include-other-filtersfile.txt.md5
+ INFO : Storing filters file hash to {workdir/}include-other-filtersfile.txt.{hashtype}
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2

View File

@ -116,7 +116,7 @@ INFO : Path2: 6 changes:  1 new,  3 modified, 
INFO : (Modified:  3 newer,  0 older,  3 larger,  0 smaller)
INFO : Applying changes
INFO : Checking potential conflicts...
- ERROR : file5.txt: md5 differ
+ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found
@ -181,7 +181,7 @@ INFO : Path2: 6 changes:  1 new,  3 modified, 
INFO : (Modified:  3 newer,  0 older,  3 larger,  0 smaller)
INFO : Applying changes
INFO : Checking potential conflicts...
- ERROR : file5.txt: md5 differ
+ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found

View File

@ -58,7 +58,7 @@ INFO : Path2: 2 changes:  0 new,  2 modified, 
INFO : (Modified:  2 newer,  0 older,  2 larger,  0 smaller)
INFO : Applying changes
INFO : Checking potential conflicts...
- ERROR : file1.txt: md5 differ
+ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking
NOTICE: {path2String}: 1 matching files

View File

@ -171,7 +171,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}測試_filtersfile.txt
- INFO : Storing filters file hash to {workdir/}測試_filtersfile.txt.md5
+ INFO : Storing filters file hash to {workdir/}測試_filtersfile.txt.{hashtype}
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2

View File

@ -3,7 +3,7 @@
(02) : copy-file {datadir/}filtersfile.flt {workdir/}
- (03) : test resync to force building of the filters md5 hash
+ (03) : test resync to force building of the filters {hashtype} hash
(04) : bisync filters-file={workdir/}filtersfile.flt resync
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings:
@ -17,7 +17,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.flt
- INFO : Storing filters file hash to {workdir/}filtersfile.flt.md5
+ INFO : Storing filters file hash to {workdir/}filtersfile.flt.{hashtype}
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2

View File

@ -24,7 +24,7 @@ INFO : Bisync successful
(04) : test 1. inject filters file in workdir.
(05) : copy-file {datadir/}filtersfile.txt {workdir/}
- (06) : test 2. run with filters-file but without md5. should abort.
+ (06) : test 2. run with filters-file but without {hashtype}. should abort.
(07) : bisync filters-file={workdir/}filtersfile.txt
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings:
@ -38,7 +38,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.txt
- ERROR : Bisync critical error: filters file md5 hash not found (must run --resync): {workdir/}filtersfile.txt
+ ERROR : Bisync critical error: filters file {hashtype} hash not found (must run --resync): {workdir/}filtersfile.txt
ERROR : Bisync aborted. Must run --resync to recover.
Bisync error: bisync aborted
@ -78,7 +78,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.txt
- INFO : Storing filters file hash to {workdir/}filtersfile.txt.md5
+ INFO : Storing filters file hash to {workdir/}filtersfile.txt.{hashtype}
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2
@ -143,7 +143,7 @@ INFO : Bisyncing with Comparison Settings:
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Using filters file {workdir/}filtersfile.txt
- INFO : Skipped storing filters file hash to {workdir/}filtersfile.txt.md5 as --dry-run is set
+ INFO : Skipped storing filters file hash to {workdir/}filtersfile.txt.{hashtype} as --dry-run is set
INFO : Copying Path2 files to Path1
NOTICE: - Path2 Resync is copying files to - Path1
NOTICE: - Path1 Resync is copying files to - Path2

View File

@ -0,0 +1,5 @@
"file11.txt"
"file2.txt"
"file4.txt"
"file5.txt.conflict1"
"file7.txt"

View File

@ -0,0 +1,5 @@
"file1.txt"
"file10.txt"
"file3.txt"
"file5.txt.conflict2"
"file6.txt"

View File

@ -0,0 +1 @@
"file3.txt"

View File

@ -0,0 +1 @@
"file4.txt"

View File

@ -0,0 +1,10 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict1"
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict2"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"

View File

@ -0,0 +1,8 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"

View File

@ -0,0 +1,10 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file8.txt"

View File

@ -0,0 +1,10 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict1"
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict2"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"

View File

@ -0,0 +1,8 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"

View File

@ -0,0 +1,10 @@
# bisync listing v1 from test
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file8.txt"

View File

@ -0,0 +1,109 @@
(01) : test changes
(02) : test initial bisync
(03) : bisync resync nomodtime
INFO : Bisyncing with Comparison Settings:
{
"Modtime": false,
"Size": true,
"Checksum": true,
"NoSlowHash": false,
"SlowHashSyncOnly": false,
"DownloadHash": true
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1
INFO : - Path1 Resync is copying files to - Path2
INFO : Resync updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful
(04) : test make modifications on both paths
(05) : test new on path2 - file10
(06) : copy-file {datadir/}file10.txt {path2/}
(07) : test changed on path2 - file1
(08) : copy-file {datadir/}file1.txt {path2/}
(09) : test new on path1 - file11
(10) : copy-file {datadir/}file11.txt {path1/}
(11) : test changed on path1 - file2
(12) : copy-file {datadir/}file2.txt {path1/}
(13) : test deleted on path2 - file3
(14) : delete-file {path2/}file3.txt
(15) : test deleted on path1 - file4
(16) : delete-file {path1/}file4.txt
(17) : test deleted on both paths - file8
(18) : delete-file {path1/}file8.txt
(19) : delete-file {path2/}file8.txt
(20) : test changed on both paths - file5 (file5R, file5L)
(21) : copy-as {datadir/}file5R.txt {path2/} file5.txt
(22) : copy-as {datadir/}file5L.txt {path1/} file5.txt
(23) : test changed on path2 and deleted on path1 - file6
(24) : copy-file {datadir/}file6.txt {path2/}
(25) : delete-file {path1/}file6.txt
(26) : test changed on path1 and deleted on path2 - file7
(27) : copy-file {datadir/}file7.txt {path1/}
(28) : delete-file {path2/}file7.txt
(29) : test bisync run
(30) : bisync nomodtime
INFO : Bisyncing with Comparison Settings:
{
"Modtime": false,
"Size": true,
"Checksum": true,
"NoSlowHash": false,
"SlowHashSyncOnly": false,
"DownloadHash": true
}
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs
INFO : - Path1 File changed: size (larger), hash - file2.txt
INFO : - Path1 File was deleted - file4.txt
INFO : - Path1 File changed: size (larger), hash - file5.txt
INFO : - Path1 File was deleted - file6.txt
INFO : - Path1 File changed: size (larger), hash - file7.txt
INFO : - Path1 File was deleted - file8.txt
INFO : - Path1 File is new - file11.txt
INFO : Path1: 7 changes:  1 new,  3 modified,  3 deleted
INFO : (Modified:  3 larger,  0 smaller,  3 hash differs)
INFO : Path2 checking for diffs
INFO : - Path2 File changed: size (larger), hash - file1.txt
INFO : - Path2 File was deleted - file3.txt
INFO : - Path2 File changed: size (larger), hash - file5.txt
INFO : - Path2 File changed: size (larger), hash - file6.txt
INFO : - Path2 File was deleted - file7.txt
INFO : - Path2 File was deleted - file8.txt
INFO : - Path2 File is new - file10.txt
INFO : Path2: 7 changes:  1 new,  3 modified,  3 deleted
INFO : (Modified:  3 larger,  0 smaller,  3 hash differs)
INFO : Applying changes
INFO : - Path1 Queue copy to Path2 - {path2/}file11.txt
INFO : - Path1 Queue copy to Path2 - {path2/}file2.txt
INFO : - Path2 Queue delete - {path2/}file4.txt
NOTICE: - WARNING New or changed in both paths - file5.txt
NOTICE: - Path1 Renaming Path1 copy - {path1/}file5.txt.conflict1
NOTICE: - Path1 Queue copy to Path2 - {path2/}file5.txt.conflict1
NOTICE: - Path2 Renaming Path2 copy - {path2/}file5.txt.conflict2
NOTICE: - Path2 Queue copy to Path1 - {path1/}file5.txt.conflict2
INFO : - Path2 Queue copy to Path1 - {path1/}file6.txt
INFO : - Path1 Queue copy to Path2 - {path2/}file7.txt
INFO : - Path2 Queue copy to Path1 - {path1/}file1.txt
INFO : - Path2 Queue copy to Path1 - {path1/}file10.txt
INFO : - Path1 Queue delete - {path1/}file3.txt
INFO : - Path2 Do queued copies to - Path1
INFO : - Path1 Do queued copies to - Path2
INFO : Updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful
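
As a reading aid for the goldens above: the "Bisyncing with Comparison Settings" JSON maps one key per comparison criterion. A minimal Go sketch of an options struct mirroring those keys (illustrative only; the real type lives in cmd/bisync, and the comments paraphrase the corresponding bisync flags):

// comparisonSettings mirrors the keys printed in the log above; illustrative only.
type comparisonSettings struct {
	Modtime          bool // compare modification times
	Size             bool // compare sizes
	Checksum         bool // compare hashes
	NoSlowHash       bool // skip checksums on backends where hashing is slow
	SlowHashSyncOnly bool // use slow hashes only during sync operations
	DownloadHash     bool // download files to compute a hash when the backend has none
}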

View File

@ -0,0 +1 @@
This file is used for testing the health of rclone accesses to the local/remote file system. Do not delete.

View File

@ -0,0 +1 @@
This file is newer

View File

@ -0,0 +1 @@
This file is newer

View File

@ -0,0 +1 @@
This file is newer

View File

@ -0,0 +1 @@
Newer version

View File

@ -0,0 +1 @@
This file is newer and not equal to 5R

View File

@ -0,0 +1 @@
This file is newer and not equal to 5L

View File

@ -0,0 +1 @@
This file is newer

View File

@ -0,0 +1 @@
This file is newer

View File

@ -0,0 +1,53 @@
test changes
# Exercise all of the various file change scenarios
# - New on Path2 file10
# - changed on Path2 file1
# - New on Path1 file11
# - changed on Path1 file2
# - Deleted on Path2 file3
# - Deleted on Path1 file4
# - Changed on Path2 and on Path1 file5 (file5r, file5l)
# - changed on Path2 and deleted on Path1 file6
# - changed on Path1 and deleted on Path2 file7
# - Deleted on both paths file8
test initial bisync
bisync resync nomodtime
test make modifications on both paths
test new on path2 - file10
copy-file {datadir/}file10.txt {path2/}
test changed on path2 - file1
copy-file {datadir/}file1.txt {path2/}
test new on path1 - file11
copy-file {datadir/}file11.txt {path1/}
test changed on path1 - file2
copy-file {datadir/}file2.txt {path1/}
test deleted on path2 - file3
delete-file {path2/}file3.txt
test deleted on path1 - file4
delete-file {path1/}file4.txt
test deleted on both paths - file8
delete-file {path1/}file8.txt
delete-file {path2/}file8.txt
test changed on both paths - file5 (file5R, file5L)
copy-as {datadir/}file5R.txt {path2/} file5.txt
copy-as {datadir/}file5L.txt {path1/} file5.txt
test changed on path2 and deleted on path1 - file6
copy-file {datadir/}file6.txt {path2/}
delete-file {path1/}file6.txt
test changed on path1 and deleted on path2 - file7
copy-file {datadir/}file7.txt {path1/}
delete-file {path2/}file7.txt
test bisync run
bisync nomodtime
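
The file5 case above is the one that produces the ".conflict1" / ".conflict2" renames seen in the log earlier. A hypothetical helper showing just that naming convention (bisync's real conflict resolution is configurable and more involved):

package main

import "fmt"

// conflictName illustrates the suffix scheme seen in the log above; hypothetical helper only.
func conflictName(remote string, n int) string {
	return fmt.Sprintf("%s.conflict%d", remote, n)
}

func main() {
	fmt.Println(conflictName("file5.txt", 1)) // file5.txt.conflict1
	fmt.Println(conflictName("file5.txt", 2)) // file5.txt.conflict2
}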

View File

@ -1,3 +1,3 @@
"folder/HeLlO,wOrLd!.txt" "newfolder/HeLlO,wOrLd!.txt"
"folder/éééö.txt" "newfolder/éééö.txt"
"測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/hello,WORLD!.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/hello,WORLD!.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -2,6 +2,6 @@
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/hello,WORLD!.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt" - 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" - 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"

View File

@ -103,13 +103,13 @@ INFO : Bisync successful
(19) : copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt (19) : copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
(20) : copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt (20) : copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
(21) : copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt (21) : copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
(22) : touch-copy 2001-01-03 {datadir/}file1.txt {path2/} (22) : touch-copy 2001-01-03 {datadir/}file1.txt {path2/}
(23) : copy-as-NFD {datadir/}file1.txt {path2/}folder éééö.txt (23) : copy-as-NFD {datadir/}file1.txt {path2/}newfolder éééö.txt
(24) : copy-as-NFD {datadir/}file1.txt {path2/}folder hello,WORLD!.txt (24) : copy-as-NFD {datadir/}file1.txt {path2/}newfolder hello,WORLD!.txt
(25) : test bisync run with normalization (25) : test bisync run with normalization
(26) : bisync norm force (26) : bisync norm force
@ -126,14 +126,14 @@ INFO : Bisyncing with Comparison Settings:
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}" INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs INFO : Path1 checking for diffs
INFO : - Path1 File is new - folder/HeLlO,wOrLd!.txt INFO : - Path1 File is new - newfolder/HeLlO,wOrLd!.txt
INFO : - Path1 File is new - folder/éééö.txt INFO : - Path1 File is new - newfolder/éééö.txt
INFO : - Path1 File is new - "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" INFO : - Path1 File is new - "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
INFO : Path1: 3 changes:  3 new,  0 modified,  0 deleted INFO : Path1: 3 changes:  3 new,  0 modified,  0 deleted
INFO : Path2 checking for diffs INFO : Path2 checking for diffs
INFO : - Path2 File changed: time (newer) - file1.txt INFO : - Path2 File changed: time (newer) - file1.txt
INFO : - Path2 File is new - folder/éééö.txt INFO : - Path2 File is new - newfolder/éééö.txt
INFO : - Path2 File is new - folder/hello,WORLD!.txt INFO : - Path2 File is new - newfolder/hello,WORLD!.txt
INFO : Path2: 3 changes:  2 new,  1 modified,  0 deleted INFO : Path2: 3 changes:  2 new,  1 modified,  0 deleted
INFO : (Modified:  1 newer,  0 older) INFO : (Modified:  1 newer,  0 older)
INFO : Applying changes INFO : Applying changes
@ -141,12 +141,12 @@ INFO : Checking potential conflicts...
NOTICE: {path2String}: 0 differences found NOTICE: {path2String}: 0 differences found
NOTICE: {path2String}: 2 matching files NOTICE: {path2String}: 2 matching files
INFO : Finished checking the potential conflicts. %!s(<nil>) INFO : Finished checking the potential conflicts. %!s(<nil>)
NOTICE: - WARNING New or changed in both paths - folder/HeLlO,wOrLd!.txt NOTICE: - WARNING New or changed in both paths - newfolder/HeLlO,wOrLd!.txt
INFO : folder/HeLlO,wOrLd!.txt: Files are equal but will copy anyway to update modtime (will not rename) INFO : newfolder/HeLlO,wOrLd!.txt: Files are equal but will copy anyway to update modtime (will not rename)
INFO : - Path2 Queue copy to Path1 - {path1/}folder/HeLlO,wOrLd!.txt INFO : - Path2 Queue copy to Path1 - {path1/}newfolder/HeLlO,wOrLd!.txt
NOTICE: - WARNING New or changed in both paths - folder/éééö.txt NOTICE: - WARNING New or changed in both paths - newfolder/éééö.txt
INFO : folder/éééö.txt: Files are equal but will copy anyway to update modtime (will not rename) INFO : newfolder/éééö.txt: Files are equal but will copy anyway to update modtime (will not rename)
INFO : - Path2 Queue copy to Path1 - {path1/}folder/éééö.txt INFO : - Path2 Queue copy to Path1 - {path1/}newfolder/éééö.txt
INFO : - Path1 Queue copy to Path2 - "{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" INFO : - Path1 Queue copy to Path2 - "{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
INFO : - Path2 Queue copy to Path1 - {path1/}file1.txt INFO : - Path2 Queue copy to Path1 - {path1/}file1.txt
INFO : - Path2 Do queued copies to - Path1 INFO : - Path2 Do queued copies to - Path1
@ -178,8 +178,8 @@ INFO : Bisync successful
(29) : test changed on one path (29) : test changed on one path
(30) : touch-copy 2001-01-05 {datadir/}file1.txt {path2/} (30) : touch-copy 2001-01-05 {datadir/}file1.txt {path2/}
(31) : copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt (31) : copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
(32) : copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt (32) : copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
(33) : copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt (33) : copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
(34) : bisync norm (34) : bisync norm
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set. INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
@ -194,8 +194,8 @@ INFO : Bisyncing with Comparison Settings:
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}" INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Building Path1 and Path2 listings INFO : Building Path1 and Path2 listings
INFO : Path1 checking for diffs INFO : Path1 checking for diffs
INFO : - Path1 File changed: time (newer) - folder/HeLlO,wOrLd!.txt INFO : - Path1 File changed: time (newer) - newfolder/HeLlO,wOrLd!.txt
INFO : - Path1 File changed: time (newer) - folder/éééö.txt INFO : - Path1 File changed: time (newer) - newfolder/éééö.txt
INFO : - Path1 File changed: time (newer) - "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" INFO : - Path1 File changed: time (newer) - "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
INFO : Path1: 3 changes:  0 new,  3 modified,  0 deleted INFO : Path1: 3 changes:  0 new,  3 modified,  0 deleted
INFO : (Modified:  3 newer,  0 older) INFO : (Modified:  3 newer,  0 older)
@ -204,8 +204,8 @@ INFO : - Path2 File changed: time (newer)[0
INFO : Path2: 1 changes:  0 new,  1 modified,  0 deleted INFO : Path2: 1 changes:  0 new,  1 modified,  0 deleted
INFO : (Modified:  1 newer,  0 older) INFO : (Modified:  1 newer,  0 older)
INFO : Applying changes INFO : Applying changes
INFO : - Path1 Queue copy to Path2 - {path2/}folder/hello,WORLD!.txt INFO : - Path1 Queue copy to Path2 - {path2/}newfolder/hello,WORLD!.txt
INFO : - Path1 Queue copy to Path2 - {path2/}folder/éééö.txt INFO : - Path1 Queue copy to Path2 - {path2/}newfolder/éééö.txt
INFO : - Path1 Queue copy to Path2 - "{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt" INFO : - Path1 Queue copy to Path2 - "{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀💆🏿\u200d♂🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
INFO : - Path2 Queue copy to Path1 - {path1/}file1.txt INFO : - Path2 Queue copy to Path1 - {path1/}file1.txt
INFO : - Path2 Do queued copies to - Path1 INFO : - Path2 Do queued copies to - Path1

View File

@ -32,13 +32,13 @@ bisync resync
# copy NFC version to Path1 # copy NFC version to Path1
# note: need to slightly change the name to avoid Drive known issue #3262 which could try to copy the old name from the trash # note: need to slightly change the name to avoid Drive known issue #3262 which could try to copy the old name from the trash
copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
# place newer NFD version on Path2 # place newer NFD version on Path2
touch-copy 2001-01-03 {datadir/}file1.txt {path2/} touch-copy 2001-01-03 {datadir/}file1.txt {path2/}
copy-as-NFD {datadir/}file1.txt {path2/}folder éééö.txt copy-as-NFD {datadir/}file1.txt {path2/}newfolder éééö.txt
copy-as-NFD {datadir/}file1.txt {path2/}folder hello,WORLD!.txt copy-as-NFD {datadir/}file1.txt {path2/}newfolder hello,WORLD!.txt
test bisync run with normalization test bisync run with normalization
bisync norm force bisync norm force
@ -49,6 +49,6 @@ bisync resync norm
test changed on one path test changed on one path
touch-copy 2001-01-05 {datadir/}file1.txt {path2/} touch-copy 2001-01-05 {datadir/}file1.txt {path2/}
copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾💆🏿🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
bisync norm bisync norm
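
The scenario above relies on NFC and NFD spellings of the same visible name comparing equal only after normalization. A small standalone sketch of that comparison using golang.org/x/text/unicode/norm (not bisync's actual code path, which applies normalization while comparing listings):

package main

import (
	"fmt"

	"golang.org/x/text/unicode/norm"
)

func main() {
	name := "éééö.txt"
	nfc := norm.NFC.String(name) // composed form
	nfd := norm.NFD.String(name) // decomposed form, as some backends/filesystems store names

	fmt.Println(nfc == nfd)                  // false: the raw byte strings differ
	fmt.Println(norm.NFC.String(nfd) == nfc) // true: equal once both sides are normalized
}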

View File

@ -1,8 +1,10 @@
(01) : test rclone-args (01) : test rclone-args
(02) : test initial bisync (02) : touch-glob 2001-01-02 {datadir/} *
(03) : bisync resync checksum
(03) : test initial bisync
(04) : bisync resync checksum
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
"Modtime": false, "Modtime": false,
@ -20,19 +22,20 @@ INFO : Resync updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(04) : test place newer files on both paths (05) : test place newer files on both paths
(05) : touch-glob 2001-01-02 {datadir/} *
(06) : copy-file {datadir/}file1.txt {path1/} (06) : copy-file {datadir/}file1.txt {path1/}
(07) : copy-file {datadir/}file2.txt {path2/} (07) : copy-file {datadir/}file2.txt {path2/}
(08) : copy-file {datadir/}file20.txt {path1/}subdir (08) : copy-file {datadir/}file20.txt {path1/}subdir
(09) : copy-file {datadir/}file21.txt {path2/}subdir (09) : copy-file {datadir/}file21.txt {path2/}subdir
(10) : test run bisync with custom options
(11) : bisync checksum (10) : touch-glob 2001-01-02 {path1/} file1.txt
(11) : touch-glob 2001-01-02 {path2/} file2.txt
(12) : touch-glob 2001-01-02 {path1/}subdir/ file20.txt
(13) : touch-glob 2001-01-02 {path2/}subdir/ file21.txt
(14) : test run bisync with custom options
(15) : bisync checksum
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
"Modtime": false, "Modtime": false,
@ -61,15 +64,20 @@ INFO : Updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(12) : touch-glob 2007-07-23 {datadir/} * (16) : touch-glob 2007-07-23 {datadir/} *
(13) : copy-file {datadir/}file1.txt {path1/} (17) : copy-file {datadir/}file1.txt {path1/}
(14) : copy-file {datadir/}file2.txt {path2/} (18) : copy-file {datadir/}file2.txt {path2/}
(19) : copy-file {datadir/}file20.txt {path1/}subdir
(20) : copy-as {datadir/}file21.txt {path2/} file1.txt
(15) : copy-file {datadir/}file20.txt {path1/}subdir
(16) : copy-as {datadir/}file21.txt {path2/} file1.txt
(17) : bisync size-only (21) : touch-glob 2007-07-23 {path1/} file1.txt
(22) : touch-glob 2007-07-23 {path2/} file2.txt
(23) : touch-glob 2007-07-23 {path1/}subdir/ file20.txt
(24) : touch-glob 2007-07-23 {path2/}subdir/ file21.txt
(25) : bisync size-only
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set. INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
@ -93,7 +101,7 @@ INFO : - Path2 Do queued copies to - P
INFO : Updating listings INFO : Updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(18) : bisync resync (26) : bisync resync
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set. INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
@ -112,13 +120,18 @@ INFO : Resync updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(19) : copy-file {datadir/}file1.txt {path1/} (27) : copy-file {datadir/}file1.txt {path1/}
(20) : copy-file {datadir/}file2.txt {path2/} (28) : copy-file {datadir/}file2.txt {path2/}
(29) : copy-file {datadir/}file20.txt {path1/}subdir
(30) : copy-file {datadir/}file21.txt {path2/}subdir
(21) : copy-file {datadir/}file20.txt {path1/}subdir
(22) : copy-file {datadir/}file21.txt {path2/}subdir
(23) : bisync ignore-size (31) : touch-glob 2007-07-23 {path1/} file1.txt
(32) : touch-glob 2007-07-23 {path2/} file2.txt
(33) : touch-glob 2007-07-23 {path1/}subdir/ file20.txt
(34) : touch-glob 2007-07-23 {path2/}subdir/ file21.txt
(35) : bisync ignore-size
INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set. INFO : Setting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
@ -145,7 +158,7 @@ INFO : Updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(24) : bisync resync compare-all (36) : bisync resync compare-all
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
"Modtime": true, "Modtime": true,
@ -163,16 +176,22 @@ INFO : Resync updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
INFO : Bisync successful INFO : Bisync successful
(25) : copy-as {datadir/}file21.txt {path2/} file2.txt (37) : copy-as {datadir/}file21.txt {path2/} file2.txt
(26) : touch-glob 2023-08-26 {datadir/} * (38) : touch-glob 2023-08-26 {datadir/} *
(27) : copy-file {datadir/}file1.txt {path1/} (39) : copy-file {datadir/}file1.txt {path1/}
(28) : copy-file {datadir/}file20.txt {path1/}subdir (40) : copy-file {datadir/}file20.txt {path1/}subdir
(29) : copy-file {datadir/}file21.txt {path2/}subdir (41) : copy-file {datadir/}file21.txt {path2/}subdir
(30) : bisync compare-all
(42) : touch-glob 2007-07-23 {path2/} file2.txt
(43) : touch-glob 2023-08-26 {path1/} file1.txt
(44) : touch-glob 2023-08-26 {path1/}subdir/ file20.txt
(45) : touch-glob 2023-08-26 {path2/}subdir/ file21.txt
(46) : bisync compare-all
INFO : Bisyncing with Comparison Settings: INFO : Bisyncing with Comparison Settings:
{ {
"Modtime": true, "Modtime": true,

View File

@ -2,21 +2,24 @@ test rclone-args
# Pass generic flags to rclone under test using as an example # Pass generic flags to rclone under test using as an example
# the --size-only flag, which changes the meaning of operations. # the --size-only flag, which changes the meaning of operations.
# force specific modification time since file time is lost through git
touch-glob 2001-01-02 {datadir/} *
test initial bisync test initial bisync
bisync resync checksum bisync resync checksum
test place newer files on both paths test place newer files on both paths
# force specific modification time since file time is lost through git
touch-glob 2001-01-02 {datadir/} *
copy-file {datadir/}file1.txt {path1/} copy-file {datadir/}file1.txt {path1/}
copy-file {datadir/}file2.txt {path2/} copy-file {datadir/}file2.txt {path2/}
copy-file {datadir/}file20.txt {path1/}subdir copy-file {datadir/}file20.txt {path1/}subdir
copy-file {datadir/}file21.txt {path2/}subdir copy-file {datadir/}file21.txt {path2/}subdir
# re-touch dest to avoid race conditions with parallel tests
touch-glob 2001-01-02 {path1/} file1.txt
touch-glob 2001-01-02 {path2/} file2.txt
touch-glob 2001-01-02 {path1/}subdir/ file20.txt
touch-glob 2001-01-02 {path2/}subdir/ file21.txt
test run bisync with custom options test run bisync with custom options
bisync checksum bisync checksum
@ -24,19 +27,29 @@ touch-glob 2007-07-23 {datadir/} *
copy-file {datadir/}file1.txt {path1/} copy-file {datadir/}file1.txt {path1/}
copy-file {datadir/}file2.txt {path2/} copy-file {datadir/}file2.txt {path2/}
copy-file {datadir/}file20.txt {path1/}subdir copy-file {datadir/}file20.txt {path1/}subdir
copy-as {datadir/}file21.txt {path2/} file1.txt copy-as {datadir/}file21.txt {path2/} file1.txt
# re-touch dest to avoid race conditions with parallel tests
touch-glob 2007-07-23 {path1/} file1.txt
touch-glob 2007-07-23 {path2/} file2.txt
touch-glob 2007-07-23 {path1/}subdir/ file20.txt
touch-glob 2007-07-23 {path2/}subdir/ file21.txt
bisync size-only bisync size-only
bisync resync bisync resync
copy-file {datadir/}file1.txt {path1/} copy-file {datadir/}file1.txt {path1/}
copy-file {datadir/}file2.txt {path2/} copy-file {datadir/}file2.txt {path2/}
copy-file {datadir/}file20.txt {path1/}subdir copy-file {datadir/}file20.txt {path1/}subdir
copy-file {datadir/}file21.txt {path2/}subdir copy-file {datadir/}file21.txt {path2/}subdir
# re-touch dest to avoid race conditions with parallel tests
touch-glob 2007-07-23 {path1/} file1.txt
touch-glob 2007-07-23 {path2/} file2.txt
touch-glob 2007-07-23 {path1/}subdir/ file20.txt
touch-glob 2007-07-23 {path2/}subdir/ file21.txt
bisync ignore-size bisync ignore-size
bisync resync compare-all bisync resync compare-all
@ -50,4 +63,10 @@ copy-file {datadir/}file1.txt {path1/}
copy-file {datadir/}file20.txt {path1/}subdir copy-file {datadir/}file20.txt {path1/}subdir
copy-file {datadir/}file21.txt {path2/}subdir copy-file {datadir/}file21.txt {path2/}subdir
# re-touch dest to avoid race conditions with parallel tests
touch-glob 2007-07-23 {path2/} file2.txt
touch-glob 2023-08-26 {path1/} file1.txt
touch-glob 2023-08-26 {path1/}subdir/ file20.txt
touch-glob 2023-08-26 {path2/}subdir/ file21.txt
bisync compare-all bisync compare-all
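
The "re-touch dest to avoid race conditions with parallel tests" steps pin the destination copies to a fixed modtime after each copy. For a local path the equivalent operation is a Chtimes call; a minimal sketch under that assumption (the real touch-glob step goes through rclone's backend API rather than the OS):

package main

import (
	"log"
	"os"
	"path/filepath"
	"time"
)

func main() {
	// pin every matching file to the timestamp the scenario expects
	when, _ := time.Parse("2006-01-02", "2001-01-02")
	matches, err := filepath.Glob("path1/file*.txt") // stand-in for {path1/} file1.txt
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range matches {
		if err := os.Chtimes(m, when, when); err != nil {
			log.Fatal(err)
		}
	}
}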

View File

@ -52,7 +52,7 @@ INFO : Path2: 1 changes:  0 new,  1 modified, 
INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller) INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller)
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -97,7 +97,7 @@ INFO : Path2: 1 changes:  0 new,  1 modified, 
INFO : (Modified:  1 newer,  0 older) INFO : (Modified:  1 newer,  0 older)
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -142,7 +142,7 @@ INFO : - Path2 File is new - f
INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -188,7 +188,7 @@ INFO : - Path2 File is new - f
INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -234,7 +234,7 @@ INFO : - Path2 File is new - f
INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -283,8 +283,8 @@ INFO : Path2: 2 changes:  1 new,  1 modified, 
INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller) INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller)
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file1.txt: md5 differ ERROR : file1.txt: {hashtype} differ
ERROR : file2.txt: md5 differ ERROR : file2.txt: {hashtype} differ
NOTICE: {path2String}: 2 differences found NOTICE: {path2String}: 2 differences found
NOTICE: {path2String}: 2 errors while checking NOTICE: {path2String}: 2 errors while checking
INFO : Finished checking the potential conflicts. 2 differences found INFO : Finished checking the potential conflicts. 2 differences found
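
The golden change above swaps the literal hash name for a "{hashtype}" token so the same golden can match backends with different hash types. A sketch of that kind of log normalization, assuming a simple regex approach (bisync's test framework has its own set of rewrite rules):

package main

import (
	"fmt"
	"regexp"
)

// hashTypeRe matches hash names that can appear in check output; illustrative list only.
var hashTypeRe = regexp.MustCompile(`\b(md5|sha1|sha256|crc32|quickxor|whirlpool)\b differ`)

// normalizeLine replaces the backend-specific hash name with a stable placeholder.
func normalizeLine(line string) string {
	return hashTypeRe.ReplaceAllString(line, "{hashtype} differ")
}

func main() {
	fmt.Println(normalizeLine("ERROR : file1.txt: md5 differ"))
	// ERROR : file1.txt: {hashtype} differ
}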

View File

@ -101,7 +101,7 @@ INFO : Bisyncing with Comparison Settings:
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}" INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
INFO : Copying Path2 files to Path1 INFO : Copying Path2 files to Path1
INFO : - Path2 Resync is copying files to - Path1 INFO : - Path2 Resync is copying files to - Path1
INFO : file1.txt: Path2 is older. Path1: 2003-09-03 20:00:00 -0400 EDT, Path2: 2003-07-22 20:00:00 -0400 EDT, Difference: 1032h0m0s INFO : file1.txt: Path2 is older. Path1: 2003-09-04 00:00:00 +0000 UTC, Path2: 2003-07-23 00:00:00 +0000 UTC, Difference: 1032h0m0s
INFO : - Path1 Resync is copying files to - Path2 INFO : - Path1 Resync is copying files to - Path2
INFO : Resync updating listings INFO : Resync updating listings
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}" INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"

View File

@ -52,7 +52,7 @@ INFO : Path2: 1 changes:  0 new,  1 modified, 
INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller) INFO : (Modified:  1 newer,  0 older,  1 larger,  0 smaller)
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file5.txt: md5 differ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -196,7 +196,7 @@ INFO : Path2: 50 changes:  43 new,  7 modified, 
INFO : (Modified:  7 newer,  0 older,  7 larger,  0 smaller) INFO : (Modified:  7 newer,  0 older,  7 larger,  0 smaller)
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file5.txt: md5 differ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found
@ -339,7 +339,7 @@ INFO : - Path2 File is new - f
INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted INFO : Path2: 1 changes:  1 new,  0 modified,  0 deleted
INFO : Applying changes INFO : Applying changes
INFO : Checking potential conflicts... INFO : Checking potential conflicts...
ERROR : file5.txt: md5 differ ERROR : file5.txt: {hashtype} differ
NOTICE: {path2String}: 1 differences found NOTICE: {path2String}: 1 differences found
NOTICE: {path2String}: 1 errors while checking NOTICE: {path2String}: 1 errors while checking
INFO : Finished checking the potential conflicts. 1 differences found INFO : Finished checking the potential conflicts. 1 differences found

View File

@ -223,8 +223,10 @@ func logModTimeUpload(dst fs.Object) {
} }
// EqualFn allows replacing Equal() with a custom function during NeedTransfer() // EqualFn allows replacing Equal() with a custom function during NeedTransfer()
type EqualFn func(ctx context.Context, src fs.ObjectInfo, dst fs.Object) bool type (
type equalFnContextKey struct{} EqualFn func(ctx context.Context, src fs.ObjectInfo, dst fs.Object) bool
equalFnContextKey struct{}
)
var equalFnKey = equalFnContextKey{} var equalFnKey = equalFnContextKey{}
@ -451,7 +453,7 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
} }
} else if needsMoveCaseInsensitive(fdst, fdst, remote, src.Remote(), false) { } else if needsMoveCaseInsensitive(fdst, fdst, remote, src.Remote(), false) {
doMove = func(ctx context.Context, src fs.Object, remote string) (fs.Object, error) { doMove = func(ctx context.Context, src fs.Object, remote string) (fs.Object, error) {
return moveCaseInsensitive(ctx, fdst, fdst, remote, src.Remote(), false, src) return MoveCaseInsensitive(ctx, fdst, fdst, remote, src.Remote(), false, src)
} }
} }
} }
@ -1453,9 +1455,7 @@ func Rmdirs(ctx context.Context, f fs.Fs, dir string, leaveRoot bool) error {
} }
} }
var ( errCount := errcount.New()
errCount = errcount.New()
)
// Delete all directories at the same level in parallel // Delete all directories at the same level in parallel
for level := len(toDelete) - 1; level >= 0; level-- { for level := len(toDelete) - 1; level >= 0; level-- {
dirs := toDelete[level] dirs := toDelete[level]
@ -1775,7 +1775,6 @@ func copyURLFn(ctx context.Context, dstFileName string, url string, autoFilename
// CopyURL copies the data from the url to (fdst, dstFileName) // CopyURL copies the data from the url to (fdst, dstFileName)
func CopyURL(ctx context.Context, fdst fs.Fs, dstFileName string, url string, autoFilename, dstFileNameFromHeader bool, noClobber bool) (dst fs.Object, err error) { func CopyURL(ctx context.Context, fdst fs.Fs, dstFileName string, url string, autoFilename, dstFileNameFromHeader bool, noClobber bool) (dst fs.Object, err error) {
err = copyURLFn(ctx, dstFileName, url, autoFilename, dstFileNameFromHeader, func(ctx context.Context, dstFileName string, in io.ReadCloser, size int64, modTime time.Time) (err error) { err = copyURLFn(ctx, dstFileName, url, autoFilename, dstFileNameFromHeader, func(ctx context.Context, dstFileName string, in io.ReadCloser, size int64, modTime time.Time) (err error) {
if noClobber { if noClobber {
_, err = fdst.NewObject(ctx, dstFileName) _, err = fdst.NewObject(ctx, dstFileName)
@ -1852,16 +1851,13 @@ func needsMoveCaseInsensitive(fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFil
return !cp && fdst.Name() == fsrc.Name() && fdst.Features().CaseInsensitive && dstFileName != srcFileName && strings.EqualFold(dstFilePath, srcFilePath) return !cp && fdst.Name() == fsrc.Name() && fdst.Features().CaseInsensitive && dstFileName != srcFileName && strings.EqualFold(dstFilePath, srcFilePath)
} }
// Special case for changing case of a file on a case insensitive remote // MoveCaseInsensitive handles changing case of a file on a case insensitive remote.
// This will move the file to a temporary name then // This will move the file to a temporary name then
// move it back to the intended destination. This is required // move it back to the intended destination. This is required
// to avoid issues with certain remotes and avoid file deletion. // to avoid issues with certain remotes and avoid file deletion.
// returns nil, nil if !needsMoveCaseInsensitive. // returns nil, nil if !needsMoveCaseInsensitive.
// this does not account a transfer -- the caller should do that if desired. // this does not account a transfer -- the caller should do that if desired.
func moveCaseInsensitive(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool, srcObj fs.Object) (newDst fs.Object, err error) { func MoveCaseInsensitive(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool, srcObj fs.Object) (newDst fs.Object, err error) {
if !needsMoveCaseInsensitive(fdst, fsrc, dstFileName, srcFileName, cp) {
return nil, nil
}
logger, _ := GetLogger(ctx) logger, _ := GetLogger(ctx)
// Choose operations // Choose operations
@ -1947,7 +1943,7 @@ func moveOrCopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName str
defer func() { defer func() {
tr.Done(ctx, err) tr.Done(ctx, err)
}() }()
_, err = moveCaseInsensitive(ctx, fdst, fsrc, dstFileName, srcFileName, cp, srcObj) _, err = MoveCaseInsensitive(ctx, fdst, fsrc, dstFileName, srcFileName, cp, srcObj)
return err return err
} }
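
The EqualFn change above is the standard context-key pattern: an unexported empty struct type keys a value stored on the context, so a caller (here, bisync) can inject a replacement for Equal() during NeedTransfer(). A self-contained sketch of the pattern with hypothetical accessor names (only the type declarations are copied from the diff):

package sketch

import (
	"context"

	"github.com/rclone/rclone/fs"
)

// EqualFn allows replacing Equal() with a custom function during NeedTransfer()
// (signature copied from the diff above).
type EqualFn func(ctx context.Context, src fs.ObjectInfo, dst fs.Object) bool

// equalFnContextKey is unexported so no other package can collide with the key.
type equalFnContextKey struct{}

var equalFnKey = equalFnContextKey{}

// withEqualFn returns a context carrying fn (hypothetical helper name).
func withEqualFn(ctx context.Context, fn EqualFn) context.Context {
	return context.WithValue(ctx, equalFnKey, fn)
}

// equalFnFromContext retrieves the injected EqualFn, if any (hypothetical helper name).
func equalFnFromContext(ctx context.Context) (EqualFn, bool) {
	fn, ok := ctx.Value(equalFnKey).(EqualFn)
	return fn, ok
}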

View File

@ -7,6 +7,7 @@ tests:
fastlist: true fastlist: true
- path: fs/sync - path: fs/sync
fastlist: true fastlist: true
- path: cmd/bisync
- path: vfs - path: vfs
- path: cmd/serve/restic - path: cmd/serve/restic
localonly: true localonly: true
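
The last hunk registers cmd/bisync as a test directory in test_all's config.yaml. A minimal sketch of how such an entry decodes (the anonymous struct here is purely for illustration; the real config types live in fstest/test_all):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// the entry added above, as it appears in config.yaml
	data := []byte("tests:\n  - path: cmd/bisync\n")
	var cfg struct {
		Tests []struct {
			Path     string `yaml:"path"`
			FastList bool   `yaml:"fastlist"`
		} `yaml:"tests"`
	}
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%s (fastlist=%v)\n", cfg.Tests[0].Path, cfg.Tests[0].FastList)
	// cmd/bisync (fastlist=false)
}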