bisync: add to integration tests - fixes #7665
This change officially adds bisync to the nightly integration tests for all backends. This will be part of giving us the confidence to take bisync out of beta.

A number of fixes have been added to account for features which can differ on different backends -- for example, hash types / modtime support, empty directories, unicode normalization, and unimportant differences in log output. We will likely find that more of these are needed once we start running these with the full set of remotes.

Additionally, bisync's extremely sensitive tests revealed a few bugs in other backends that weren't previously covered by other tests. Fixes for those issues have been submitted in the following separate PRs (and bisync test failures are expected until they are merged):
- #7670 memory: fix deadlock in operations.Purge
- #7688 memory: fix incorrect list entries when rooted at subdirectory
- #7690 memory: fix dst mutating src after server-side copy
- #7692 dropbox: fix chunked uploads when size <= chunkSize

Relatedly, workarounds have been put in place for the following backend limitations that are unsolvable for the time being:
- #3262 drive is sometimes aware of trashed files/folders when it shouldn't be
- #6199 dropbox can't handle emojis and certain other characters
- #4590 onedrive API has a longstanding bug for conflictBehavior=replace in server-side copy/move
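For context, the nightly runs are driven by fstest/test_all and its config.yaml, which the new bisync_debug_test.go below also parses. A minimal sketch of what a bisync entry could look like is shown here; the keys mirror the Test and Backend structs in that file, but the exact key names and values are illustrative assumptions, not the committed configuration:

    # Illustrative sketch only -- keys follow the Test/Backend structs; values are assumptions.
    tests:
      - path: cmd/bisync
        addbackend: true   # append each configured backend's remote to the test invocation
        noretries: true    # bisync runs are long; leave retries to the runner's -maxtries
    backends:
      - backend: local
        remote: ""
      - backend: drive
        remote: "TestDrive:"
        ignore:
          - TestBisyncRemoteRemote/extended_filenames   # hypothetical example of ignoring a known backend limitation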
This commit is contained in: parent fecce67ac6, commit 2bebbfaded

130 cmd/bisync/bisync_debug_test.go Normal file
@ -0,0 +1,130 @@
package bisync_test

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/rclone/rclone/cmd/bisync/bilib"
	"github.com/rclone/rclone/fs"
	"github.com/stretchr/testify/assert"
	"gopkg.in/yaml.v2"
)

const configFile = "../../fstest/test_all/config.yaml"

// Config describes the config for this program
type Config struct {
	Tests    []Test
	Backends []Backend
}

// Test describes an integration test to run with `go test`
type Test struct {
	Path       string // path to the source directory
	FastList   bool   // if it is possible to add -fast-list to tests
	Short      bool   // if it is possible to run the test with -short
	AddBackend bool   // set if Path needs the current backend appending
	NoRetries  bool   // set if no retries should be performed
	NoBinary   bool   // set to not build a binary in advance
	LocalOnly  bool   // if set only run with the local backend
}

// Backend describes a backend test
//
// FIXME make bucket-based remotes set sub-dir automatically???
type Backend struct {
	Backend     string   // name of the backend directory
	Remote      string   // name of the test remote
	FastList    bool     // set to test with -fast-list
	Short       bool     // set to test with -short
	OneOnly     bool     // set to run only one backend test at once
	MaxFile     string   // file size limit
	CleanUp     bool     // when running clean, run cleanup first
	Ignore      []string // test names to ignore the failure of
	Tests       []string // paths of tests to run, blank for all
	ListRetries int      // -list-retries if > 0
	ExtraTime   float64  // factor to multiply the timeout by
}

func parseConfig() (*Config, error) {
	d, err := os.ReadFile(configFile)
	if err != nil {
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}
	config := &Config{}
	err = yaml.Unmarshal(d, &config)
	if err != nil {
		return nil, fmt.Errorf("failed to parse config file: %w", err)
	}
	return config, nil
}

const debugFormat = `	{
		"name": %q,
		"type": "go",
		"request": "launch",
		"mode": "test",
		"program": "./cmd/bisync",
		"args": ["-remote", %q, "-remote2", %q, "-case", %q, "-no-cleanup"]
	},
`

const docFormat = `{
	"version": "0.2.0",
	"configurations": [
%s
	]
}`

// generates a launch.json file for debugging in VS Code.
// note: just copy the ones you need into your real launch.json file, as VS Code will crash if there are too many!
func (b *bisyncTest) generateDebuggers() {
	config, err := parseConfig()
	if err != nil {
		fs.Errorf(config, "failed to parse config: %v", err)
	}

	testList := []string{}
	for _, testCase := range b.listDir(b.dataRoot) {
		if strings.HasPrefix(testCase, "test_") {
			// if dir is empty, skip it (can happen due to gitignored files/dirs when checking out branch)
			if len(b.listDir(filepath.Join(b.dataRoot, testCase))) == 0 {
				continue
			}
			testList = append(testList, testCase)
		}
	}

	variations := []string{"LocalRemote", "RemoteLocal", "RemoteRemote"}
	debuggers := ""

	for _, backend := range config.Backends {
		if backend.Remote == "" {
			backend.Remote = "local"
		}
		for _, testcase := range testList {
			for _, variation := range variations {
				if variation != "RemoteRemote" && backend.Remote == "local" {
					continue
				}

				name := fmt.Sprintf("Test %s %s %s", backend.Remote, testcase, variation)
				switch variation {
				case "LocalRemote":
					debuggers += fmt.Sprintf(debugFormat, name, "local", backend.Remote, testcase)
				case "RemoteLocal":
					debuggers += fmt.Sprintf(debugFormat, name, backend.Remote, "local", testcase)
				case "RemoteRemote":
					debuggers += fmt.Sprintf(debugFormat, name, backend.Remote, backend.Remote, testcase)
				}
			}
		}
	}

	out := fmt.Sprintf(docFormat, debuggers)
	outpath := "./testdata/bisync_vscode_debuggers_launch.json"
	err = os.WriteFile(outpath, []byte(out), bilib.PermSecure)
	assert.NoError(b.t, err, "writing golden file %s", outpath)
}
@ -61,6 +61,15 @@ const (

var initDate = time.Date(2000, time.January, 1, 0, 0, 0, 0, bisync.TZ)

/* Useful Command Shortcuts */
// go test ./cmd/bisync -remote local -race
// go test ./cmd/bisync -remote local -golden
// go test ./cmd/bisync -remote local -case extended_filenames
// go run ./fstest/test_all -run '^TestBisync.*$' -timeout 3h -verbose -maxtries 5
// go run ./fstest/test_all -remotes local,TestCrypt:,TestDrive:,TestOneDrive:,TestOneDriveBusiness:,TestDropbox:,TestCryptDrive:,TestOpenDrive:,TestChunker:,:memory:,TestCryptNoEncryption:,TestCombine:DirA,TestFTPRclone:,TestWebdavRclone:,TestS3Rclone:,TestSFTPRclone:,TestSFTPRcloneSSH: -run '^TestBisync.*$' -timeout 3h -verbose -maxtries 5
// go test -timeout 3h -run '^TestBisync.*$' github.com/rclone/rclone/cmd/bisync -remote TestDrive:Bisync -v
// go test -timeout 3h -run '^TestBisyncRemoteRemote/basic$' github.com/rclone/rclone/cmd/bisync -remote TestDropbox:Bisync -v

// logReplacements make modern test logs comparable with golden dir.
// It is a string slice of even length with this structure:
//

@ -77,7 +86,8 @@ var logReplacements = []string{
`^DEBUG : .*$`, dropMe,
// ignore dropbox info messages
`^NOTICE: too_many_(requests|write_operations)/\.*: Too many requests or write operations.*$`, dropMe,
`^NOTICE: Dropbox root .*?: Forced to upload files to set modification times on this backend.$`, dropMe,
`^NOTICE: .*?: Forced to upload files to set modification times on this backend.$`, dropMe,
`^INFO : .*? Committing uploads - please wait...$`, dropMe,
`^INFO : .*?: src and dst identical but can't set mod time without deleting and re-uploading$`, dropMe,
// ignore crypt info messages
`^INFO : .*?: Crypt detected! Using cryptcheck instead of check. \(Use --size-only or --ignore-checksum to disable\)$`, dropMe,

@ -89,6 +99,7 @@ var logReplacements = []string{
`^.*?"SlowHashDetected":.*?$`, dropMe,
`^.*? for same-side diffs on .*?$`, dropMe,
`^.*?Downloading hashes.*?$`, dropMe,
`^.*?Can't compare hashes, so using check --download.*?$`, dropMe,
// ignore timestamps in directory time updates
`^(INFO : .*?: (Made directory with|Set directory) (metadata|modification time)).*$`, dropMe,
// ignore sizes in directory time updates

@ -127,7 +138,7 @@ var logHoppers = []string{
`(?:INFO |NOTICE): .*: Fixed case by renaming to: .*`,

// order of files re-checked prior to a conflict rename
`ERROR : .*: md5 differ.*`,
`ERROR : .*: {hashtype} differ.*`,

// Directory modification time setting can happen in any order
`INFO : .*: (Set directory modification time|Made directory with metadata).*`,
@ -189,20 +200,43 @@ type bisyncTest struct {
parent1 fs.Fs
parent2 fs.Fs
// global flags
argRemote1 string
argRemote2 string
noCompare bool
noCleanup bool
golden bool
debug bool
stopAt int
TestFn bisync.TestFunc
argRemote1 string
argRemote2 string
noCompare bool
noCleanup bool
golden bool
debug bool
stopAt int
TestFn bisync.TestFunc
ignoreModtime bool // ignore modtimes when comparing final listings, for backends without support
}

var color = bisync.Color

// Path1 is remote, Path2 is local
func TestBisyncRemoteLocal(t *testing.T) {
if *fstest.RemoteName == *argRemote2 {
t.Skip("path1 and path2 are the same remote")
}
testBisync(t, *fstest.RemoteName, *argRemote2)
}

// Path1 is local, Path2 is remote
func TestBisyncLocalRemote(t *testing.T) {
if *fstest.RemoteName == *argRemote2 {
t.Skip("path1 and path2 are the same remote")
}
testBisync(t, *argRemote2, *fstest.RemoteName)
}

// Path1 and Path2 are both different directories on remote
// (useful for testing server-side copy/move)
func TestBisyncRemoteRemote(t *testing.T) {
testBisync(t, *fstest.RemoteName, *fstest.RemoteName)
}

// TestBisync is a test engine for bisync test cases.
func TestBisync(t *testing.T) {
func testBisync(t *testing.T, path1, path2 string) {
ctx := context.Background()
fstest.Initialise()

@ -215,7 +249,8 @@ func TestBisync(t *testing.T) {
ci.RefreshTimes = true
}
bisync.Colors = true
time.Local, _ = time.LoadLocation("America/New_York")
time.Local = bisync.TZ
ci.FsCacheExpireDuration = 5 * time.Hour

baseDir, err := os.Getwd()
require.NoError(t, err, "get current directory")

@ -234,8 +269,8 @@ func TestBisync(t *testing.T) {
logDir: filepath.Join(tempDir, "logs"),
logPath: filepath.Join(workDir, logFileName),
// global flags
argRemote1: *fstest.RemoteName,
argRemote2: *argRemote2,
argRemote1: path1,
argRemote2: path2,
noCompare: *argNoCompare,
noCleanup: *argNoCleanup,
golden: *argGolden,
@ -333,10 +368,11 @@ func (b *bisyncTest) runTestCase(ctx context.Context, t *testing.T, testCase str

// Prepare initial content
b.cleanupCase(ctx)
initFs, err := fs.NewFs(ctx, b.initDir)
initFs, err := cache.Get(ctx, b.initDir)
require.NoError(b.t, err)
require.NoError(b.t, sync.CopyDir(ctx, b.fs1, initFs, true), "setting up path1")
require.NoError(b.t, sync.CopyDir(ctx, b.fs2, initFs, true), "setting up path2")
ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
require.NoError(b.t, sync.CopyDir(ctxNoDsStore, b.fs1, initFs, true), "setting up path1")
require.NoError(b.t, sync.CopyDir(ctxNoDsStore, b.fs2, initFs, true), "setting up path2")

// Create log file
b.mkdir(b.workDir)

@ -443,11 +479,11 @@ func (b *bisyncTest) runTestCase(ctx context.Context, t *testing.T, testCase str
// if a local path is provided, it's ignored (the test will run under system temp)
func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string) (f, parent fs.Fs, path, canon string) {
var err error
if bilib.IsLocalPath(remote) {
if bilib.IsLocalPath(remote) && !strings.HasPrefix(remote, ":") {
if remote != "" && remote != "local" {
b.t.Fatalf(`Missing ":" in remote %q. Use "local" to test with local filesystem.`, remote)
}
parent, err = fs.NewFs(ctx, b.tempDir)
parent, err = cache.Get(ctx, b.tempDir)
require.NoError(b.t, err, "parsing %s", b.tempDir)

path = filepath.Join(b.tempDir, b.testCase)

@ -459,7 +495,7 @@ func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string)
remote += "/"
}
remote += b.randName
parent, err = fs.NewFs(ctx, remote)
parent, err = cache.Get(ctx, remote)
require.NoError(b.t, err, "parsing %s", remote)

path = remote + "/" + b.testCase

@ -467,13 +503,9 @@ func (b *bisyncTest) makeTempRemote(ctx context.Context, remote, subdir string)
path += "/" + subdir
}

f, err = fs.NewFs(ctx, path)
f, err = cache.Get(ctx, path)
require.NoError(b.t, err, "parsing %s/%s", remote, subdir)
path = bilib.FsPath(f) // Make it canonical

if f.Precision() == fs.ModTimeNotSupported {
b.t.Skipf("modification time support is missing on %s", subdir)
}
return
}
@ -509,12 +541,12 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {

for i := 0; i < 50; i++ {
dst := "file" + fmt.Sprint(i) + ".txt"
err := b.copyFile(ctx, src, b.path2, dst)
err := b.copyFile(ctx, src, bilib.StripHexString(b.path2), dst)
if err != nil {
fs.Errorf(src, "error copying file: %v", err)
}
dst = "file" + fmt.Sprint(100-i) + ".txt"
err = b.copyFile(ctx, src, b.path1, dst)
err = b.copyFile(ctx, src, bilib.StripHexString(b.path1), dst)
if err != nil {
fs.Errorf(dst, "error copying file: %v", err)
}

@ -534,18 +566,21 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
return b.saveTestListings(args[1], false)
case "purge-children":
b.checkArgs(args, 1, 1)
if fsrc, err = fs.NewFs(ctx, args[1]); err != nil {
return err
dir := ""
if strings.HasPrefix(args[1], bilib.StripHexString(b.path1)) {
fsrc = b.fs1
dir = strings.TrimPrefix(args[1], bilib.StripHexString(b.path1))
} else if strings.HasPrefix(args[1], bilib.StripHexString(b.path2)) {
fsrc = b.fs2
dir = strings.TrimPrefix(args[1], bilib.StripHexString(b.path2))
} else {
return fmt.Errorf("error parsing arg: %q (path1: %q, path2: %q)", args[1], b.path1, b.path2)
}
err = purgeChildren(ctx, fsrc, "")
if err != nil {
return err
}
return
return purgeChildren(ctx, fsrc, dir)
case "delete-file":
b.checkArgs(args, 1, 1)
dir, file := filepath.Split(args[1])
if fsrc, err = fs.NewFs(ctx, dir); err != nil {
if fsrc, err = cache.Get(ctx, dir); err != nil {
return err
}
var obj fs.Object

@ -555,14 +590,14 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
return operations.DeleteFile(ctx, obj)
case "delete-glob":
b.checkArgs(args, 2, 2)
if fsrc, err = fs.NewFs(ctx, args[1]); err != nil {
if fsrc, err = cache.Get(ctx, args[1]); err != nil {
return err
}
return deleteFiles(ctx, fsrc, args[2])
case "touch-glob":
b.checkArgs(args, 3, 3)
date, src, glob := args[1], args[2], args[3]
if fsrc, err = fs.NewFs(ctx, src); err != nil {
if fsrc, err = cache.Get(ctx, src); err != nil {
return err
}
_, err = touchFiles(ctx, date, fsrc, src, glob)

@ -571,7 +606,7 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
b.checkArgs(args, 3, 3)
date, src, dst := args[1], args[2], args[3]
dir, file := filepath.Split(src)
if fsrc, err = fs.NewFs(ctx, dir); err != nil {
if fsrc, err = cache.Get(ctx, dir); err != nil {
return err
}
if _, err = touchFiles(ctx, date, fsrc, dir, file); err != nil {
@ -604,9 +639,11 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
}
switch args[0] {
case "copy-dir":
err = sync.CopyDir(ctx, fdst, fsrc, true)
ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
err = sync.CopyDir(ctxNoDsStore, fdst, fsrc, true)
case "sync-dir":
err = sync.Sync(ctx, fdst, fsrc, true)
ctxNoDsStore, _ := ctxNoDsStore(ctx, b.t)
err = sync.Sync(ctxNoDsStore, fdst, fsrc, true)
}
return err
case "list-dirs":

@ -646,7 +683,7 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
remoteName = "/"
}

fsrc, err = fs.NewFs(ctx, remoteName)
fsrc, err = cache.Get(ctx, remoteName)
if err != nil {
return err
}

@ -665,21 +702,34 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {

fixDirname := func(old, new string) {
if new != old {
oldName, err := fs.NewFs(ctx, old)
oldName, err := cache.Get(ctx, old)
if err != nil {
fs.Logf(old, "error getting Fs: %v", err)
fs.Errorf(old, "error getting Fs: %v", err)
return
}
fs.Debugf(nil, "Attempting to move %s to %s", oldName.Root(), new)
// Create random name to temporarily move dir to
tmpDirName := strings.TrimSuffix(new, slash) + "-rclone-move-" + random.String(8)
var tmpDirFs fs.Fs
tmpDirFs, _ = fs.NewFs(ctx, tmpDirName)
tmpDirFs, err = cache.Get(ctx, tmpDirName)
if err != nil {
fs.Errorf(tmpDirName, "error creating temp dir for move: %v", err)
}
if tmpDirFs == nil {
return
}
err = sync.MoveDir(ctx, tmpDirFs, oldName, true, true)
if err != nil {
fs.Debugf(oldName, "error attempting to move folder: %v", err)
}
// now move the temp dir to real name
fsrc, _ = fs.NewFs(ctx, new)
fsrc, err = cache.Get(ctx, new)
if err != nil {
fs.Errorf(new, "error creating fsrc dir for move: %v", err)
}
if fsrc == nil {
return
}
err = sync.MoveDir(ctx, fsrc, tmpDirFs, true, true)
if err != nil {
fs.Debugf(tmpDirFs, "error attempting to move folder to %s: %v", fsrc.Root(), err)
@ -709,7 +759,11 @@ func (b *bisyncTest) runTestStep(ctx context.Context, line string) (err error) {
fs.Debugf(nil, "HASH old: %s new: %s equal: %v", stringToHash(old), stringToHash(new), stringToHash(old) == stringToHash(new))
if ok && new != old {
fs.Debugf(new, "attempting to rename %s to %s", old, new)
err = operations.MoveFile(ctx, fsrc, fsrc, new, old)
srcObj, err := fsrc.NewObject(ctx, old)
if err != nil {
fs.Errorf(old, "error finding srcObj - %v", err)
}
_, err = operations.MoveCaseInsensitive(ctx, fsrc, fsrc, new, old, false, srcObj)
if err != nil {
fs.Errorf(new, "error trying to rename %s to %s - %v", old, new, err)
}
@ -777,6 +831,95 @@ func (b *bisyncTest) checkArgs(args []string, min, max int) {
}
}

func (b *bisyncTest) checkPreReqs(ctx context.Context, opt *bisync.Options) (context.Context, *bisync.Options) {
// check pre-requisites
if b.testCase == "backupdir" && !(b.fs1.Features().IsLocal && b.fs2.Features().IsLocal) {
b.t.Skip("backupdir test currently only works on local (it uses the workdir)")
}
if b.testCase == "volatile" && !(b.fs1.Features().IsLocal && b.fs2.Features().IsLocal) {
b.t.Skip("skipping 'volatile' test on non-local as it requires uploading 100 files")
}
if strings.HasPrefix(b.fs1.String(), "Dropbox") || strings.HasPrefix(b.fs2.String(), "Dropbox") {
fs.GetConfig(ctx).RefreshTimes = true // https://rclone.org/bisync/#notes-about-testing
}
if strings.HasPrefix(b.fs1.String(), "Dropbox") {
b.fs1.Features().Disable("Copy") // https://github.com/rclone/rclone/issues/6199#issuecomment-1570366202
}
if strings.HasPrefix(b.fs2.String(), "Dropbox") {
b.fs2.Features().Disable("Copy") // https://github.com/rclone/rclone/issues/6199#issuecomment-1570366202
}
if strings.HasPrefix(b.fs1.String(), "OneDrive") {
b.fs1.Features().Disable("Copy") // API has longstanding bug for conflictBehavior=replace https://github.com/rclone/rclone/issues/4590
b.fs1.Features().Disable("Move")
}
if strings.HasPrefix(b.fs2.String(), "OneDrive") {
b.fs2.Features().Disable("Copy") // API has longstanding bug for conflictBehavior=replace https://github.com/rclone/rclone/issues/4590
b.fs2.Features().Disable("Move")
}
if (!b.fs1.Features().CanHaveEmptyDirectories || !b.fs2.Features().CanHaveEmptyDirectories) && (b.testCase == "createemptysrcdirs" || b.testCase == "rmdirs") {
b.t.Skip("skipping test as remote does not support empty dirs")
}
if b.fs1.Precision() == fs.ModTimeNotSupported || b.fs2.Precision() == fs.ModTimeNotSupported {
if b.testCase != "nomodtime" {
b.t.Skip("skipping test as at least one remote does not support setting modtime")
}
b.ignoreModtime = true
}
// test if modtimes are writeable
testSetModtime := func(f fs.Fs) {
in := bytes.NewBufferString("modtime_write_test")
objinfo := object.NewStaticObjectInfo("modtime_write_test", initDate, int64(len("modtime_write_test")), true, nil, nil)
obj, err := f.Put(ctx, in, objinfo)
require.NoError(b.t, err)
err = obj.SetModTime(ctx, initDate)
if err == fs.ErrorCantSetModTime {
if b.testCase != "nomodtime" {
b.t.Skip("skipping test as at least one remote does not support setting modtime")
}
}
err = obj.Remove(ctx)
require.NoError(b.t, err)
}
testSetModtime(b.fs1)
testSetModtime(b.fs2)

if b.testCase == "normalization" || b.testCase == "extended_char_paths" || b.testCase == "extended_filenames" {
// test whether remote is capable of running test
const chars = "ě_{chr:81}{chr:fe}{spc}áñhࢺ_測試Русский_ěáñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö"
testfilename1 := splitLine(norm.NFD.String(norm.NFC.String(chars)))[0]
testfilename2 := splitLine(norm.NFC.String(norm.NFD.String(chars)))[0]
preTest := func(f fs.Fs, testfilename string) string {
in := bytes.NewBufferString(testfilename)
objinfo := object.NewStaticObjectInfo(testfilename, initDate, int64(len(testfilename)), true, nil, nil)
obj, err := f.Put(ctx, in, objinfo)
if err != nil {
b.t.Skipf("Fs is incapable of running test, skipping: %s (expected: \n%s (%s) actual: \n%s (%v))\n (fs: %s) \n", b.testCase, testfilename, detectEncoding(testfilename), "upload failed", err, f)
}
entries, err := f.List(ctx, "")
assert.NoError(b.t, err)
if entries.Len() == 1 && entries[0].Remote() != testfilename {
diffStr, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{A: []string{testfilename}, B: []string{entries[0].Remote()}})
// we can still deal with this as long as both remotes auto-convert the same way.
b.t.Logf("Warning: this remote seems to auto-convert special characters (testcase: %s) (expected: \n%s (%s) actual: \n%s (%s))\n (fs: %s) \n%v", b.testCase, testfilename, detectEncoding(testfilename), entries[0].Remote(), detectEncoding(entries[0].Remote()), f, diffStr)
}
err = obj.Remove(ctx)
require.NoError(b.t, err)
return entries[0].Remote()
}
got1 := preTest(b.fs1, testfilename1)
got1 += preTest(b.fs1, testfilename2)
if b.fs1.Name() != b.fs2.Name() {
got2 := preTest(b.fs2, testfilename1)
got2 += preTest(b.fs2, testfilename2)
if got1 != got2 {
diffStr, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{A: []string{got1}, B: []string{got2}})
b.t.Skipf("Fs is incapable of running test as the paths produce different results, skipping: %s (path1: \n%s (%s) path2: \n%s (%s))\n (fs1: %s fs2: %s) \n%v", b.testCase, got1, detectEncoding(got1), got2, got2, b.fs1, b.fs2, diffStr)
}
}
}
return ctx, opt
}

func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
opt := &bisync.Options{
Workdir: b.workDir,
@ -787,12 +930,13 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
CheckSync: bisync.CheckSyncTrue,
TestFn: b.TestFn,
}
ctx, opt = b.checkPreReqs(ctx, opt)
octx, ci := fs.AddConfig(ctx)
fs1, fs2 := b.fs1, b.fs2

addSubdir := func(path, subdir string) fs.Fs {
remote := path + subdir
f, err := fs.NewFs(ctx, remote)
f, err := cache.Get(ctx, remote)
require.NoError(b.t, err, "parsing remote %q", remote)
return f
}

@ -838,9 +982,13 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {
case "compare-all":
opt.CompareFlag = "size,modtime,checksum"
opt.Compare.DownloadHash = true // allows us to test crypt and the like
case "nomodtime":
ci.CheckSum = true
opt.CompareFlag = "size,checksum"
opt.Compare.DownloadHash = true // allows us to test crypt and the like
case "subdir":
fs1 = addSubdir(b.path1, val)
fs2 = addSubdir(b.path2, val)
fs1 = addSubdir(bilib.StripHexString(b.path1), val)
fs2 = addSubdir(bilib.StripHexString(b.path2), val)
case "backupdir1":
opt.BackupDir1 = val
case "backupdir2":

@ -872,6 +1020,10 @@ func (b *bisyncTest) runBisync(ctx context.Context, args []string) (err error) {

// set all dirs to a fixed date for test stability, as they are considered as of v1.66.
jamDirTimes := func(f fs.Fs) {
if f.Features().DirSetModTime == nil && f.Features().MkdirMetadata == nil {
fs.Debugf(f, "Skipping jamDirTimes as remote does not support DirSetModTime or MkdirMetadata")
return
}
err := walk.ListR(ctx, f, "", true, -1, walk.ListDirs, func(entries fs.DirEntries) error {
var err error
entries.ForDir(func(dir fs.Directory) {
@ -929,10 +1081,11 @@ func (b *bisyncTest) saveTestListings(prefix string, keepSource bool) (err error
}

func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err error) {
fs.Debugf(nil, "copyFile %q to %q as %q", src, dst, asName)
var fsrc, fdst fs.Fs
var srcPath, srcFile, dstPath, dstFile string

switch fsrc, err = fs.NewFs(ctx, src); err {
switch fsrc, err = fs.NewFs(ctx, src); err { // intentionally using NewFs here to avoid dircaching the parent
case fs.ErrorIsFile:
// ok
case nil:

@ -955,7 +1108,7 @@ func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err
if dstFile != "" {
dstPath = dst // force directory
}
if fdst, err = fs.NewFs(ctx, dstPath); err != nil {
if fdst, err = fs.NewFs(ctx, dstPath); err != nil { // intentionally using NewFs here to avoid dircaching the parent
return err
}

@ -969,12 +1122,13 @@ func (b *bisyncTest) copyFile(ctx context.Context, src, dst, asName string) (err
if err := fi.AddFile(srcFile); err != nil {
return err
}
fs.Debugf(nil, "operations.CopyFile %q to %q as %q", srcFile, fdst.String(), dstFile)
return operations.CopyFile(fctx, fdst, fsrc, dstFile, srcFile)
}

// listSubdirs is equivalent to `rclone lsf -R [--dirs-only]`
func (b *bisyncTest) listSubdirs(ctx context.Context, remote string, DirsOnly bool) error {
f, err := fs.NewFs(ctx, remote)
f, err := cache.Get(ctx, remote)
if err != nil {
return err
}

@ -1036,6 +1190,9 @@ func deleteFiles(ctx context.Context, f fs.Fs, glob string) error {
// Note: `rclone touch` can touch only single file, doesn't support filters.
func touchFiles(ctx context.Context, dateStr string, f fs.Fs, dir, glob string) ([]string, error) {
files := []string{}
if f.Precision() == fs.ModTimeNotSupported {
return files, nil
}

date, err := time.ParseInLocation(touchDateFormat, dateStr, bisync.TZ)
if err != nil {

@ -1065,14 +1222,19 @@ func touchFiles(ctx context.Context, dateStr string, f fs.Fs, dir, glob string)

fs.Debugf(obj, "Set modification time %s", dateStr)
err := obj.SetModTime(ctx, date)
if err == fs.ErrorCantSetModTimeWithoutDelete {
if err == fs.ErrorCantSetModTimeWithoutDelete || err == fs.ErrorCantSetModTime {
// Workaround for dropbox, similar to --refresh-times
err = nil
buf := new(bytes.Buffer)
size := obj.Size()
separator := ""
if size > 0 {
err = operations.Cat(ctx, f, buf, 0, size, []byte(separator))
filterCtx, fi := filter.AddConfig(ctx)
err = fi.AddFile(remote) // limit Cat to only this file, not all files in dir
if err != nil {
return files, err
}
err = operations.Cat(filterCtx, f, buf, 0, size, []byte(separator))
}
info := object.NewStaticObjectInfo(remote, date, size, true, nil, f)
if err == nil {
@ -1186,6 +1348,7 @@ func (b *bisyncTest) compareResults() int {
// Golden results will have adapted file names and contain
// generic strings instead of local or cloud paths.
func (b *bisyncTest) storeGolden() {
b.generateDebuggers()
// Perform consistency checks
files := b.listDir(b.workDir)
require.NotEmpty(b.t, files, "nothing to store in golden dir")

@ -1271,8 +1434,9 @@ func (b *bisyncTest) mangleResult(dir, file string, golden bool) string {
// First replace filenames with whitespace
// some backends (such as crypt) log them on multiple lines due to encoding differences, while others (local) do not
wsrep := []string{
"subdir with" + eol + "white space.txt/file2 with" + eol + "white space.txt",
"subdir with white space.txt/file2 with white space.txt",
"subdir with" + eol + "white space.txt/file2 with" + eol + "white space.txt", "subdir with white space.txt/file2 with white space.txt",
"with\nwhite space", "with white space",
"with\u0090white space", "with white space",
}
whitespaceJoiner := strings.NewReplacer(wsrep...)
s := whitespaceJoiner.Replace(string(buf))

@ -1404,7 +1568,6 @@ func (b *bisyncTest) mangleListing(text string, golden bool, file string) string
lineRegex := regexp.MustCompile(`^(\S) +(-?\d+) (\S+) (\S+) (\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d{9}[+-]\d{4}) (".+")$`)
const timeFormat = "2006-01-02T15:04:05.000000000-0700"
const lineFormat = "%s %8d %s %s %s %q\n"
TZ := time.UTC
fields := lineRegex.FindStringSubmatch(strings.TrimSuffix(lines[i], "\n"))
if fields != nil {
sizeVal, sizeErr := strconv.ParseInt(fields[2], 10, 64)

@ -1412,11 +1575,11 @@ func (b *bisyncTest) mangleListing(text string, golden bool, file string) string
// account for filename encoding differences by normalizing to OS encoding
fields[6] = normalizeEncoding(fields[6])
timeStr := fields[5]
if f.Precision() == fs.ModTimeNotSupported {
if f.Precision() == fs.ModTimeNotSupported || b.ignoreModtime {
lines[i] = fmt.Sprintf(lineFormat, fields[1], sizeVal, fields[3], fields[4], "-", fields[6])
continue
}
timeVal, timeErr := time.ParseInLocation(timeFormat, timeStr, TZ)
timeVal, timeErr := time.ParseInLocation(timeFormat, timeStr, bisync.TZ)
if timeErr == nil {
timeRound := timeVal.Round(f.Precision() * 2)
lines[i] = fmt.Sprintf(lineFormat, fields[1], sizeVal, fields[3], fields[4], timeRound, fields[6])

@ -1453,8 +1616,8 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
"{datadir/}", b.dataDir + slash,
"{testdir/}", b.testDir + slash,
"{workdir/}", b.workDir + slash,
"{path1/}", b.path1,
"{path2/}", b.path2,
"{path1/}", bilib.StripHexString(b.path1),
"{path2/}", bilib.StripHexString(b.path2),
"{session}", b.sessionName,
"{/}", slash,
}

@ -1469,6 +1632,8 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
b.fs2.String(), "{path2String}",
b.path1, "{path1/}",
b.path2, "{path2/}",
bilib.StripHexString(b.path1), "{path1/}",
bilib.StripHexString(b.path2), "{path2/}",
"//?/" + strings.TrimSuffix(strings.Replace(b.path1, slash, "/", -1), "/"), "{path1}", // fix windows-specific issue
"//?/" + strings.TrimSuffix(strings.Replace(b.path2, slash, "/", -1), "/"), "{path2}",
strings.TrimSuffix(b.path1, slash), "{path1}", // ensure it's still recognized without trailing slash

@ -1476,6 +1641,10 @@ func (b *bisyncTest) newReplacer(mangle bool) *strings.Replacer {
b.workDir, "{workdir}",
b.sessionName, "{session}",
}
// convert all hash types to "{hashtype}"
for _, ht := range hash.Supported().Array() {
rep = append(rep, ht.String(), "{hashtype}")
}
if fixSlash {
prep := []string{}
for i := 0; i < len(rep); i += 2 {
@ -1611,3 +1780,26 @@ func stringToHash(s string) string {
}
return sum
}

func detectEncoding(s string) string {
if norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "BOTH"
}
if !norm.NFC.IsNormalString(s) && norm.NFD.IsNormalString(s) {
return "NFD"
}
if norm.NFC.IsNormalString(s) && !norm.NFD.IsNormalString(s) {
return "NFC"
}
return "OTHER"
}

// filters out those pesky macOS .DS_Store files, which are forbidden on Dropbox and just generally annoying
func ctxNoDsStore(ctx context.Context, t *testing.T) (context.Context, *filter.Filter) {
ctxNoDsStore, fi := filter.AddConfig(ctx)
err := fi.AddRule("- .DS_Store")
require.NoError(t, err)
err = fi.AddRule("+ **")
require.NoError(t, err)
return ctxNoDsStore, fi
}
58542 cmd/bisync/testdata/bisync_vscode_debuggers_launch.json vendored Normal file
File diff suppressed because it is too large
@ -94,7 +94,7 @@ INFO : Path2: 7 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 3 newer[0m, [34m 0 older[0m, [36m 3 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
|
|
@ -94,7 +94,7 @@ INFO : Path2: 7 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 3 newer[0m, [34m 0 older[0m, [36m 3 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
[36m(02) :[0m [34mtest EXCLUDE - OTHER TESTS[0m
|
||||
[36m(03) :[0m [34mcopy-file {datadir/}exclude-other-filtersfile.txt {workdir/}[0m
|
||||
|
||||
[36m(04) :[0m [34mtest resync to get the filters file md5 built.[0m
|
||||
[36m(04) :[0m [34mtest resync to get the filters file {hashtype} built.[0m
|
||||
[36m(05) :[0m [34mbisync resync filters-file={workdir/}exclude-other-filtersfile.txt[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
|
@ -18,7 +18,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}exclude-other-filtersfile.txt
|
||||
INFO : Storing filters file hash to {workdir/}exclude-other-filtersfile.txt.md5
|
||||
INFO : Storing filters file hash to {workdir/}exclude-other-filtersfile.txt.{hashtype}
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
@ -133,7 +133,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}include-other-filtersfile.txt
|
||||
INFO : Storing filters file hash to {workdir/}include-other-filtersfile.txt.md5
|
||||
INFO : Storing filters file hash to {workdir/}include-other-filtersfile.txt.{hashtype}
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
|
|
@ -116,7 +116,7 @@ INFO : Path2: 6 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 3 newer[0m, [34m 0 older[0m, [36m 3 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -181,7 +181,7 @@ INFO : Path2: 6 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 3 newer[0m, [34m 0 older[0m, [36m 3 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
|
|
@ -58,7 +58,7 @@ INFO : Path2: 2 changes: [32m 0 new[0m, [33m 2 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 2 newer[0m, [34m 0 older[0m, [36m 2 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
NOTICE: {path2String}: 1 matching files
|
||||
|
|
|
@ -171,7 +171,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}測試_filtersfile.txt
|
||||
INFO : Storing filters file hash to {workdir/}測試_filtersfile.txt.md5
|
||||
INFO : Storing filters file hash to {workdir/}測試_filtersfile.txt.{hashtype}
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
[36m(02) :[0m [34mcopy-file {datadir/}filtersfile.flt {workdir/}[0m
|
||||
|
||||
[36m(03) :[0m [34mtest resync to force building of the filters md5 hash[0m
|
||||
[36m(03) :[0m [34mtest resync to force building of the filters {hashtype} hash[0m
|
||||
[36m(04) :[0m [34mbisync filters-file={workdir/}filtersfile.flt resync[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
|
@ -17,7 +17,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}filtersfile.flt
|
||||
INFO : Storing filters file hash to {workdir/}filtersfile.flt.md5
|
||||
INFO : Storing filters file hash to {workdir/}filtersfile.flt.{hashtype}
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
|
|
@ -24,7 +24,7 @@ INFO : [32mBisync successful[0m
|
|||
[36m(04) :[0m [34mtest 1. inject filters file in workdir.[0m
|
||||
[36m(05) :[0m [34mcopy-file {datadir/}filtersfile.txt {workdir/}[0m
|
||||
|
||||
[36m(06) :[0m [34mtest 2. run with filters-file but without md5. should abort.[0m
|
||||
[36m(06) :[0m [34mtest 2. run with filters-file but without {hashtype}. should abort.[0m
|
||||
[36m(07) :[0m [34mbisync filters-file={workdir/}filtersfile.txt[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
|
@ -38,7 +38,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}filtersfile.txt
|
||||
ERROR : [31mBisync critical error: filters file md5 hash not found (must run --resync): {workdir/}filtersfile.txt[0m
|
||||
ERROR : [31mBisync critical error: filters file {hashtype} hash not found (must run --resync): {workdir/}filtersfile.txt[0m
|
||||
ERROR : [31mBisync aborted. Must run --resync to recover.[0m
|
||||
Bisync error: bisync aborted
|
||||
|
||||
|
@ -78,7 +78,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}filtersfile.txt
|
||||
INFO : Storing filters file hash to {workdir/}filtersfile.txt.md5
|
||||
INFO : Storing filters file hash to {workdir/}filtersfile.txt.{hashtype}
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
@ -143,7 +143,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Using filters file {workdir/}filtersfile.txt
|
||||
INFO : Skipped storing filters file hash to {workdir/}filtersfile.txt.md5 as --dry-run is set
|
||||
INFO : Skipped storing filters file hash to {workdir/}filtersfile.txt.{hashtype} as --dry-run is set
|
||||
INFO : Copying Path2 files to Path1
|
||||
NOTICE: - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
NOTICE: - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
|
|
5 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.copy1to2.que vendored Normal file
@ -0,0 +1,5 @@
"file11.txt"
"file2.txt"
"file4.txt"
"file5.txt.conflict1"
"file7.txt"
5 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.copy2to1.que vendored Normal file
@ -0,0 +1,5 @@
"file1.txt"
"file10.txt"
"file3.txt"
"file5.txt.conflict2"
"file6.txt"
1 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.delete1.que vendored Normal file
@ -0,0 +1 @@
"file3.txt"
1 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.delete2.que vendored Normal file
@ -0,0 +1 @@
"file4.txt"
10 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path1.lst vendored Normal file
|
@ -0,0 +1,10 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
|
||||
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict1"
|
||||
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict2"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
|
8 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path1.lst-new vendored Normal file
|
@ -0,0 +1,8 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
|
||||
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
|
||||
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
|
10 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path1.lst-old vendored Normal file
|
@ -0,0 +1,10 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file8.txt"
|
10 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path2.lst vendored Normal file
|
@ -0,0 +1,10 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file11.txt"
|
||||
- 13 md5:fb3ecfb2800400fb01b0bfd39903e9fb - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 39 md5:0860a03592626642f8fd6c8bfb447d2a - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict1"
|
||||
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt.conflict2"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
|
8 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path2.lst-new vendored Normal file
|
@ -0,0 +1,8 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file10.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
|
||||
- 39 md5:979a803b15d27df0c31ad7d29006d10b - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
|
||||
- 19 md5:7fe98ed88552b828777d8630900346b8 - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
|
10 cmd/bisync/testdata/test_nomodtime/golden/_testdir_path1.._testdir_path2.path2.lst-old vendored Normal file
|
@ -0,0 +1,10 @@
|
|||
# bisync listing v1 from test
|
||||
- 109 md5:294d25b294ff26a5243dba914ac3fbf7 - 0001-01-01T00:00:00.000000000+0000 "RCLONE_TEST"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file1.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file2.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file3.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file4.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file5.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file6.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file7.txt"
|
||||
- 0 md5:d41d8cd98f00b204e9800998ecf8427e - 0001-01-01T00:00:00.000000000+0000 "file8.txt"
|
109 cmd/bisync/testdata/test_nomodtime/golden/test.log vendored Normal file
|
@ -0,0 +1,109 @@
|
|||
[36m(01) :[0m [34mtest changes[0m
|
||||
|
||||
|
||||
[36m(02) :[0m [34mtest initial bisync[0m
|
||||
[36m(03) :[0m [34mbisync resync nomodtime[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": false,
|
||||
"Size": true,
|
||||
"Checksum": true,
|
||||
"NoSlowHash": false,
|
||||
"SlowHashSyncOnly": false,
|
||||
"DownloadHash": true
|
||||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
INFO : Resync updating listings
|
||||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(04) :[0m [34mtest make modifications on both paths[0m
|
||||
[36m(05) :[0m [34mtest new on path2 - file10[0m
|
||||
[36m(06) :[0m [34mcopy-file {datadir/}file10.txt {path2/}[0m
|
||||
|
||||
[36m(07) :[0m [34mtest changed on path2 - file1[0m
|
||||
[36m(08) :[0m [34mcopy-file {datadir/}file1.txt {path2/}[0m
|
||||
|
||||
[36m(09) :[0m [34mtest new on path1 - file11[0m
|
||||
[36m(10) :[0m [34mcopy-file {datadir/}file11.txt {path1/}[0m
|
||||
|
||||
[36m(11) :[0m [34mtest changed on path1 - file2[0m
|
||||
[36m(12) :[0m [34mcopy-file {datadir/}file2.txt {path1/}[0m
|
||||
|
||||
[36m(13) :[0m [34mtest deleted on path2 - file3[0m
|
||||
[36m(14) :[0m [34mdelete-file {path2/}file3.txt[0m
|
||||
|
||||
[36m(15) :[0m [34mtest deleted on path1 - file4[0m
|
||||
[36m(16) :[0m [34mdelete-file {path1/}file4.txt[0m
|
||||
|
||||
[36m(17) :[0m [34mtest deleted on both paths - file8[0m
|
||||
[36m(18) :[0m [34mdelete-file {path1/}file8.txt[0m
|
||||
[36m(19) :[0m [34mdelete-file {path2/}file8.txt[0m
|
||||
|
||||
[36m(20) :[0m [34mtest changed on both paths - file5 (file5R, file5L)[0m
|
||||
[36m(21) :[0m [34mcopy-as {datadir/}file5R.txt {path2/} file5.txt[0m
|
||||
[36m(22) :[0m [34mcopy-as {datadir/}file5L.txt {path1/} file5.txt[0m
|
||||
|
||||
[36m(23) :[0m [34mtest changed on path2 and deleted on path1 - file6[0m
|
||||
[36m(24) :[0m [34mcopy-file {datadir/}file6.txt {path2/}[0m
|
||||
[36m(25) :[0m [34mdelete-file {path1/}file6.txt[0m
|
||||
|
||||
[36m(26) :[0m [34mtest changed on path1 and deleted on path2 - file7[0m
|
||||
[36m(27) :[0m [34mcopy-file {datadir/}file7.txt {path1/}[0m
|
||||
[36m(28) :[0m [34mdelete-file {path2/}file7.txt[0m
|
||||
|
||||
[36m(29) :[0m [34mtest bisync run[0m
|
||||
[36m(30) :[0m [34mbisync nomodtime[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": false,
|
||||
"Size": true,
|
||||
"Checksum": true,
|
||||
"NoSlowHash": false,
|
||||
"SlowHashSyncOnly": false,
|
||||
"DownloadHash": true
|
||||
}
|
||||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Building Path1 and Path2 listings
|
||||
INFO : Path1 checking for diffs
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile2.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[31mFile was deleted[0m[0m - [36mfile4.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile5.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[31mFile was deleted[0m[0m - [36mfile6.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile7.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[31mFile was deleted[0m[0m - [36mfile8.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36mfile11.txt[0m
|
||||
INFO : Path1: 7 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m 3 deleted[0m
|
||||
INFO : ([33mModified[0m: [36m 3 larger[0m, [34m 0 smaller[0m, [36m 3 hash differs[0m)
|
||||
INFO : Path2 checking for diffs
|
||||
INFO : - [34mPath2[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile1.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[31mFile was deleted[0m[0m - [36mfile3.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile5.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[33mFile changed: [35msize (larger)[0m, [35mhash[0m[0m[0m - [36mfile6.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[31mFile was deleted[0m[0m - [36mfile7.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[31mFile was deleted[0m[0m - [36mfile8.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mfile10.txt[0m
|
||||
INFO : Path2: 7 changes: [32m 1 new[0m, [33m 3 modified[0m, [31m 3 deleted[0m
|
||||
INFO : ([33mModified[0m: [36m 3 larger[0m, [34m 0 smaller[0m, [36m 3 hash differs[0m)
|
||||
INFO : Applying changes
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}file11.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}file2.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[31mQueue delete[0m[0m - [36m{path2/}file4.txt[0m
|
||||
NOTICE: - [34mWARNING[0m [35mNew or changed in both paths[0m - [36mfile5.txt[0m
|
||||
NOTICE: - [36mPath1[0m [35mRenaming Path1 copy[0m - [36m{path1/}file5.txt.conflict1[0m
|
||||
NOTICE: - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}file5.txt.conflict1[0m
|
||||
NOTICE: - [34mPath2[0m [35mRenaming Path2 copy[0m - [36m{path2/}file5.txt.conflict2[0m
|
||||
NOTICE: - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file5.txt.conflict2[0m
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file6.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}file7.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file1.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file10.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[31mQueue delete[0m[0m - [36m{path1/}file3.txt[0m
|
||||
INFO : - [34mPath2[0m [35mDo queued copies to[0m - [36mPath1[0m
|
||||
INFO : - [36mPath1[0m [35mDo queued copies to[0m - [36mPath2[0m
|
||||
INFO : Updating listings
|
||||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
1
cmd/bisync/testdata/test_nomodtime/initial/RCLONE_TEST
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is used for testing the health of rclone accesses to the local/remote file system. Do not delete.
|
0
cmd/bisync/testdata/test_nomodtime/initial/file1.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file2.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file3.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file4.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file5.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file6.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file7.txt
vendored
Normal file
0
cmd/bisync/testdata/test_nomodtime/initial/file8.txt
vendored
Normal file
1
cmd/bisync/testdata/test_nomodtime/modfiles/file1.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file10.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file11.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file2.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
Newer version
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file5L.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer and not equal to 5R
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file5R.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer and not equal to 5L
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file6.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer
|
1
cmd/bisync/testdata/test_nomodtime/modfiles/file7.txt
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
This file is newer
|
53
cmd/bisync/testdata/test_nomodtime/scenario.txt
vendored
Normal file
|
@ -0,0 +1,53 @@
|
|||
test changes
|
||||
# Exercise all of the various file change scenarios
|
||||
# - New on Path2 file10
|
||||
# - changed on Path2 file1
|
||||
# - New on Path1 file11
|
||||
# - changed on Path1 file2
|
||||
# - Deleted on Path2 file3
|
||||
# - Deleted on Path1 file4
|
||||
# - Changed on Path2 and on Path1 file5 (file5r, file5l)
|
||||
# - changed on Path2 and deleted on Path1 file6
|
||||
# - changed on Path1 and deleted on Path2 file7
|
||||
# - Deleted on both paths file8
|
||||
|
||||
test initial bisync
|
||||
bisync resync nomodtime
|
||||
|
||||
test make modifications on both paths
|
||||
test new on path2 - file10
|
||||
copy-file {datadir/}file10.txt {path2/}
|
||||
|
||||
test changed on path2 - file1
|
||||
copy-file {datadir/}file1.txt {path2/}
|
||||
|
||||
test new on path1 - file11
|
||||
copy-file {datadir/}file11.txt {path1/}
|
||||
|
||||
test changed on path1 - file2
|
||||
copy-file {datadir/}file2.txt {path1/}
|
||||
|
||||
test deleted on path2 - file3
|
||||
delete-file {path2/}file3.txt
|
||||
|
||||
test deleted on path1 - file4
|
||||
delete-file {path1/}file4.txt
|
||||
|
||||
test deleted on both paths - file8
|
||||
delete-file {path1/}file8.txt
|
||||
delete-file {path2/}file8.txt
|
||||
|
||||
test changed on both paths - file5 (file5R, file5L)
|
||||
copy-as {datadir/}file5R.txt {path2/} file5.txt
|
||||
copy-as {datadir/}file5L.txt {path1/} file5.txt
|
||||
|
||||
test changed on path2 and deleted on path1 - file6
|
||||
copy-file {datadir/}file6.txt {path2/}
|
||||
delete-file {path1/}file6.txt
|
||||
|
||||
test changed on path1 and deleted on path2 - file7
|
||||
copy-file {datadir/}file7.txt {path1/}
|
||||
delete-file {path2/}file7.txt
|
||||
|
||||
test bisync run
|
||||
bisync nomodtime
|
|
@ -1,3 +1,3 @@
|
|||
"folder/HeLlO,wOrLd!.txt"
|
||||
"folder/éééö.txt"
|
||||
"newfolder/HeLlO,wOrLd!.txt"
|
||||
"newfolder/éééö.txt"
|
||||
"測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/HeLlO,wOrLd!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-05T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "file1.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file2.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "file3.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "folder/éééö.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/hello,WORLD!.txt"
|
||||
- 19 - - 2001-01-03T00:00:00.000000000+0000 "newfolder/éééö.txt"
|
||||
- 19 - - 2001-01-02T00:00:00.000000000+0000 "測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"
|
||||
|
|
|
@ -103,13 +103,13 @@ INFO : [32mBisync successful[0m
|
|||
|
||||
|
||||
[36m(19) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt[0m
|
||||
[36m(20) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt[0m
|
||||
[36m(21) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt[0m
|
||||
[36m(20) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt[0m
|
||||
[36m(21) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt[0m
|
||||
|
||||
|
||||
[36m(22) :[0m [34mtouch-copy 2001-01-03 {datadir/}file1.txt {path2/}[0m
|
||||
[36m(23) :[0m [34mcopy-as-NFD {datadir/}file1.txt {path2/}folder éééö.txt[0m
|
||||
[36m(24) :[0m [34mcopy-as-NFD {datadir/}file1.txt {path2/}folder hello,WORLD!.txt[0m
|
||||
[36m(23) :[0m [34mcopy-as-NFD {datadir/}file1.txt {path2/}newfolder éééö.txt[0m
|
||||
[36m(24) :[0m [34mcopy-as-NFD {datadir/}file1.txt {path2/}newfolder hello,WORLD!.txt[0m
|
||||
|
||||
[36m(25) :[0m [34mtest bisync run with normalization[0m
|
||||
[36m(26) :[0m [34mbisync norm force[0m
|
||||
|
@ -126,14 +126,14 @@ INFO : Bisyncing with Comparison Settings:
|
|||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Building Path1 and Path2 listings
|
||||
INFO : Path1 checking for diffs
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36mfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36mfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36mnewfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36mnewfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mFile is new[0m[0m - [36m"測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"[0m
|
||||
INFO : Path1: 3 changes: [32m 3 new[0m, [33m 0 modified[0m, [31m 0 deleted[0m
|
||||
INFO : Path2 checking for diffs
|
||||
INFO : - [34mPath2[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36mfile1.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mfolder/éééö.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mfolder/hello,WORLD!.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mnewfolder/éééö.txt[0m
|
||||
INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mnewfolder/hello,WORLD!.txt[0m
|
||||
INFO : Path2: 3 changes: [32m 2 new[0m, [33m 1 modified[0m, [31m 0 deleted[0m
|
||||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m)
|
||||
INFO : Applying changes
|
||||
|
@ -141,12 +141,12 @@ INFO : Checking potential conflicts...
|
|||
NOTICE: {path2String}: 0 differences found
|
||||
NOTICE: {path2String}: 2 matching files
|
||||
INFO : Finished checking the potential conflicts. %!s(<nil>)
|
||||
NOTICE: - [34mWARNING[0m [35mNew or changed in both paths[0m - [36mfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : folder/HeLlO,wOrLd!.txt: Files are equal but will copy anyway to update modtime (will not rename)
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}folder/HeLlO,wOrLd!.txt[0m
|
||||
NOTICE: - [34mWARNING[0m [35mNew or changed in both paths[0m - [36mfolder/éééö.txt[0m
|
||||
INFO : folder/éééö.txt: Files are equal but will copy anyway to update modtime (will not rename)
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}folder/éééö.txt[0m
|
||||
NOTICE: - [34mWARNING[0m [35mNew or changed in both paths[0m - [36mnewfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : newfolder/HeLlO,wOrLd!.txt: Files are equal but will copy anyway to update modtime (will not rename)
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}newfolder/HeLlO,wOrLd!.txt[0m
|
||||
NOTICE: - [34mWARNING[0m [35mNew or changed in both paths[0m - [36mnewfolder/éééö.txt[0m
|
||||
INFO : newfolder/éééö.txt: Files are equal but will copy anyway to update modtime (will not rename)
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}newfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m"{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"[0m
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file1.txt[0m
|
||||
INFO : - [34mPath2[0m [35mDo queued copies to[0m - [36mPath1[0m
|
||||
|
@ -178,8 +178,8 @@ INFO : [32mBisync successful[0m
|
|||
[36m(29) :[0m [34mtest changed on one path[0m
|
||||
[36m(30) :[0m [34mtouch-copy 2001-01-05 {datadir/}file1.txt {path2/}[0m
|
||||
[36m(31) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt[0m
|
||||
[36m(32) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt[0m
|
||||
[36m(33) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt[0m
|
||||
[36m(32) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt[0m
|
||||
[36m(33) :[0m [34mcopy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt[0m
|
||||
[36m(34) :[0m [34mbisync norm[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
|
@ -194,8 +194,8 @@ INFO : Bisyncing with Comparison Settings:
|
|||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Building Path1 and Path2 listings
|
||||
INFO : Path1 checking for diffs
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36mfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36mfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36mnewfolder/HeLlO,wOrLd!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36mnewfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0m - [36m"測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"[0m
|
||||
INFO : Path1: 3 changes: [32m 0 new[0m, [33m 3 modified[0m, [31m 0 deleted[0m
|
||||
INFO : ([33mModified[0m: [36m 3 newer[0m, [34m 0 older[0m)
|
||||
|
@ -204,8 +204,8 @@ INFO : - [34mPath2[0m [35m[33mFile changed: [35mtime (newer)[0m[0m[0
|
|||
INFO : Path2: 1 changes: [32m 0 new[0m, [33m 1 modified[0m, [31m 0 deleted[0m
|
||||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m)
|
||||
INFO : Applying changes
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}folder/hello,WORLD!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}folder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}newfolder/hello,WORLD!.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m{path2/}newfolder/éééö.txt[0m
|
||||
INFO : - [36mPath1[0m [35m[32mQueue copy to[0m Path2[0m - [36m"{path2/}測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö/測試_Руский___ě_áñ👸🏼🧝🏾\u200d♀️💆🏿\u200d♂️🐨🤙🏼🤮🧑🏻\u200d🔧🧑\u200d🔬éö.txt"[0m
|
||||
INFO : - [34mPath2[0m [35m[32mQueue copy to[0m Path1[0m - [36m{path1/}file1.txt[0m
|
||||
INFO : - [34mPath2[0m [35mDo queued copies to[0m - [36mPath1[0m
|
||||
|
|
|
@ -32,13 +32,13 @@ bisync resync
|
|||
# copy NFC version to Path1
|
||||
# note: need to slightly change the name to avoid Drive known issue #3262 which could try to copy the old name from the trash
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
|
||||
|
||||
# place newer NFD version on Path2
|
||||
touch-copy 2001-01-03 {datadir/}file1.txt {path2/}
|
||||
copy-as-NFD {datadir/}file1.txt {path2/}folder éééö.txt
|
||||
copy-as-NFD {datadir/}file1.txt {path2/}folder hello,WORLD!.txt
|
||||
copy-as-NFD {datadir/}file1.txt {path2/}newfolder éééö.txt
|
||||
copy-as-NFD {datadir/}file1.txt {path2/}newfolder hello,WORLD!.txt
|
||||
|
||||
test bisync run with normalization
|
||||
bisync norm force
|
||||
|
@ -49,6 +49,6 @@ bisync resync norm
|
|||
test changed on one path
|
||||
touch-copy 2001-01-05 {datadir/}file1.txt {path2/}
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö 測試_Руский___ě_áñ👸🏼🧝🏾♀️💆🏿♂️🐨🤙🏼🤮🧑🏻🔧🧑🔬éö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}folder éééö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}folder HeLlO,wOrLd!.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}newfolder éééö.txt
|
||||
copy-as-NFC {datadir/}file1.txt {path1/}newfolder HeLlO,wOrLd!.txt
|
||||
bisync norm
|
|
@ -1,8 +1,10 @@
|
|||
[36m(01) :[0m [34mtest rclone-args[0m
|
||||
|
||||
|
||||
[36m(02) :[0m [34mtest initial bisync[0m
|
||||
[36m(03) :[0m [34mbisync resync checksum[0m
|
||||
[36m(02) :[0m [34mtouch-glob 2001-01-02 {datadir/} *[0m
|
||||
|
||||
[36m(03) :[0m [34mtest initial bisync[0m
|
||||
[36m(04) :[0m [34mbisync resync checksum[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": false,
|
||||
|
@ -20,19 +22,20 @@ INFO : Resync updating listings
|
|||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(04) :[0m [34mtest place newer files on both paths[0m
|
||||
|
||||
|
||||
[36m(05) :[0m [34mtouch-glob 2001-01-02 {datadir/} *[0m
|
||||
|
||||
[36m(05) :[0m [34mtest place newer files on both paths[0m
|
||||
[36m(06) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(07) :[0m [34mcopy-file {datadir/}file2.txt {path2/}[0m
|
||||
|
||||
[36m(08) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(09) :[0m [34mcopy-file {datadir/}file21.txt {path2/}subdir[0m
|
||||
|
||||
[36m(10) :[0m [34mtest run bisync with custom options[0m
|
||||
[36m(11) :[0m [34mbisync checksum[0m
|
||||
|
||||
[36m(10) :[0m [34mtouch-glob 2001-01-02 {path1/} file1.txt[0m
|
||||
[36m(11) :[0m [34mtouch-glob 2001-01-02 {path2/} file2.txt[0m
|
||||
[36m(12) :[0m [34mtouch-glob 2001-01-02 {path1/}subdir/ file20.txt[0m
|
||||
[36m(13) :[0m [34mtouch-glob 2001-01-02 {path2/}subdir/ file21.txt[0m
|
||||
|
||||
[36m(14) :[0m [34mtest run bisync with custom options[0m
|
||||
[36m(15) :[0m [34mbisync checksum[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": false,
|
||||
|
@ -61,15 +64,20 @@ INFO : Updating listings
|
|||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(12) :[0m [34mtouch-glob 2007-07-23 {datadir/} *[0m
|
||||
[36m(16) :[0m [34mtouch-glob 2007-07-23 {datadir/} *[0m
|
||||
|
||||
[36m(13) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(14) :[0m [34mcopy-file {datadir/}file2.txt {path2/}[0m
|
||||
[36m(17) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(18) :[0m [34mcopy-file {datadir/}file2.txt {path2/}[0m
|
||||
[36m(19) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(20) :[0m [34mcopy-as {datadir/}file21.txt {path2/} file1.txt[0m
|
||||
|
||||
[36m(15) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(16) :[0m [34mcopy-as {datadir/}file21.txt {path2/} file1.txt[0m
|
||||
|
||||
[36m(17) :[0m [34mbisync size-only[0m
|
||||
[36m(21) :[0m [34mtouch-glob 2007-07-23 {path1/} file1.txt[0m
|
||||
[36m(22) :[0m [34mtouch-glob 2007-07-23 {path2/} file2.txt[0m
|
||||
[36m(23) :[0m [34mtouch-glob 2007-07-23 {path1/}subdir/ file20.txt[0m
|
||||
[36m(24) :[0m [34mtouch-glob 2007-07-23 {path2/}subdir/ file21.txt[0m
|
||||
|
||||
[36m(25) :[0m [34mbisync size-only[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
|
@ -93,7 +101,7 @@ INFO : - [34mPath2[0m [35mDo queued copies to[0m - [36mP
|
|||
INFO : Updating listings
|
||||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
[36m(18) :[0m [34mbisync resync[0m
|
||||
[36m(26) :[0m [34mbisync resync[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
|
@ -112,13 +120,18 @@ INFO : Resync updating listings
|
|||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(19) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(20) :[0m [34mcopy-file {datadir/}file2.txt {path2/}[0m
|
||||
[36m(27) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(28) :[0m [34mcopy-file {datadir/}file2.txt {path2/}[0m
|
||||
[36m(29) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(30) :[0m [34mcopy-file {datadir/}file21.txt {path2/}subdir[0m
|
||||
|
||||
[36m(21) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(22) :[0m [34mcopy-file {datadir/}file21.txt {path2/}subdir[0m
|
||||
|
||||
[36m(23) :[0m [34mbisync ignore-size[0m
|
||||
[36m(31) :[0m [34mtouch-glob 2007-07-23 {path1/} file1.txt[0m
|
||||
[36m(32) :[0m [34mtouch-glob 2007-07-23 {path2/} file2.txt[0m
|
||||
[36m(33) :[0m [34mtouch-glob 2007-07-23 {path1/}subdir/ file20.txt[0m
|
||||
[36m(34) :[0m [34mtouch-glob 2007-07-23 {path2/}subdir/ file21.txt[0m
|
||||
|
||||
[36m(35) :[0m [34mbisync ignore-size[0m
|
||||
INFO : [2mSetting --ignore-listing-checksum as neither --checksum nor --compare checksum are set.[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
|
@ -145,7 +158,7 @@ INFO : Updating listings
|
|||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(24) :[0m [34mbisync resync compare-all[0m
|
||||
[36m(36) :[0m [34mbisync resync compare-all[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": true,
|
||||
|
@ -163,16 +176,22 @@ INFO : Resync updating listings
|
|||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
INFO : [32mBisync successful[0m
|
||||
|
||||
[36m(25) :[0m [34mcopy-as {datadir/}file21.txt {path2/} file2.txt[0m
|
||||
[36m(37) :[0m [34mcopy-as {datadir/}file21.txt {path2/} file2.txt[0m
|
||||
|
||||
[36m(26) :[0m [34mtouch-glob 2023-08-26 {datadir/} *[0m
|
||||
[36m(38) :[0m [34mtouch-glob 2023-08-26 {datadir/} *[0m
|
||||
|
||||
[36m(27) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
[36m(39) :[0m [34mcopy-file {datadir/}file1.txt {path1/}[0m
|
||||
|
||||
[36m(28) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(29) :[0m [34mcopy-file {datadir/}file21.txt {path2/}subdir[0m
|
||||
[36m(40) :[0m [34mcopy-file {datadir/}file20.txt {path1/}subdir[0m
|
||||
[36m(41) :[0m [34mcopy-file {datadir/}file21.txt {path2/}subdir[0m
|
||||
|
||||
[36m(30) :[0m [34mbisync compare-all[0m
|
||||
|
||||
[36m(42) :[0m [34mtouch-glob 2007-07-23 {path2/} file2.txt[0m
|
||||
[36m(43) :[0m [34mtouch-glob 2023-08-26 {path1/} file1.txt[0m
|
||||
[36m(44) :[0m [34mtouch-glob 2023-08-26 {path1/}subdir/ file20.txt[0m
|
||||
[36m(45) :[0m [34mtouch-glob 2023-08-26 {path2/}subdir/ file21.txt[0m
|
||||
|
||||
[36m(46) :[0m [34mbisync compare-all[0m
|
||||
INFO : Bisyncing with Comparison Settings:
|
||||
{
|
||||
"Modtime": true,
|
||||
|
|
|
@ -2,21 +2,24 @@ test rclone-args
|
|||
# Pass generic flags to rclone under test using as an example
|
||||
# the --size-only flag, which changes the meaning of operations.
|
||||
|
||||
# force specific modification time since file time is lost through git
|
||||
touch-glob 2001-01-02 {datadir/} *
|
||||
|
||||
test initial bisync
|
||||
bisync resync checksum
|
||||
|
||||
test place newer files on both paths
|
||||
|
||||
# force specific modification time since file time is lost through git
|
||||
touch-glob 2001-01-02 {datadir/} *
|
||||
|
||||
copy-file {datadir/}file1.txt {path1/}
|
||||
copy-file {datadir/}file2.txt {path2/}
|
||||
|
||||
copy-file {datadir/}file20.txt {path1/}subdir
|
||||
copy-file {datadir/}file21.txt {path2/}subdir
|
||||
|
||||
# re-touch dest to avoid race conditions with parallel tests
|
||||
touch-glob 2001-01-02 {path1/} file1.txt
|
||||
touch-glob 2001-01-02 {path2/} file2.txt
|
||||
touch-glob 2001-01-02 {path1/}subdir/ file20.txt
|
||||
touch-glob 2001-01-02 {path2/}subdir/ file21.txt
|
||||
|
||||
test run bisync with custom options
|
||||
bisync checksum
|
||||
|
||||
|
@ -24,19 +27,29 @@ touch-glob 2007-07-23 {datadir/} *
|
|||
|
||||
copy-file {datadir/}file1.txt {path1/}
|
||||
copy-file {datadir/}file2.txt {path2/}
|
||||
|
||||
copy-file {datadir/}file20.txt {path1/}subdir
|
||||
copy-as {datadir/}file21.txt {path2/} file1.txt
|
||||
|
||||
# re-touch dest to avoid race conditions with parallel tests
|
||||
touch-glob 2007-07-23 {path1/} file1.txt
|
||||
touch-glob 2007-07-23 {path2/} file2.txt
|
||||
touch-glob 2007-07-23 {path1/}subdir/ file20.txt
|
||||
touch-glob 2007-07-23 {path2/}subdir/ file21.txt
|
||||
|
||||
bisync size-only
|
||||
bisync resync
|
||||
|
||||
copy-file {datadir/}file1.txt {path1/}
|
||||
copy-file {datadir/}file2.txt {path2/}
|
||||
|
||||
copy-file {datadir/}file20.txt {path1/}subdir
|
||||
copy-file {datadir/}file21.txt {path2/}subdir
|
||||
|
||||
# re-touch dest to avoid race conditions with parallel tests
|
||||
touch-glob 2007-07-23 {path1/} file1.txt
|
||||
touch-glob 2007-07-23 {path2/} file2.txt
|
||||
touch-glob 2007-07-23 {path1/}subdir/ file20.txt
|
||||
touch-glob 2007-07-23 {path2/}subdir/ file21.txt
|
||||
|
||||
bisync ignore-size
|
||||
|
||||
bisync resync compare-all
|
||||
|
@ -50,4 +63,10 @@ copy-file {datadir/}file1.txt {path1/}
|
|||
copy-file {datadir/}file20.txt {path1/}subdir
|
||||
copy-file {datadir/}file21.txt {path2/}subdir
|
||||
|
||||
# re-touch dest to avoid race conditions with parallel tests
|
||||
touch-glob 2007-07-23 {path2/} file2.txt
|
||||
touch-glob 2023-08-26 {path1/} file1.txt
|
||||
touch-glob 2023-08-26 {path1/}subdir/ file20.txt
|
||||
touch-glob 2023-08-26 {path2/}subdir/ file21.txt
|
||||
|
||||
bisync compare-all
|
14
cmd/bisync/testdata/test_resolve/golden/test.log
vendored
|
@ -52,7 +52,7 @@ INFO : Path2: 1 changes: [32m 0 new[0m, [33m 1 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m, [36m 1 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -97,7 +97,7 @@ INFO : Path2: 1 changes: [32m 0 new[0m, [33m 1 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -142,7 +142,7 @@ INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mf
|
|||
INFO : Path2: 1 changes: [32m 1 new[0m, [33m 0 modified[0m, [31m 0 deleted[0m
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -188,7 +188,7 @@ INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mf
|
|||
INFO : Path2: 1 changes: [32m 1 new[0m, [33m 0 modified[0m, [31m 0 deleted[0m
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -234,7 +234,7 @@ INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mf
|
|||
INFO : Path2: 1 changes: [32m 1 new[0m, [33m 0 modified[0m, [31m 0 deleted[0m
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -283,8 +283,8 @@ INFO : Path2: 2 changes: [32m 1 new[0m, [33m 1 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m, [36m 1 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file1.txt: md5 differ
|
||||
ERROR : file2.txt: md5 differ
|
||||
ERROR : file1.txt: {hashtype} differ
|
||||
ERROR : file2.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 2 differences found
|
||||
NOTICE: {path2String}: 2 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 2 differences found
|
||||
|
|
|
@ -101,7 +101,7 @@ INFO : Bisyncing with Comparison Settings:
|
|||
INFO : Synching Path1 "{path1/}" with Path2 "{path2/}"
|
||||
INFO : Copying Path2 files to Path1
|
||||
INFO : - [34mPath2[0m [35mResync is copying files to[0m - [36mPath1[0m
|
||||
INFO : file1.txt: Path2 is older. Path1: 2003-09-03 20:00:00 -0400 EDT, Path2: 2003-07-22 20:00:00 -0400 EDT, Difference: 1032h0m0s
|
||||
INFO : file1.txt: Path2 is older. Path1: 2003-09-04 00:00:00 +0000 UTC, Path2: 2003-07-23 00:00:00 +0000 UTC, Difference: 1032h0m0s
|
||||
INFO : - [36mPath1[0m [35mResync is copying files to[0m - [36mPath2[0m
|
||||
INFO : Resync updating listings
|
||||
INFO : Validating listings for Path1 "{path1/}" vs Path2 "{path2/}"
|
||||
|
|
|
@ -52,7 +52,7 @@ INFO : Path2: 1 changes: [32m 0 new[0m, [33m 1 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 1 newer[0m, [34m 0 older[0m, [36m 1 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -196,7 +196,7 @@ INFO : Path2: 50 changes: [32m 43 new[0m, [33m 7 modified[0m, [31m
|
|||
INFO : ([33mModified[0m: [36m 7 newer[0m, [34m 0 older[0m, [36m 7 larger[0m, [34m 0 smaller[0m)
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
@ -339,7 +339,7 @@ INFO : - [34mPath2[0m [35m[32mFile is new[0m[0m - [36mf
|
|||
INFO : Path2: 1 changes: [32m 1 new[0m, [33m 0 modified[0m, [31m 0 deleted[0m
|
||||
INFO : Applying changes
|
||||
INFO : Checking potential conflicts...
|
||||
ERROR : file5.txt: md5 differ
|
||||
ERROR : file5.txt: {hashtype} differ
|
||||
NOTICE: {path2String}: 1 differences found
|
||||
NOTICE: {path2String}: 1 errors while checking
|
||||
INFO : Finished checking the potential conflicts. 1 differences found
|
||||
|
|
|
@ -223,8 +223,10 @@ func logModTimeUpload(dst fs.Object) {
|
|||
}
|
||||
|
||||
// EqualFn allows replacing Equal() with a custom function during NeedTransfer()
|
||||
type EqualFn func(ctx context.Context, src fs.ObjectInfo, dst fs.Object) bool
|
||||
type equalFnContextKey struct{}
|
||||
type (
|
||||
EqualFn func(ctx context.Context, src fs.ObjectInfo, dst fs.Object) bool
|
||||
equalFnContextKey struct{}
|
||||
)
|
||||
|
||||
var equalFnKey = equalFnContextKey{}
|
||||
|
||||
|
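The hunk above collapses the EqualFn type and its private context key into one type block. As a rough, hypothetical sketch of how such a context-keyed override is typically installed and read back (the helpers withEqualFn and getEqualFn below are illustrative names only, not rclone's actual API, and the signature is simplified to plain strings so the example is self-contained):

// Hedged sketch only: withEqualFn and getEqualFn are illustrative names,
// not functions from the rclone codebase.
package main

import (
	"context"
	"fmt"
)

// EqualFn mirrors the pluggable equality check shown in the diff above,
// simplified here to plain strings to keep the example standalone.
type (
	EqualFn           func(ctx context.Context, src, dst string) bool
	equalFnContextKey struct{}
)

var equalFnKey = equalFnContextKey{}

// withEqualFn stores a custom EqualFn on the context under the private key.
func withEqualFn(ctx context.Context, fn EqualFn) context.Context {
	return context.WithValue(ctx, equalFnKey, fn)
}

// getEqualFn retrieves the EqualFn, reporting whether one was installed.
func getEqualFn(ctx context.Context) (EqualFn, bool) {
	fn, ok := ctx.Value(equalFnKey).(EqualFn)
	return fn, ok
}

func main() {
	ctx := withEqualFn(context.Background(), func(ctx context.Context, src, dst string) bool {
		return src == dst // toy comparison standing in for a real equality check
	})
	if fn, ok := getEqualFn(ctx); ok {
		fmt.Println("custom check says equal:", fn(ctx, "a.txt", "a.txt"))
	}
}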
@ -451,7 +453,7 @@ func move(ctx context.Context, fdst fs.Fs, dst fs.Object, remote string, src fs.
|
|||
}
|
||||
} else if needsMoveCaseInsensitive(fdst, fdst, remote, src.Remote(), false) {
|
||||
doMove = func(ctx context.Context, src fs.Object, remote string) (fs.Object, error) {
|
||||
return moveCaseInsensitive(ctx, fdst, fdst, remote, src.Remote(), false, src)
|
||||
return MoveCaseInsensitive(ctx, fdst, fdst, remote, src.Remote(), false, src)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1453,9 +1455,7 @@ func Rmdirs(ctx context.Context, f fs.Fs, dir string, leaveRoot bool) error {
|
|||
}
|
||||
}
|
||||
|
||||
var (
|
||||
errCount = errcount.New()
|
||||
)
|
||||
errCount := errcount.New()
|
||||
// Delete all directories at the same level in parallel
|
||||
for level := len(toDelete) - 1; level >= 0; level-- {
|
||||
dirs := toDelete[level]
|
||||
|
@ -1775,7 +1775,6 @@ func copyURLFn(ctx context.Context, dstFileName string, url string, autoFilename
|
|||
|
||||
// CopyURL copies the data from the url to (fdst, dstFileName)
|
||||
func CopyURL(ctx context.Context, fdst fs.Fs, dstFileName string, url string, autoFilename, dstFileNameFromHeader bool, noClobber bool) (dst fs.Object, err error) {
|
||||
|
||||
err = copyURLFn(ctx, dstFileName, url, autoFilename, dstFileNameFromHeader, func(ctx context.Context, dstFileName string, in io.ReadCloser, size int64, modTime time.Time) (err error) {
|
||||
if noClobber {
|
||||
_, err = fdst.NewObject(ctx, dstFileName)
|
||||
|
@ -1852,16 +1851,13 @@ func needsMoveCaseInsensitive(fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFil
|
|||
return !cp && fdst.Name() == fsrc.Name() && fdst.Features().CaseInsensitive && dstFileName != srcFileName && strings.EqualFold(dstFilePath, srcFilePath)
|
||||
}
|
||||
|
||||
// Special case for changing case of a file on a case insensitive remote
|
||||
// MoveCaseInsensitive handles changing case of a file on a case insensitive remote.
|
||||
// This will move the file to a temporary name then
|
||||
// move it back to the intended destination. This is required
|
||||
// to avoid issues with certain remotes and avoid file deletion.
|
||||
// returns nil, nil if !needsMoveCaseInsensitive.
|
||||
// this does not account a transfer -- the caller should do that if desired.
|
||||
func moveCaseInsensitive(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool, srcObj fs.Object) (newDst fs.Object, err error) {
|
||||
if !needsMoveCaseInsensitive(fdst, fsrc, dstFileName, srcFileName, cp) {
|
||||
return nil, nil
|
||||
}
|
||||
func MoveCaseInsensitive(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName string, srcFileName string, cp bool, srcObj fs.Object) (newDst fs.Object, err error) {
|
||||
logger, _ := GetLogger(ctx)
|
||||
|
||||
// Choose operations
|
||||
|
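The newly exported MoveCaseInsensitive is documented above as a two-step rename: move the file to a temporary name, then to the intended destination, so a case-insensitive backend never sees a same-name collision. A minimal, hypothetical illustration of that pattern follows; the mover interface, mapFS, and caseOnlyRename are assumptions made for the sketch, not rclone types.

// Hedged sketch of the temp-name rename pattern described above.
// mover, mapFS, and caseOnlyRename are illustrative, not rclone API.
package main

import (
	"fmt"
	"strings"
)

// mover is a stand-in for a backend that can rename files.
type mover interface {
	Move(oldName, newName string) error
}

// mapFS is a toy in-memory mover used to exercise the sketch.
type mapFS map[string]string

func (m mapFS) Move(oldName, newName string) error {
	data, ok := m[oldName]
	if !ok {
		return fmt.Errorf("%s: not found", oldName)
	}
	delete(m, oldName)
	m[newName] = data
	return nil
}

// caseOnlyRename changes only the case of a name by going through an
// intermediate temporary name, so neither move targets an existing name.
func caseOnlyRename(m mover, src, dst string) error {
	if src == dst || !strings.EqualFold(src, dst) {
		return fmt.Errorf("not a case-only rename: %q -> %q", src, dst)
	}
	tmp := dst + ".rename-tmp" // temporary name distinct from src and dst
	if err := m.Move(src, tmp); err != nil {
		return err
	}
	return m.Move(tmp, dst)
}

func main() {
	fs := mapFS{"hello,WORLD!.txt": "contents"}
	if err := caseOnlyRename(fs, "hello,WORLD!.txt", "HeLlO,wOrLd!.txt"); err != nil {
		fmt.Println("rename failed:", err)
		return
	}
	fmt.Println("renamed:", fs)
}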
@ -1947,7 +1943,7 @@ func moveOrCopyFile(ctx context.Context, fdst fs.Fs, fsrc fs.Fs, dstFileName str
|
|||
defer func() {
|
||||
tr.Done(ctx, err)
|
||||
}()
|
||||
_, err = moveCaseInsensitive(ctx, fdst, fsrc, dstFileName, srcFileName, cp, srcObj)
|
||||
_, err = MoveCaseInsensitive(ctx, fdst, fsrc, dstFileName, srcFileName, cp, srcObj)
|
||||
return err
|
||||
}
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ tests:
|
|||
fastlist: true
|
||||
- path: fs/sync
|
||||
fastlist: true
|
||||
- path: cmd/bisync
|
||||
- path: vfs
|
||||
- path: cmd/serve/restic
|
||||
localonly: true
|
||||
|
|