2016-08-06 00:12:27 +08:00
|
|
|
package dedupe
|
|
|
|
|
|
|
|
import (
|
2019-06-17 16:34:30 +08:00
|
|
|
"context"
|
2016-08-06 00:12:27 +08:00
|
|
|
"log"
|
|
|
|
|
2019-07-29 01:47:38 +08:00
|
|
|
"github.com/rclone/rclone/cmd"
|
2019-10-11 23:55:04 +08:00
|
|
|
"github.com/rclone/rclone/fs/config/flags"
|
2019-07-29 01:47:38 +08:00
|
|
|
"github.com/rclone/rclone/fs/operations"
|
2016-08-06 00:12:27 +08:00
|
|
|
"github.com/spf13/cobra"
|
|
|
|
)
|
|
|
|
|
|
|
|
var (
	// dedupeMode controls how remaining duplicates are resolved.
	// Defaults to asking the user interactively for each group; it can
	// be overridden with --dedupe-mode or the optional [mode] argument.
	dedupeMode = operations.DeduplicateInteractive
	// byHash switches dedupe from grouping files by name to grouping
	// them by hash (set via the --by-hash flag).
	byHash = false
)
|
|
|
|
|
|
|
|
func init() {
|
2019-10-11 23:58:11 +08:00
|
|
|
cmd.Root.AddCommand(commandDefinition)
|
2019-10-11 23:55:04 +08:00
|
|
|
cmdFlag := commandDefinition.Flags()
|
2020-01-19 19:09:45 +08:00
|
|
|
flags.FVarP(cmdFlag, &dedupeMode, "dedupe-mode", "", "Dedupe mode interactive|skip|first|newest|oldest|largest|smallest|rename.")
|
2020-10-13 23:22:02 +08:00
|
|
|
flags.BoolVarP(cmdFlag, &byHash, "by-hash", "", false, "Find indentical hashes rather than names")
|
2016-08-06 00:12:27 +08:00
|
|
|
}
|
|
|
|
|
2019-10-11 23:58:11 +08:00
|
|
|
// commandDefinition implements `rclone dedupe`. The mode may be given
// either via the --dedupe-mode flag or as an optional leading [mode]
// positional argument; see dedupeMode/byHash for the backing state.
var commandDefinition = &cobra.Command{
	Use:   "dedupe [mode] remote:path",
	Short: `Interactively find duplicate filenames and delete/rename them.`,
	Long: `

By default ` + "`dedupe`" + ` interactively finds files with duplicate
names and offers to delete all but one or rename them to be
different. This is known as deduping by name.

Deduping by name is only useful with backends like Google Drive which
can have duplicate file names. It can be run on wrapping backends
(e.g. crypt) if they wrap a backend which supports duplicate file
names.

However if --by-hash is passed in then dedupe will find files with
duplicate hashes instead which will work on any backend which supports
at least one hash. This can be used to find files with duplicate
content. This is known as deduping by hash.

If deduping by name, first rclone will merge directories with the same
name. It will do this iteratively until all the identically named
directories have been merged.

Next, if deduping by name, for every group of duplicate file names /
hashes, it will delete all but one identical files it finds without
confirmation. This means that for most duplicated files the ` +
		"`dedupe`" + ` command will not be interactive.

` + "`dedupe`" + ` considers files to be identical if they have the
same file path and the same hash. If the backend does not support hashes (e.g. crypt wrapping
Google Drive) then they will never be found to be identical. If you
use the ` + "`--size-only`" + ` flag then files will be considered
identical if they have the same size (any hash will be ignored). This
can be useful on crypt backends which do not support hashes.

Next rclone will resolve the remaining duplicates. Exactly which
action is taken depends on the dedupe mode. By default rclone will
interactively query the user for each one.

**Important**: Since this can cause data loss, test first with the
` + "`--dry-run` or the `--interactive`/`-i`" + ` flag.

Here is an example run.

Before - with duplicates

    $ rclone lsl drive:dupes
      6048320 2016-03-05 16:23:16.798000000 one.txt
      6048320 2016-03-05 16:23:11.775000000 one.txt
       564374 2016-03-05 16:23:06.731000000 one.txt
      6048320 2016-03-05 16:18:26.092000000 one.txt
      6048320 2016-03-05 16:22:46.185000000 two.txt
      1744073 2016-03-05 16:22:38.104000000 two.txt
       564374 2016-03-05 16:22:52.118000000 two.txt

Now the ` + "`dedupe`" + ` session

    $ rclone dedupe drive:dupes
    2016/03/05 16:24:37 Google drive root 'dupes': Looking for duplicates using interactive mode.
    one.txt: Found 4 files with duplicate names
    one.txt: Deleting 2/3 identical duplicates (MD5 "1eedaa9fe86fd4b8632e2ac549403b36")
    one.txt: 2 duplicates remain
      1:      6048320 bytes, 2016-03-05 16:23:16.798000000, MD5 1eedaa9fe86fd4b8632e2ac549403b36
      2:       564374 bytes, 2016-03-05 16:23:06.731000000, MD5 7594e7dc9fc28f727c42ee3e0749de81
    s) Skip and do nothing
    k) Keep just one (choose which in next step)
    r) Rename all to be different (by changing file.jpg to file-1.jpg)
    s/k/r> k
    Enter the number of the file to keep> 1
    one.txt: Deleted 1 extra copies
    two.txt: Found 3 files with duplicate names
    two.txt: 3 duplicates remain
      1:       564374 bytes, 2016-03-05 16:22:52.118000000, MD5 7594e7dc9fc28f727c42ee3e0749de81
      2:      6048320 bytes, 2016-03-05 16:22:46.185000000, MD5 1eedaa9fe86fd4b8632e2ac549403b36
      3:      1744073 bytes, 2016-03-05 16:22:38.104000000, MD5 851957f7fb6f0bc4ce76be966d336802
    s) Skip and do nothing
    k) Keep just one (choose which in next step)
    r) Rename all to be different (by changing file.jpg to file-1.jpg)
    s/k/r> r
    two-1.txt: renamed from: two.txt
    two-2.txt: renamed from: two.txt
    two-3.txt: renamed from: two.txt

The result being

    $ rclone lsl drive:dupes
      6048320 2016-03-05 16:23:16.798000000 one.txt
       564374 2016-03-05 16:22:52.118000000 two-1.txt
      6048320 2016-03-05 16:22:46.185000000 two-2.txt
      1744073 2016-03-05 16:22:38.104000000 two-3.txt

Dedupe can be run non interactively using the ` + "`" + `--dedupe-mode` + "`" + ` flag or by using an extra parameter with the same value

  * ` + "`" + `--dedupe-mode interactive` + "`" + ` - interactive as above.
  * ` + "`" + `--dedupe-mode skip` + "`" + ` - removes identical files then skips anything left.
  * ` + "`" + `--dedupe-mode first` + "`" + ` - removes identical files then keeps the first one.
  * ` + "`" + `--dedupe-mode newest` + "`" + ` - removes identical files then keeps the newest one.
  * ` + "`" + `--dedupe-mode oldest` + "`" + ` - removes identical files then keeps the oldest one.
  * ` + "`" + `--dedupe-mode largest` + "`" + ` - removes identical files then keeps the largest one.
  * ` + "`" + `--dedupe-mode smallest` + "`" + ` - removes identical files then keeps the smallest one.
  * ` + "`" + `--dedupe-mode rename` + "`" + ` - removes identical files then renames the rest to be different.
  * ` + "`" + `--dedupe-mode list` + "`" + ` - lists duplicate dirs and files only and changes nothing.

For example to rename all the identically named photos in your Google Photos directory, do

    rclone dedupe --dedupe-mode rename "drive:Google Photos"

Or

    rclone dedupe rename "drive:Google Photos"
`,
	Run: func(command *cobra.Command, args []string) {
		// Accepts 1 or 2 positional args: an optional mode followed by
		// the remote path.
		cmd.CheckArgs(1, 2, command, args)
		if len(args) > 1 {
			// Two args: the first is the dedupe mode; parse it into
			// the package-level dedupeMode (overriding --dedupe-mode).
			err := dedupeMode.Set(args[0])
			if err != nil {
				log.Fatal(err)
			}
			args = args[1:]
		}
		fdst := cmd.NewFsSrc(args)
		cmd.Run(false, false, command, func() error {
			return operations.Deduplicate(context.Background(), fdst, dedupeMode, byHash)
		})
	},
}
|