2017-07-03 22:05:27 +08:00
|
|
|
|
package info
|
|
|
|
|
|
|
|
|
|
// FIXME once translations are implemented will need a no-escape
|
2020-08-09 01:02:18 +08:00
|
|
|
|
// option for Put so we can make these tests work again
|
2017-07-03 22:05:27 +08:00
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"bytes"
|
2019-06-17 16:34:30 +08:00
|
|
|
|
"context"
|
2019-05-14 23:49:55 +08:00
|
|
|
|
"encoding/json"
|
2017-07-03 22:05:27 +08:00
|
|
|
|
"fmt"
|
2017-08-22 14:00:10 +08:00
|
|
|
|
"io"
|
2020-08-09 01:02:18 +08:00
|
|
|
|
"log"
|
2019-05-14 23:49:55 +08:00
|
|
|
|
"os"
|
|
|
|
|
"path"
|
|
|
|
|
"regexp"
|
2017-07-03 22:05:27 +08:00
|
|
|
|
"sort"
|
2019-05-14 23:49:55 +08:00
|
|
|
|
"strconv"
|
2017-07-03 22:05:27 +08:00
|
|
|
|
"strings"
|
|
|
|
|
"sync"
|
|
|
|
|
"time"
|
|
|
|
|
|
2019-07-29 01:47:38 +08:00
|
|
|
|
"github.com/rclone/rclone/cmd"
|
2020-08-09 01:02:18 +08:00
|
|
|
|
"github.com/rclone/rclone/cmd/test"
|
|
|
|
|
"github.com/rclone/rclone/cmd/test/info/internal"
|
2019-07-29 01:47:38 +08:00
|
|
|
|
"github.com/rclone/rclone/fs"
|
2019-10-11 23:55:04 +08:00
|
|
|
|
"github.com/rclone/rclone/fs/config/flags"
|
2019-07-29 01:47:38 +08:00
|
|
|
|
"github.com/rclone/rclone/fs/hash"
|
|
|
|
|
"github.com/rclone/rclone/fs/object"
|
2019-08-06 19:44:08 +08:00
|
|
|
|
"github.com/rclone/rclone/lib/random"
|
2017-07-03 22:05:27 +08:00
|
|
|
|
"github.com/spf13/cobra"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// Command line flags for the info command, plus the patterns used to
// recognise the test file names it writes.
var (
	// writeJSON is the file to write a JSON report to (--write-json); empty means no report
	writeJSON string
	// checkNormalization enables the UTF-8 normalization test (--check-normalization)
	checkNormalization bool
	// checkControl enables the control character file name test (--check-control)
	checkControl bool
	// checkLength enables the maximum file name length test (--check-length)
	checkLength bool
	// checkStreaming enables the unknown-size upload test (--check-streaming)
	checkStreaming bool
	// all turns on every test (--all)
	all bool
	// uploadWait is an optional pause after each file write (--upload-wait)
	uploadWait time.Duration
	// positionLeftRe parses names written as <s>-position-left-<hex>
	positionLeftRe = regexp.MustCompile(`(?s)^(.*)-position-left-([[:xdigit:]]+)$`)
	// positionMiddleRe parses names written as position-middle-<hex>-<s>-
	positionMiddleRe = regexp.MustCompile(`(?s)^position-middle-([[:xdigit:]]+)-(.*)-$`)
	// positionRightRe parses names written as position-right-<hex>-<s>
	positionRightRe = regexp.MustCompile(`(?s)^position-right-([[:xdigit:]]+)-(.*)$`)
)
|
|
|
|
|
|
|
|
|
|
func init() {
|
2020-08-09 01:02:18 +08:00
|
|
|
|
test.Command.AddCommand(commandDefinition)
|
2019-10-11 23:55:04 +08:00
|
|
|
|
cmdFlags := commandDefinition.Flags()
|
2021-08-16 17:30:01 +08:00
|
|
|
|
flags.StringVarP(cmdFlags, &writeJSON, "write-json", "", "", "Write results to file")
|
|
|
|
|
flags.BoolVarP(cmdFlags, &checkNormalization, "check-normalization", "", false, "Check UTF-8 Normalization")
|
|
|
|
|
flags.BoolVarP(cmdFlags, &checkControl, "check-control", "", false, "Check control characters")
|
|
|
|
|
flags.DurationVarP(cmdFlags, &uploadWait, "upload-wait", "", 0, "Wait after writing a file")
|
|
|
|
|
flags.BoolVarP(cmdFlags, &checkLength, "check-length", "", false, "Check max filename length")
|
|
|
|
|
flags.BoolVarP(cmdFlags, &checkStreaming, "check-streaming", "", false, "Check uploads with indeterminate file size")
|
|
|
|
|
flags.BoolVarP(cmdFlags, &all, "all", "", false, "Run all tests")
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-10-11 23:58:11 +08:00
|
|
|
|
var commandDefinition = &cobra.Command{
|
2017-07-03 22:05:27 +08:00
|
|
|
|
Use: "info [remote:path]+",
|
2017-08-22 14:00:10 +08:00
|
|
|
|
Short: `Discovers file name or other limitations for paths.`,
|
|
|
|
|
Long: `rclone info discovers what filenames and upload methods are possible
|
|
|
|
|
to write to the paths passed in and how long they can be. It can take some
|
|
|
|
|
time. It will write test files into the remote:path passed in. It outputs
|
|
|
|
|
a bit of go code for each one.
|
2020-08-09 01:02:18 +08:00
|
|
|
|
|
|
|
|
|
**NB** this can create undeletable files and other hazards - use with care
|
2017-07-03 22:05:27 +08:00
|
|
|
|
`,
|
|
|
|
|
Run: func(command *cobra.Command, args []string) {
|
2019-09-05 20:59:06 +08:00
|
|
|
|
cmd.CheckArgs(1, 1e6, command, args)
|
2020-08-09 01:02:18 +08:00
|
|
|
|
if !checkNormalization && !checkControl && !checkLength && !checkStreaming && !all {
|
|
|
|
|
log.Fatalf("no tests selected - select a test or use -all")
|
|
|
|
|
}
|
|
|
|
|
if all {
|
|
|
|
|
checkNormalization = true
|
|
|
|
|
checkControl = true
|
|
|
|
|
checkLength = true
|
|
|
|
|
checkStreaming = true
|
|
|
|
|
}
|
2017-07-03 22:05:27 +08:00
|
|
|
|
for i := range args {
|
2018-05-08 00:58:16 +08:00
|
|
|
|
f := cmd.NewFsDir(args[i : i+1])
|
2017-07-03 22:05:27 +08:00
|
|
|
|
cmd.Run(false, false, command, func() error {
|
2019-06-17 16:34:30 +08:00
|
|
|
|
return readInfo(context.Background(), f)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
},
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// results holds the state and outcome of the tests run against one remote.
type results struct {
	// ctx is the context used for all operations on f
	ctx context.Context
	// f is the remote under test
	f fs.Fs
	// mu guards stringNeedsEscaping and controlResults, which are
	// written from concurrent goroutines in checkControls
	mu sync.Mutex
	// stringNeedsEscaping maps a tested string to the name positions in which it failed
	stringNeedsEscaping map[string]internal.Position
	// controlResults maps a tested string to its detailed per-position result
	controlResults map[string]internal.ControlResult
	// maxFileLength is the longest file name that could be written
	maxFileLength int
	// canWriteUnnormalized records whether the unnormalized name could be written
	canWriteUnnormalized bool
	// canReadUnnormalized records whether the unnormalized name could be read back as written
	canReadUnnormalized bool
	// canReadRenormalized records whether the name could be read back in normalized form
	canReadRenormalized bool
	// canStream records whether an upload of indeterminate size succeeded
	canStream bool
}
|
|
|
|
|
|
2019-06-17 16:34:30 +08:00
|
|
|
|
func newResults(ctx context.Context, f fs.Fs) *results {
|
2017-07-03 22:05:27 +08:00
|
|
|
|
return &results{
|
2019-06-17 16:34:30 +08:00
|
|
|
|
ctx: ctx,
|
2018-11-02 20:12:09 +08:00
|
|
|
|
f: f,
|
2019-05-14 23:49:55 +08:00
|
|
|
|
stringNeedsEscaping: make(map[string]internal.Position),
|
|
|
|
|
controlResults: make(map[string]internal.ControlResult),
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Print the results to stdout
|
|
|
|
|
func (r *results) Print() {
|
|
|
|
|
fmt.Printf("// %s\n", r.f.Name())
|
|
|
|
|
if checkControl {
|
|
|
|
|
escape := []string{}
|
2018-11-02 20:12:09 +08:00
|
|
|
|
for c, needsEscape := range r.stringNeedsEscaping {
|
2019-05-14 23:49:55 +08:00
|
|
|
|
if needsEscape != internal.PositionNone {
|
|
|
|
|
k := strconv.Quote(c)
|
|
|
|
|
k = k[1 : len(k)-1]
|
|
|
|
|
escape = append(escape, fmt.Sprintf("'%s'", k))
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sort.Strings(escape)
|
2019-05-14 23:49:55 +08:00
|
|
|
|
fmt.Printf("stringNeedsEscaping = []rune{\n")
|
2017-07-03 22:05:27 +08:00
|
|
|
|
fmt.Printf("\t%s\n", strings.Join(escape, ", "))
|
|
|
|
|
fmt.Printf("}\n")
|
|
|
|
|
}
|
|
|
|
|
if checkLength {
|
|
|
|
|
fmt.Printf("maxFileLength = %d\n", r.maxFileLength)
|
|
|
|
|
}
|
|
|
|
|
if checkNormalization {
|
|
|
|
|
fmt.Printf("canWriteUnnormalized = %v\n", r.canWriteUnnormalized)
|
|
|
|
|
fmt.Printf("canReadUnnormalized = %v\n", r.canReadUnnormalized)
|
|
|
|
|
fmt.Printf("canReadRenormalized = %v\n", r.canReadRenormalized)
|
|
|
|
|
}
|
2017-08-22 14:00:10 +08:00
|
|
|
|
if checkStreaming {
|
|
|
|
|
fmt.Printf("canStream = %v\n", r.canStream)
|
|
|
|
|
}
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-05-14 23:49:55 +08:00
|
|
|
|
// WriteJSON writes the results to a JSON file when requested
|
|
|
|
|
func (r *results) WriteJSON() {
|
|
|
|
|
if writeJSON == "" {
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
report := internal.InfoReport{
|
|
|
|
|
Remote: r.f.Name(),
|
|
|
|
|
}
|
|
|
|
|
if checkControl {
|
|
|
|
|
report.ControlCharacters = &r.controlResults
|
|
|
|
|
}
|
|
|
|
|
if checkLength {
|
|
|
|
|
report.MaxFileLength = &r.maxFileLength
|
|
|
|
|
}
|
|
|
|
|
if checkNormalization {
|
|
|
|
|
report.CanWriteUnnormalized = &r.canWriteUnnormalized
|
|
|
|
|
report.CanReadUnnormalized = &r.canReadUnnormalized
|
|
|
|
|
report.CanReadRenormalized = &r.canReadRenormalized
|
|
|
|
|
}
|
|
|
|
|
if checkStreaming {
|
|
|
|
|
report.CanStream = &r.canStream
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if f, err := os.Create(writeJSON); err != nil {
|
|
|
|
|
fs.Errorf(r.f, "Creating JSON file failed: %s", err)
|
|
|
|
|
} else {
|
|
|
|
|
defer fs.CheckClose(f, &err)
|
|
|
|
|
enc := json.NewEncoder(f)
|
|
|
|
|
enc.SetIndent("", " ")
|
|
|
|
|
err := enc.Encode(report)
|
|
|
|
|
if err != nil {
|
|
|
|
|
fs.Errorf(r.f, "Writing JSON file failed: %s", err)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
fs.Infof(r.f, "Wrote JSON file: %s", writeJSON)
|
|
|
|
|
}
|
|
|
|
|
|
2017-07-03 22:05:27 +08:00
|
|
|
|
// writeFile writes a file with some random contents
|
|
|
|
|
func (r *results) writeFile(path string) (fs.Object, error) {
|
2019-08-06 19:44:08 +08:00
|
|
|
|
contents := random.String(50)
|
2018-01-13 00:30:54 +08:00
|
|
|
|
src := object.NewStaticObjectInfo(path, time.Now(), int64(len(contents)), true, nil, r.f)
|
2019-05-14 23:49:55 +08:00
|
|
|
|
obj, err := r.f.Put(r.ctx, bytes.NewBufferString(contents), src)
|
|
|
|
|
if uploadWait > 0 {
|
|
|
|
|
time.Sleep(uploadWait)
|
|
|
|
|
}
|
|
|
|
|
return obj, err
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// check whether normalization is enforced and check whether it is
|
|
|
|
|
// done on the files anyway
|
|
|
|
|
func (r *results) checkUTF8Normalization() {
|
|
|
|
|
unnormalized := "Héroique"
|
|
|
|
|
normalized := "Héroique"
|
|
|
|
|
_, err := r.writeFile(unnormalized)
|
|
|
|
|
if err != nil {
|
|
|
|
|
r.canWriteUnnormalized = false
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
r.canWriteUnnormalized = true
|
2019-06-17 16:34:30 +08:00
|
|
|
|
_, err = r.f.NewObject(r.ctx, unnormalized)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
if err == nil {
|
|
|
|
|
r.canReadUnnormalized = true
|
|
|
|
|
}
|
2019-06-17 16:34:30 +08:00
|
|
|
|
_, err = r.f.NewObject(r.ctx, normalized)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
if err == nil {
|
|
|
|
|
r.canReadRenormalized = true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-05-14 23:49:55 +08:00
|
|
|
|
func (r *results) checkStringPositions(k, s string) {
|
2018-11-02 20:12:09 +08:00
|
|
|
|
fs.Infof(r.f, "Writing position file 0x%0X", s)
|
2019-05-14 23:49:55 +08:00
|
|
|
|
positionError := internal.PositionNone
|
|
|
|
|
res := internal.ControlResult{
|
|
|
|
|
Text: s,
|
|
|
|
|
WriteError: make(map[internal.Position]string, 3),
|
|
|
|
|
GetError: make(map[internal.Position]string, 3),
|
|
|
|
|
InList: make(map[internal.Position]internal.Presence, 3),
|
|
|
|
|
}
|
2018-11-02 20:12:09 +08:00
|
|
|
|
|
2019-05-14 23:49:55 +08:00
|
|
|
|
for _, pos := range internal.PositionList {
|
2018-11-02 20:12:09 +08:00
|
|
|
|
path := ""
|
|
|
|
|
switch pos {
|
2019-05-14 23:49:55 +08:00
|
|
|
|
case internal.PositionMiddle:
|
2018-11-02 20:12:09 +08:00
|
|
|
|
path = fmt.Sprintf("position-middle-%0X-%s-", s, s)
|
2019-05-14 23:49:55 +08:00
|
|
|
|
case internal.PositionLeft:
|
2018-11-02 20:12:09 +08:00
|
|
|
|
path = fmt.Sprintf("%s-position-left-%0X", s, s)
|
2019-05-14 23:49:55 +08:00
|
|
|
|
case internal.PositionRight:
|
2018-11-02 20:12:09 +08:00
|
|
|
|
path = fmt.Sprintf("position-right-%0X-%s", s, s)
|
|
|
|
|
default:
|
|
|
|
|
panic("invalid position: " + pos.String())
|
|
|
|
|
}
|
2019-05-14 23:49:55 +08:00
|
|
|
|
_, writeError := r.writeFile(path)
|
|
|
|
|
if writeError != nil {
|
|
|
|
|
res.WriteError[pos] = writeError.Error()
|
|
|
|
|
fs.Infof(r.f, "Writing %s position file 0x%0X Error: %s", pos.String(), s, writeError)
|
2018-11-02 20:12:09 +08:00
|
|
|
|
} else {
|
|
|
|
|
fs.Infof(r.f, "Writing %s position file 0x%0X OK", pos.String(), s)
|
|
|
|
|
}
|
2019-06-17 16:34:30 +08:00
|
|
|
|
obj, getErr := r.f.NewObject(r.ctx, path)
|
2018-11-02 20:12:09 +08:00
|
|
|
|
if getErr != nil {
|
2019-05-14 23:49:55 +08:00
|
|
|
|
res.GetError[pos] = getErr.Error()
|
2018-11-02 20:12:09 +08:00
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X Error: %s", pos.String(), s, getErr)
|
|
|
|
|
} else {
|
|
|
|
|
if obj.Size() != 50 {
|
2019-05-14 23:49:55 +08:00
|
|
|
|
res.GetError[pos] = fmt.Sprintf("invalid size %d", obj.Size())
|
2018-11-02 20:12:09 +08:00
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X Invalid Size: %d", pos.String(), s, obj.Size())
|
|
|
|
|
} else {
|
|
|
|
|
fs.Infof(r.f, "Getting %s position file 0x%0X OK", pos.String(), s)
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-05-14 23:49:55 +08:00
|
|
|
|
if writeError != nil || getErr != nil {
|
2018-11-02 20:12:09 +08:00
|
|
|
|
positionError += pos
|
|
|
|
|
}
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
2018-11-02 20:12:09 +08:00
|
|
|
|
|
2017-07-03 22:05:27 +08:00
|
|
|
|
r.mu.Lock()
|
2019-05-14 23:49:55 +08:00
|
|
|
|
r.stringNeedsEscaping[k] = positionError
|
|
|
|
|
r.controlResults[k] = res
|
2017-07-03 22:05:27 +08:00
|
|
|
|
r.mu.Unlock()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// check we can write a file with the control chars
|
|
|
|
|
func (r *results) checkControls() {
|
|
|
|
|
fs.Infof(r.f, "Trying to create control character file names")
|
2020-11-05 19:33:32 +08:00
|
|
|
|
ci := fs.GetConfig(context.Background())
|
|
|
|
|
|
2017-07-03 22:05:27 +08:00
|
|
|
|
// Concurrency control
|
2020-11-05 19:33:32 +08:00
|
|
|
|
tokens := make(chan struct{}, ci.Checkers)
|
|
|
|
|
for i := 0; i < ci.Checkers; i++ {
|
2017-07-03 22:05:27 +08:00
|
|
|
|
tokens <- struct{}{}
|
|
|
|
|
}
|
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
|
for i := rune(0); i < 128; i++ {
|
2018-11-02 20:12:09 +08:00
|
|
|
|
s := string(i)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
if i == 0 || i == '/' {
|
|
|
|
|
// We're not even going to check NULL or /
|
2019-05-14 23:49:55 +08:00
|
|
|
|
r.stringNeedsEscaping[s] = internal.PositionAll
|
2017-07-03 22:05:27 +08:00
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
wg.Add(1)
|
2018-11-02 20:12:09 +08:00
|
|
|
|
go func(s string) {
|
2017-07-03 22:05:27 +08:00
|
|
|
|
defer wg.Done()
|
|
|
|
|
token := <-tokens
|
2019-05-14 23:49:55 +08:00
|
|
|
|
k := s
|
|
|
|
|
r.checkStringPositions(k, s)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
tokens <- token
|
2018-11-02 20:12:09 +08:00
|
|
|
|
}(s)
|
|
|
|
|
}
|
2019-05-14 23:49:55 +08:00
|
|
|
|
for _, s := range []string{"\", "\u00A0", "\xBF", "\xFE"} {
|
2018-11-02 20:12:09 +08:00
|
|
|
|
wg.Add(1)
|
|
|
|
|
go func(s string) {
|
|
|
|
|
defer wg.Done()
|
|
|
|
|
token := <-tokens
|
2019-05-14 23:49:55 +08:00
|
|
|
|
k := s
|
|
|
|
|
r.checkStringPositions(k, s)
|
2018-11-02 20:12:09 +08:00
|
|
|
|
tokens <- token
|
|
|
|
|
}(s)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
wg.Wait()
|
2019-05-14 23:49:55 +08:00
|
|
|
|
r.checkControlsList()
|
2017-07-03 22:05:27 +08:00
|
|
|
|
fs.Infof(r.f, "Done trying to create control character file names")
|
|
|
|
|
}
|
|
|
|
|
|
2019-05-14 23:49:55 +08:00
|
|
|
|
// checkControlsList lists the remote and works out which of the test
// files written by checkStringPositions appear in the listing, whether
// they kept their original name, were renamed, or show up more than
// once, recording the outcome in r.controlResults.
func (r *results) checkControlsList() {
	l, err := r.f.List(context.TODO(), "")
	if err != nil {
		fs.Errorf(r.f, "Listing control character file names failed: %s", err)
		return
	}

	// Set of base names seen in the listing; entries are removed as they
	// are matched so any leftovers are unexpected files
	namesMap := make(map[string]struct{}, len(l))
	for _, s := range l {
		namesMap[path.Base(s.Remote())] = struct{}{}
	}

	for path := range namesMap {
		// Work out which position pattern this name matches; hex is the
		// hex-encoded original string, value the literal copy of it
		var pos internal.Position
		var hex, value string
		if g := positionLeftRe.FindStringSubmatch(path); g != nil {
			pos, hex, value = internal.PositionLeft, g[2], g[1]
		} else if g := positionMiddleRe.FindStringSubmatch(path); g != nil {
			pos, hex, value = internal.PositionMiddle, g[1], g[2]
		} else if g := positionRightRe.FindStringSubmatch(path); g != nil {
			pos, hex, value = internal.PositionRight, g[1], g[2]
		} else {
			fs.Infof(r.f, "Unknown path %q", path)
			continue
		}
		// Decode the hex part two digits at a time
		var hexValue []byte
		for ; len(hex) >= 2; hex = hex[2:] {
			if b, err := strconv.ParseUint(hex[:2], 16, 8); err != nil {
				fs.Infof(r.f, "Invalid path %q: %s", path, err)
				// NOTE(review): this continues the inner hex loop (the
				// post statement still runs), silently skipping the bad
				// pair - looks like it was meant to abandon this path;
				// verify against upstream
				continue
			} else {
				hexValue = append(hexValue, byte(b))
			}
		}
		// An odd digit left over means the name was not valid hex
		if hex != "" {
			fs.Infof(r.f, "Invalid path %q", path)
			continue
		}

		// hexStr is the original test string recovered from the name;
		// it is also the key used by checkStringPositions
		hexStr := string(hexValue)
		k := hexStr
		// InList is a map, so assigning through the map value works even
		// though controlResults values are structs
		switch r.controlResults[k].InList[pos] {
		case internal.Absent:
			if hexStr == value {
				r.controlResults[k].InList[pos] = internal.Present
			} else {
				r.controlResults[k].InList[pos] = internal.Renamed
			}
		case internal.Present:
			r.controlResults[k].InList[pos] = internal.Multiple
		case internal.Renamed:
			r.controlResults[k].InList[pos] = internal.Multiple
		}
		delete(namesMap, path)
	}

	// Anything left over did not match any expected pattern
	if len(namesMap) > 0 {
		fs.Infof(r.f, "Found additional control character file names:")
		for name := range namesMap {
			fs.Infof(r.f, "%q", name)
		}
	}
}
|
|
|
|
|
|
2017-07-03 22:05:27 +08:00
|
|
|
|
// find the max file name size we can use
|
|
|
|
|
func (r *results) findMaxLength() {
|
|
|
|
|
const maxLen = 16 * 1024
|
|
|
|
|
name := make([]byte, maxLen)
|
|
|
|
|
for i := range name {
|
|
|
|
|
name[i] = 'a'
|
|
|
|
|
}
|
|
|
|
|
// Find the first size of filename we can't write
|
|
|
|
|
i := sort.Search(len(name), func(i int) (fail bool) {
|
|
|
|
|
defer func() {
|
|
|
|
|
if err := recover(); err != nil {
|
|
|
|
|
fs.Infof(r.f, "Couldn't write file with name length %d: %v", i, err)
|
|
|
|
|
fail = true
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
path := string(name[:i])
|
|
|
|
|
_, err := r.writeFile(path)
|
|
|
|
|
if err != nil {
|
|
|
|
|
fs.Infof(r.f, "Couldn't write file with name length %d: %v", i, err)
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
fs.Infof(r.f, "Wrote file with name length %d", i)
|
|
|
|
|
return false
|
|
|
|
|
})
|
|
|
|
|
r.maxFileLength = i - 1
|
|
|
|
|
fs.Infof(r.f, "Max file length is %d", r.maxFileLength)
|
|
|
|
|
}
|
|
|
|
|
|
2017-08-22 14:00:10 +08:00
|
|
|
|
func (r *results) checkStreaming() {
|
|
|
|
|
putter := r.f.Put
|
|
|
|
|
if r.f.Features().PutStream != nil {
|
|
|
|
|
fs.Infof(r.f, "Given remote has specialized streaming function. Using that to test streaming.")
|
|
|
|
|
putter = r.f.Features().PutStream
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
contents := "thinking of test strings is hard"
|
|
|
|
|
buf := bytes.NewBufferString(contents)
|
2018-01-13 00:30:54 +08:00
|
|
|
|
hashIn := hash.NewMultiHasher()
|
2017-08-22 14:00:10 +08:00
|
|
|
|
in := io.TeeReader(buf, hashIn)
|
|
|
|
|
|
2018-01-13 00:30:54 +08:00
|
|
|
|
objIn := object.NewStaticObjectInfo("checkStreamingTest", time.Now(), -1, true, nil, r.f)
|
2019-06-17 16:34:30 +08:00
|
|
|
|
objR, err := putter(r.ctx, in, objIn)
|
2017-08-22 14:00:10 +08:00
|
|
|
|
if err != nil {
|
|
|
|
|
fs.Infof(r.f, "Streamed file failed to upload (%v)", err)
|
|
|
|
|
r.canStream = false
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
hashes := hashIn.Sums()
|
|
|
|
|
types := objR.Fs().Hashes().Array()
|
2018-01-13 00:30:54 +08:00
|
|
|
|
for _, Hash := range types {
|
2019-06-17 16:34:30 +08:00
|
|
|
|
sum, err := objR.Hash(r.ctx, Hash)
|
2017-08-22 14:00:10 +08:00
|
|
|
|
if err != nil {
|
2018-01-13 00:30:54 +08:00
|
|
|
|
fs.Infof(r.f, "Streamed file failed when getting hash %v (%v)", Hash, err)
|
2017-08-22 14:00:10 +08:00
|
|
|
|
r.canStream = false
|
|
|
|
|
return
|
|
|
|
|
}
|
2018-01-13 00:30:54 +08:00
|
|
|
|
if !hash.Equals(hashes[Hash], sum) {
|
|
|
|
|
fs.Infof(r.f, "Streamed file has incorrect hash %v: expecting %q got %q", Hash, hashes[Hash], sum)
|
2017-08-22 14:00:10 +08:00
|
|
|
|
r.canStream = false
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if int64(len(contents)) != objR.Size() {
|
|
|
|
|
fs.Infof(r.f, "Streamed file has incorrect file size: expecting %d got %d", len(contents), objR.Size())
|
|
|
|
|
r.canStream = false
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
r.canStream = true
|
|
|
|
|
}
|
|
|
|
|
|
2019-06-17 16:34:30 +08:00
|
|
|
|
func readInfo(ctx context.Context, f fs.Fs) error {
|
|
|
|
|
err := f.Mkdir(ctx, "")
|
2017-07-03 22:05:27 +08:00
|
|
|
|
if err != nil {
|
2021-11-04 18:12:57 +08:00
|
|
|
|
return fmt.Errorf("couldn't mkdir: %w", err)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
}
|
2019-06-17 16:34:30 +08:00
|
|
|
|
r := newResults(ctx, f)
|
2017-07-03 22:05:27 +08:00
|
|
|
|
if checkControl {
|
|
|
|
|
r.checkControls()
|
|
|
|
|
}
|
|
|
|
|
if checkLength {
|
|
|
|
|
r.findMaxLength()
|
|
|
|
|
}
|
|
|
|
|
if checkNormalization {
|
|
|
|
|
r.checkUTF8Normalization()
|
|
|
|
|
}
|
2017-08-22 14:00:10 +08:00
|
|
|
|
if checkStreaming {
|
|
|
|
|
r.checkStreaming()
|
|
|
|
|
}
|
2017-07-03 22:05:27 +08:00
|
|
|
|
r.Print()
|
2019-05-14 23:49:55 +08:00
|
|
|
|
r.WriteJSON()
|
2017-07-03 22:05:27 +08:00
|
|
|
|
return nil
|
|
|
|
|
}
|