2018-01-13 00:30:54 +08:00
|
|
|
package asyncreader
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
|
|
|
"bytes"
|
2020-11-05 19:33:32 +08:00
|
|
|
"context"
|
2018-08-11 16:18:19 +08:00
|
|
|
"fmt"
|
2015-10-06 22:35:22 +08:00
|
|
|
"io"
|
2018-08-11 16:18:19 +08:00
|
|
|
"math/rand"
|
2015-10-06 22:35:22 +08:00
|
|
|
"strings"
|
2017-03-07 03:22:17 +08:00
|
|
|
"sync"
|
2015-10-06 22:35:22 +08:00
|
|
|
"testing"
|
|
|
|
"testing/iotest"
|
2017-03-07 03:22:17 +08:00
|
|
|
"time"
|
2016-06-30 00:59:31 +08:00
|
|
|
|
2019-07-29 01:47:38 +08:00
|
|
|
"github.com/rclone/rclone/lib/israce"
|
|
|
|
"github.com/rclone/rclone/lib/readers"
|
2016-06-30 00:59:31 +08:00
|
|
|
"github.com/stretchr/testify/assert"
|
|
|
|
"github.com/stretchr/testify/require"
|
2015-10-06 22:35:22 +08:00
|
|
|
)
|
|
|
|
|
|
|
|
func TestAsyncReader(t *testing.T) {
|
2020-11-05 19:33:32 +08:00
|
|
|
ctx := context.Background()
|
|
|
|
|
2022-08-20 22:38:02 +08:00
|
|
|
buf := io.NopCloser(bytes.NewBufferString("Testbuffer"))
|
2020-11-05 19:33:32 +08:00
|
|
|
ar, err := New(ctx, buf, 4)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
var dst = make([]byte, 100)
|
|
|
|
n, err := ar.Read(dst)
|
2017-02-15 06:33:53 +08:00
|
|
|
assert.Equal(t, io.EOF, err)
|
2016-06-30 00:59:31 +08:00
|
|
|
assert.Equal(t, 10, n)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
n, err = ar.Read(dst)
|
2016-06-30 00:59:31 +08:00
|
|
|
assert.Equal(t, io.EOF, err)
|
|
|
|
assert.Equal(t, 0, n)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
// Test read after error
|
|
|
|
n, err = ar.Read(dst)
|
2016-06-30 00:59:31 +08:00
|
|
|
assert.Equal(t, io.EOF, err)
|
|
|
|
assert.Equal(t, 0, n)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
err = ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
// Test double close
|
|
|
|
err = ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
// Test Close without reading everything
|
2022-08-20 22:38:02 +08:00
|
|
|
buf = io.NopCloser(bytes.NewBuffer(make([]byte, 50000)))
|
2020-11-05 19:33:32 +08:00
|
|
|
ar, err = New(ctx, buf, 4)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
err = ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestAsyncWriteTo(t *testing.T) {
|
2020-11-05 19:33:32 +08:00
|
|
|
ctx := context.Background()
|
|
|
|
|
2022-08-20 22:38:02 +08:00
|
|
|
buf := io.NopCloser(bytes.NewBufferString("Testbuffer"))
|
2020-11-05 19:33:32 +08:00
|
|
|
ar, err := New(ctx, buf, 4)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
var dst = &bytes.Buffer{}
|
|
|
|
n, err := io.Copy(dst, ar)
|
2019-12-05 20:32:42 +08:00
|
|
|
require.NoError(t, err)
|
2016-06-30 00:59:31 +08:00
|
|
|
assert.Equal(t, int64(10), n)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
2019-12-05 20:32:42 +08:00
|
|
|
// Should still not return any errors
|
2015-10-06 22:35:22 +08:00
|
|
|
n, err = io.Copy(dst, ar)
|
2019-12-05 20:32:42 +08:00
|
|
|
require.NoError(t, err)
|
2016-06-30 00:59:31 +08:00
|
|
|
assert.Equal(t, int64(0), n)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
err = ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestAsyncReaderErrors(t *testing.T) {
|
2020-11-05 19:33:32 +08:00
|
|
|
ctx := context.Background()
|
|
|
|
|
2015-10-06 22:35:22 +08:00
|
|
|
// test nil reader
|
2020-11-05 19:33:32 +08:00
|
|
|
_, err := New(ctx, nil, 4)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.Error(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
|
|
|
|
// invalid buffer number
|
2022-08-20 22:38:02 +08:00
|
|
|
buf := io.NopCloser(bytes.NewBufferString("Testbuffer"))
|
2020-11-05 19:33:32 +08:00
|
|
|
_, err = New(ctx, buf, 0)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.Error(t, err)
|
2020-11-05 19:33:32 +08:00
|
|
|
_, err = New(ctx, buf, -1)
|
2016-06-30 00:59:31 +08:00
|
|
|
require.Error(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Complex read tests, leveraged from "bufio".
|
|
|
|
|
|
|
|
// readMaker names a wrapper applied to the source reader so the tests
// can exercise different Read behaviours (partial reads, injected
// errors, timeouts, …).
type readMaker struct {
	// name identifies the wrapper in test failure messages
	name string
	// fn wraps the source reader with the behaviour under test
	fn func(io.Reader) io.Reader
}
|
|
|
|
|
|
|
|
// readMakers lists the reader wrappers (mostly from testing/iotest)
// used to simulate different Read patterns and error behaviours.
var readMakers = []readMaker{
	{"full", func(r io.Reader) io.Reader { return r }},
	{"byte", iotest.OneByteReader},
	{"half", iotest.HalfReader},
	{"data+err", iotest.DataErrReader},
	{"timeout", iotest.TimeoutReader},
}
|
|
|
|
|
|
|
|
// Call Read to accumulate the text of a file
|
|
|
|
func reads(buf io.Reader, m int) string {
|
|
|
|
var b [1000]byte
|
|
|
|
nb := 0
|
|
|
|
for {
|
|
|
|
n, err := buf.Read(b[nb : nb+m])
|
|
|
|
nb += n
|
|
|
|
if err == io.EOF {
|
|
|
|
break
|
|
|
|
} else if err != nil && err != iotest.ErrTimeout {
|
|
|
|
panic("Data: " + err.Error())
|
|
|
|
} else if err != nil {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return string(b[0:nb])
|
|
|
|
}
|
|
|
|
|
|
|
|
// bufReader names a strategy for reading a stream through to a string.
type bufReader struct {
	// name identifies the strategy in test failure messages
	name string
	// fn reads the stream to completion and returns its contents
	fn func(io.Reader) string
}
|
|
|
|
|
|
|
|
// bufreaders reads the stream via reads() with various chunk sizes.
var bufreaders = []bufReader{
	{"1", func(b io.Reader) string { return reads(b, 1) }},
	{"2", func(b io.Reader) string { return reads(b, 2) }},
	{"3", func(b io.Reader) string { return reads(b, 3) }},
	{"4", func(b io.Reader) string { return reads(b, 4) }},
	{"5", func(b io.Reader) string { return reads(b, 5) }},
	{"7", func(b io.Reader) string { return reads(b, 7) }},
}
|
|
|
|
|
|
|
|
// minReadBufferSize is the smallest non-zero bufio buffer size exercised below.
const minReadBufferSize = 16
|
|
|
|
|
|
|
|
// bufsizes are the bufio.Reader buffer sizes used in the table tests.
var bufsizes = []int{
	0, minReadBufferSize, 23, 32, 46, 64, 93, 128, 1024, 4096,
}
|
|
|
|
|
|
|
|
// Test various input buffer sizes, number of buffers and read sizes.
|
|
|
|
func TestAsyncReaderSizes(t *testing.T) {
|
2020-11-05 19:33:32 +08:00
|
|
|
ctx := context.Background()
|
|
|
|
|
2015-10-06 22:35:22 +08:00
|
|
|
var texts [31]string
|
|
|
|
str := ""
|
|
|
|
all := ""
|
|
|
|
for i := 0; i < len(texts)-1; i++ {
|
|
|
|
texts[i] = str + "\n"
|
|
|
|
all += texts[i]
|
2020-07-24 23:16:45 +08:00
|
|
|
str += string(rune(i)%26 + 'a')
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
texts[len(texts)-1] = all
|
|
|
|
|
|
|
|
for h := 0; h < len(texts); h++ {
|
|
|
|
text := texts[h]
|
|
|
|
for i := 0; i < len(readMakers); i++ {
|
|
|
|
for j := 0; j < len(bufreaders); j++ {
|
|
|
|
for k := 0; k < len(bufsizes); k++ {
|
|
|
|
for l := 1; l < 10; l++ {
|
|
|
|
readmaker := readMakers[i]
|
|
|
|
bufreader := bufreaders[j]
|
|
|
|
bufsize := bufsizes[k]
|
|
|
|
read := readmaker.fn(strings.NewReader(text))
|
|
|
|
buf := bufio.NewReaderSize(read, bufsize)
|
2022-08-20 22:38:02 +08:00
|
|
|
ar, _ := New(ctx, io.NopCloser(buf), l)
|
2015-10-06 22:35:22 +08:00
|
|
|
s := bufreader.fn(ar)
|
2018-01-13 00:30:54 +08:00
|
|
|
// "timeout" expects the Reader to recover, AsyncReader does not.
|
2015-10-06 22:35:22 +08:00
|
|
|
if s != text && readmaker.name != "timeout" {
|
|
|
|
t.Errorf("reader=%s fn=%s bufsize=%d want=%q got=%q",
|
|
|
|
readmaker.name, bufreader.name, bufsize, text, s)
|
|
|
|
}
|
|
|
|
err := ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Test various input buffer sizes, number of buffers and read sizes.
|
|
|
|
func TestAsyncReaderWriteTo(t *testing.T) {
|
2020-11-05 19:33:32 +08:00
|
|
|
ctx := context.Background()
|
|
|
|
|
2015-10-06 22:35:22 +08:00
|
|
|
var texts [31]string
|
|
|
|
str := ""
|
|
|
|
all := ""
|
|
|
|
for i := 0; i < len(texts)-1; i++ {
|
|
|
|
texts[i] = str + "\n"
|
|
|
|
all += texts[i]
|
2020-07-24 23:16:45 +08:00
|
|
|
str += string(rune(i)%26 + 'a')
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
texts[len(texts)-1] = all
|
|
|
|
|
|
|
|
for h := 0; h < len(texts); h++ {
|
|
|
|
text := texts[h]
|
|
|
|
for i := 0; i < len(readMakers); i++ {
|
|
|
|
for j := 0; j < len(bufreaders); j++ {
|
|
|
|
for k := 0; k < len(bufsizes); k++ {
|
|
|
|
for l := 1; l < 10; l++ {
|
|
|
|
readmaker := readMakers[i]
|
|
|
|
bufreader := bufreaders[j]
|
|
|
|
bufsize := bufsizes[k]
|
|
|
|
read := readmaker.fn(strings.NewReader(text))
|
|
|
|
buf := bufio.NewReaderSize(read, bufsize)
|
2022-08-20 22:38:02 +08:00
|
|
|
ar, _ := New(ctx, io.NopCloser(buf), l)
|
2015-10-06 22:35:22 +08:00
|
|
|
dst := &bytes.Buffer{}
|
2017-03-07 03:22:17 +08:00
|
|
|
_, err := ar.WriteTo(dst)
|
2015-10-06 22:35:22 +08:00
|
|
|
if err != nil && err != io.EOF && err != iotest.ErrTimeout {
|
|
|
|
t.Fatal("Copy:", err)
|
|
|
|
}
|
|
|
|
s := dst.String()
|
2018-01-13 00:30:54 +08:00
|
|
|
// "timeout" expects the Reader to recover, AsyncReader does not.
|
2015-10-06 22:35:22 +08:00
|
|
|
if s != text && readmaker.name != "timeout" {
|
|
|
|
t.Errorf("reader=%s fn=%s bufsize=%d want=%q got=%q",
|
|
|
|
readmaker.name, bufreader.name, bufsize, text, s)
|
|
|
|
}
|
|
|
|
err = ar.Close()
|
2016-06-30 00:59:31 +08:00
|
|
|
require.NoError(t, err)
|
2015-10-06 22:35:22 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-03-07 03:22:17 +08:00
|
|
|
|
|
|
|
// zeroReader produces an endless stream of zero bytes until it is
// closed, after which Read reports io.EOF.
type zeroReader struct {
	closed bool // set by Close; subsequent Reads return io.EOF
}

// Read fills p with zero bytes, or returns io.EOF once the reader has
// been closed.
func (z *zeroReader) Read(p []byte) (n int, err error) {
	if z.closed {
		return 0, io.EOF
	}
	for i := 0; i < len(p); i++ {
		p[i] = 0
	}
	return len(p), nil
}

// Close marks the reader closed. Closing twice is a programming error
// and panics so the tests catch double closes.
func (z *zeroReader) Close() error {
	if z.closed {
		panic("double close on zeroReader")
	}
	z.closed = true
	return nil
}
|
|
|
|
|
|
|
|
// Test closing and abandoning
//
// testAsyncReaderClose starts a goroutine copying from an endless
// zeroReader, lets it run briefly, then calls Abandon and checks the
// copy stops with ErrorStreamAbandoned after having moved some data.
// writeto selects between the WriteTo and the Read code paths.
func testAsyncReaderClose(t *testing.T, writeto bool) {
	ctx := context.Background()

	zr := &zeroReader{}
	a, err := New(ctx, zr, 16)
	require.NoError(t, err)
	var copyN int64
	var copyErr error
	var wg sync.WaitGroup
	// started signals that the copier goroutine is running before we
	// begin the timed wait below
	started := make(chan struct{})
	wg.Add(1)
	go func() {
		defer wg.Done()
		close(started)
		if writeto {
			// exercise the WriteTo path
			copyN, copyErr = a.WriteTo(io.Discard)
		} else {
			// exercise the Read path
			buf := make([]byte, 64*1024)
			for {
				var n int
				n, copyErr = a.Read(buf)
				copyN += int64(n)
				if copyErr != nil {
					break
				}
			}
		}
	}()
	// Do some copying
	<-started
	time.Sleep(100 * time.Millisecond)
	// Abandon the copy
	a.Abandon()
	wg.Wait()
	assert.Equal(t, ErrorStreamAbandoned, copyErr)
	// t.Logf("Copied %d bytes, err %v", copyN, copyErr)
	assert.True(t, copyN > 0)
}
|
|
|
|
// TestAsyncReaderCloseRead checks abandoning a stream mid-Read.
func TestAsyncReaderCloseRead(t *testing.T) { testAsyncReaderClose(t, false) }
|
|
|
|
// TestAsyncReaderCloseWriteTo checks abandoning a stream mid-WriteTo.
func TestAsyncReaderCloseWriteTo(t *testing.T) { testAsyncReaderClose(t, true) }
|
2018-08-11 16:18:19 +08:00
|
|
|
|
|
|
|
// TestAsyncReaderSkipBytes checks SkipBytes over combinations of buffer
// count, initial read size and skip amount (including negative skips
// and skips past EOF), verifying both the data returned after a
// successful skip and the stream state after a failed one.
func TestAsyncReaderSkipBytes(t *testing.T) {
	ctx := context.Background()

	t.Parallel()
	// Deterministic pseudo-random payload so expected bytes can be
	// compared exactly across subtests
	data := make([]byte, 15000)
	buf := make([]byte, len(data))
	r := rand.New(rand.NewSource(42))

	n, err := r.Read(data)
	require.NoError(t, err)
	require.Equal(t, len(data), n)

	// Sizes chosen to straddle the soft-start and buffer boundaries
	initialReads := []int{0, 1, 100, 2048,
		softStartInitial - 1, softStartInitial, softStartInitial + 1,
		8000, len(data)}
	skips := []int{-1000, -101, -100, -99, 0, 1, 2048,
		softStartInitial - 1, softStartInitial, softStartInitial + 1,
		8000, len(data), BufferSize, 2 * BufferSize}

	for buffers := 1; buffers <= 5; buffers++ {
		if israce.Enabled && buffers > 1 {
			t.Skip("FIXME Skipping further tests with race detector until https://github.com/golang/go/issues/27070 is fixed.")
		}
		t.Run(fmt.Sprintf("%d", buffers), func(t *testing.T) {
			for _, initialRead := range initialReads {
				t.Run(fmt.Sprintf("%d", initialRead), func(t *testing.T) {
					for _, skip := range skips {
						t.Run(fmt.Sprintf("%d", skip), func(t *testing.T) {
							ar, err := New(ctx, io.NopCloser(bytes.NewReader(data)), buffers)
							require.NoError(t, err)

							// wantSkipFalse records that SkipBytes must fail
							// because the initial read already consumed the
							// whole stream
							wantSkipFalse := false
							buf = buf[:initialRead]
							n, err := readers.ReadFill(ar, buf)
							if initialRead >= len(data) {
								wantSkipFalse = true
								if initialRead > len(data) {
									assert.Equal(t, err, io.EOF)
								} else {
									// reading exactly len(data) may or may not
									// see EOF on the same call
									assert.True(t, err == nil || err == io.EOF)
								}
								assert.Equal(t, len(data), n)
								assert.Equal(t, data, buf[:len(data)])
							} else {
								assert.NoError(t, err)
								assert.Equal(t, initialRead, n)
								assert.Equal(t, data[:initialRead], buf)
							}

							skipped := ar.SkipBytes(skip)
							buf = buf[:1024]
							n, err = readers.ReadFill(ar, buf)
							// offset is where the next read should land after
							// a successful skip
							offset := initialRead + skip
							if skipped {
								assert.False(t, wantSkipFalse)
								l := len(buf)
								if offset >= len(data) {
									assert.Equal(t, err, io.EOF)
								} else {
									if offset+1024 >= len(data) {
										l = len(data) - offset
									}
									assert.Equal(t, l, n)
									assert.Equal(t, data[offset:offset+l], buf[:l])
								}
							} else {
								// skip failed: the stream is either already at
								// EOF or has been abandoned by SkipBytes
								if initialRead >= len(data) {
									assert.Equal(t, err, io.EOF)
								} else {
									assert.True(t, err == ErrorStreamAbandoned || err == io.EOF)
								}
							}
						})
					}
				})
			}
		})
	}
}
|