
Use package "restic/test"

Alexander Neumann 2015-04-09 21:15:48 +02:00
parent a2514425a3
commit 4f4f3c421a
24 changed files with 248 additions and 403 deletions
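
The diffs below are mechanical: each test file gains the dot import . "github.com/restic/restic/test", switches from the locally defined helpers (ok, assert, equals) to the exported ones (OK, Assert, Equals), and drops its per-package copy of those helpers. For reference, here is a minimal sketch of what the shared package presumably exports, mirroring the local helpers deleted in this commit; the exact file name and layout inside restic/test are assumptions.

// Package test provides shared assertion helpers for restic's tests.
// Sketch only: the exported names mirror the per-package helpers removed in this commit.
package test

import (
	"fmt"
	"path/filepath"
	"reflect"
	"runtime"
	"testing"
)

// Assert fails the test if the condition is false.
func Assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
	if !condition {
		_, file, line, _ := runtime.Caller(1)
		fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
		tb.FailNow()
	}
}

// OK fails the test if err is not nil.
func OK(tb testing.TB, err error) {
	if err != nil {
		_, file, line, _ := runtime.Caller(1)
		fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
		tb.FailNow()
	}
}

// Equals fails the test if exp is not equal to act.
func Equals(tb testing.TB, exp, act interface{}) {
	if !reflect.DeepEqual(exp, act) {
		_, file, line, _ := runtime.Caller(1)
		fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
		tb.FailNow()
	}
}

Because the tests use a dot import, the helpers are called unqualified, for example OK(t, err) rather than test.OK(t, err), which keeps the renames in the diffs below a pure case change.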

View File

@@ -10,6 +10,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/backend"
"github.com/restic/restic/chunker"
+. "github.com/restic/restic/test"
)
var benchArchiveDirectory = flag.String("test.benchdir", ".", "benchmark archiving a real directory (default: .)")
@@ -49,16 +50,16 @@ func benchmarkChunkEncrypt(b testing.TB, buf []byte, rd Rdr, key *restic.Key) {
break
}
-ok(b, err)
+OK(b, err)
// reduce length of buf
buf = buf[:chunk.Length]
n, err := io.ReadFull(chunk.Reader(rd), buf)
-ok(b, err)
+OK(b, err)
-assert(b, uint(n) == chunk.Length, "invalid length: got %d, expected %d", n, chunk.Length)
+Assert(b, uint(n) == chunk.Length, "invalid length: got %d, expected %d", n, chunk.Length)
_, err = key.Encrypt(buf, buf)
-ok(b, err)
+OK(b, err)
}
restic.FreeChunker("BenchmarkChunkEncrypt", ch)
@@ -137,7 +138,7 @@ func BenchmarkArchiveDirectory(b *testing.B) {
server.SetKey(key)
arch, err := restic.NewArchiver(server)
-ok(b, err)
+OK(b, err)
_, id, err := arch.Snapshot(nil, []string{*benchArchiveDirectory}, nil)
@@ -146,10 +147,10 @@ func BenchmarkArchiveDirectory(b *testing.B) {
func snapshot(t testing.TB, server restic.Server, path string, parent backend.ID) *restic.Snapshot {
arch, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
-ok(t, arch.Preload())
+OK(t, arch.Preload())
sn, _, err := arch.Snapshot(nil, []string{path}, parent)
-ok(t, err)
+OK(t, err)
return sn
}
@@ -220,9 +221,9 @@ func BenchmarkPreload(t *testing.B) {
// archive a few files
arch, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
sn, _, err := arch.Snapshot(nil, []string{*benchArchiveDirectory}, nil)
-ok(t, err)
+OK(t, err)
t.Logf("archived snapshot %v", sn.ID())
// start benchmark
@@ -231,8 +232,8 @@ func BenchmarkPreload(t *testing.B) {
for i := 0; i < t.N; i++ {
// create new archiver and preload
arch2, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
-ok(t, arch2.Preload())
+OK(t, arch2.Preload())
}
}
@@ -248,9 +249,9 @@ func BenchmarkLoadTree(t *testing.B) {
// archive a few files
arch, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
sn, _, err := arch.Snapshot(nil, []string{*benchArchiveDirectory}, nil)
-ok(t, err)
+OK(t, err)
t.Logf("archived snapshot %v", sn.ID())
list := make([]backend.ID, 0, 10)
@@ -276,7 +277,7 @@ func BenchmarkLoadTree(t *testing.B) {
for i := 0; i < t.N; i++ {
for _, id := range list {
_, err := restic.LoadTree(server, restic.Blob{Storage: id})
-ok(t, err)
+OK(t, err)
}
}
}

View File

@@ -9,6 +9,7 @@ import (
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
func testBackend(b backend.Backend, t *testing.T) {
@@ -16,44 +17,44 @@ func testBackend(b backend.Backend, t *testing.T) {
// detect non-existing files
for _, test := range TestStrings {
id, err := backend.ParseID(test.id)
-ok(t, err)
+OK(t, err)
// test if blob is already in repository
ret, err := b.Test(tpe, id.String())
-ok(t, err)
+OK(t, err)
-assert(t, !ret, "blob was found to exist before creating")
+Assert(t, !ret, "blob was found to exist before creating")
// try to open not existing blob
_, err = b.Get(tpe, id.String())
-assert(t, err != nil, "blob data could be extracted before creation")
+Assert(t, err != nil, "blob data could be extracted before creation")
// try to get string out, should fail
ret, err = b.Test(tpe, id.String())
-ok(t, err)
+OK(t, err)
-assert(t, !ret, "id %q was found (but should not have)", test.id)
+Assert(t, !ret, "id %q was found (but should not have)", test.id)
}
// add files
for _, test := range TestStrings {
// store string in backend
blob, err := b.Create()
-ok(t, err)
+OK(t, err)
_, err = blob.Write([]byte(test.data))
-ok(t, err)
+OK(t, err)
-ok(t, blob.Finalize(tpe, test.id))
+OK(t, blob.Finalize(tpe, test.id))
// try to get it out again
rd, err := b.Get(tpe, test.id)
-ok(t, err)
+OK(t, err)
-assert(t, rd != nil, "Get() returned nil")
+Assert(t, rd != nil, "Get() returned nil")
buf, err := ioutil.ReadAll(rd)
-ok(t, err)
+OK(t, err)
-equals(t, test.data, string(buf))
+Equals(t, test.data, string(buf))
// compare content
-equals(t, test.data, string(buf))
+Equals(t, test.data, string(buf))
}
// test adding the first file again
@@ -61,31 +62,31 @@ func testBackend(b backend.Backend, t *testing.T) {
// create blob
blob, err := b.Create()
-ok(t, err)
+OK(t, err)
_, err = blob.Write([]byte(test.data))
-ok(t, err)
+OK(t, err)
err = blob.Finalize(tpe, test.id)
-assert(t, err != nil, "expected error, got %v", err)
+Assert(t, err != nil, "expected error, got %v", err)
// remove and recreate
err = b.Remove(tpe, test.id)
-ok(t, err)
+OK(t, err)
// create blob
blob, err = b.Create()
-ok(t, err)
+OK(t, err)
_, err = io.Copy(blob, bytes.NewReader([]byte(test.data)))
-ok(t, err)
+OK(t, err)
-ok(t, blob.Finalize(tpe, test.id))
+OK(t, blob.Finalize(tpe, test.id))
// list items
IDs := backend.IDs{}
for _, test := range TestStrings {
id, err := backend.ParseID(test.id)
-ok(t, err)
+OK(t, err)
IDs = append(IDs, id)
}
@@ -93,7 +94,7 @@ func testBackend(b backend.Backend, t *testing.T) {
i := 0
for s := range b.List(tpe, nil) {
-equals(t, IDs[i].String(), s)
+Equals(t, IDs[i].String(), s)
i++
}
@@ -101,17 +102,17 @@ func testBackend(b backend.Backend, t *testing.T) {
if *testCleanup {
for _, test := range TestStrings {
id, err := backend.ParseID(test.id)
-ok(t, err)
+OK(t, err)
found, err := b.Test(tpe, id.String())
-ok(t, err)
+OK(t, err)
-assert(t, found, fmt.Sprintf("id %q was not found before removal", id))
+Assert(t, found, fmt.Sprintf("id %q was not found before removal", id))
-ok(t, b.Remove(tpe, id.String()))
+OK(t, b.Remove(tpe, id.String()))
found, err = b.Test(tpe, id.String())
-ok(t, err)
+OK(t, err)
-assert(t, !found, fmt.Sprintf("id %q not found after removal", id))
+Assert(t, !found, fmt.Sprintf("id %q not found after removal", id))
}
}

View File

@@ -1,42 +1,12 @@
package backend_test
import (
-"fmt"
-"path/filepath"
-"reflect"
-"runtime"
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
-// assert fails the test if the condition is false.
-func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
-if !condition {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
-tb.FailNow()
-}
-}
-// ok fails the test if an err is not nil.
-func ok(tb testing.TB, err error) {
-if err != nil {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
-tb.FailNow()
-}
-}
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
-if !reflect.DeepEqual(exp, act) {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
-tb.FailNow()
-}
-}
func str2id(s string) backend.ID {
id, err := backend.ParseID(s)
if err != nil {
@@ -85,16 +55,16 @@ func TestPrefixLength(t *testing.T) {
}
l, err := backend.PrefixLength(m, backend.Snapshot)
-ok(t, err)
+OK(t, err)
-equals(t, 19, l)
+Equals(t, 19, l)
list = samples[:3]
l, err = backend.PrefixLength(m, backend.Snapshot)
-ok(t, err)
+OK(t, err)
-equals(t, 19, l)
+Equals(t, 19, l)
list = samples[3:]
l, err = backend.PrefixLength(m, backend.Snapshot)
-ok(t, err)
+OK(t, err)
-equals(t, 8, l)
+Equals(t, 8, l)
}

View File

@@ -6,6 +6,7 @@ import (
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
func randomID() []byte {
@@ -22,19 +23,19 @@ func TestSet(t *testing.T) {
testID := randomID()
err := s.Find(testID)
-assert(t, err != nil, "found test ID in IDSet before insertion")
+Assert(t, err != nil, "found test ID in IDSet before insertion")
for i := 0; i < 238; i++ {
s.Insert(randomID())
}
s.Insert(testID)
-ok(t, s.Find(testID))
+OK(t, s.Find(testID))
for i := 0; i < 80; i++ {
s.Insert(randomID())
}
s.Insert(testID)
-ok(t, s.Find(testID))
+OK(t, s.Find(testID))
}

View File

@@ -4,6 +4,7 @@ import (
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
var TestStrings = []struct {
@@ -19,24 +20,24 @@ var TestStrings = []struct {
func TestID(t *testing.T) {
for _, test := range TestStrings {
id, err := backend.ParseID(test.id)
-ok(t, err)
+OK(t, err)
id2, err := backend.ParseID(test.id)
-ok(t, err)
+OK(t, err)
-assert(t, id.Equal(id2), "ID.Equal() does not work as expected")
+Assert(t, id.Equal(id2), "ID.Equal() does not work as expected")
ret, err := id.EqualString(test.id)
-ok(t, err)
+OK(t, err)
-assert(t, ret, "ID.EqualString() returned wrong value")
+Assert(t, ret, "ID.EqualString() returned wrong value")
// test json marshalling
buf, err := id.MarshalJSON()
-ok(t, err)
+OK(t, err)
-equals(t, "\""+test.id+"\"", string(buf))
+Equals(t, "\""+test.id+"\"", string(buf))
var id3 backend.ID
err = id3.UnmarshalJSON(buf)
-ok(t, err)
+OK(t, err)
-equals(t, id, id3)
+Equals(t, id, id3)
}
}

View File

@@ -1,36 +0,0 @@
-package local_test
-import (
-"fmt"
-"path/filepath"
-"reflect"
-"runtime"
-"testing"
-)
-// assert fails the test if the condition is false.
-func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
-if !condition {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
-tb.FailNow()
-}
-}
-// ok fails the test if an err is not nil.
-func ok(tb testing.TB, err error) {
-if err != nil {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
-tb.FailNow()
-}
-}
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
-if !reflect.DeepEqual(exp, act) {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
-tb.FailNow()
-}
-}

View File

@@ -7,16 +7,17 @@ import (
"testing"
"github.com/restic/restic/backend/local"
+. "github.com/restic/restic/test"
)
var testCleanup = flag.Bool("test.cleanup", true, "clean up after running tests (remove local backend directory with all content)")
func setupLocalBackend(t *testing.T) *local.Local {
tempdir, err := ioutil.TempDir("", "restic-test-")
-ok(t, err)
+OK(t, err)
b, err := local.Create(tempdir)
-ok(t, err)
+OK(t, err)
t.Logf("created local backend at %s", tempdir)
@@ -29,14 +30,14 @@ func teardownLocalBackend(t *testing.T, b *local.Local) {
return
}
-ok(t, b.Delete())
+OK(t, b.Delete())
}
func TestLocalBackend(t *testing.T) {
// test for non-existing backend
b, err := local.Open("/invalid-restic-test")
-assert(t, err != nil, "opening invalid repository at /invalid-restic-test should have failed, but err is nil")
+Assert(t, err != nil, "opening invalid repository at /invalid-restic-test should have failed, but err is nil")
-assert(t, b == nil, fmt.Sprintf("opening invalid repository at /invalid-restic-test should have failed, but b is not nil: %v", b))
+Assert(t, b == nil, fmt.Sprintf("opening invalid repository at /invalid-restic-test should have failed, but b is not nil: %v", b))
s := setupLocalBackend(t)
defer teardownLocalBackend(t, s)
@@ -50,9 +51,9 @@ func TestLocalBackendCreationFailures(t *testing.T) {
// test failure to create a new repository at the same location
b2, err := local.Create(b.Location())
-assert(t, err != nil && b2 == nil, fmt.Sprintf("creating a repository at %s for the second time should have failed", b.Location()))
+Assert(t, err != nil && b2 == nil, fmt.Sprintf("creating a repository at %s for the second time should have failed", b.Location()))
// test failure to create a new repository at the same location without a config file
b2, err = local.Create(b.Location())
-assert(t, err != nil && b2 == nil, fmt.Sprintf("creating a repository at %s for the second time should have failed", b.Location()))
+Assert(t, err != nil && b2 == nil, fmt.Sprintf("creating a repository at %s for the second time should have failed", b.Location()))
}

View File

@@ -9,6 +9,7 @@ import (
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
func TestHashAppendReader(t *testing.T) {
@@ -27,22 +28,22 @@ func TestHashAppendReader(t *testing.T) {
target := bytes.NewBuffer(nil)
n, err := io.Copy(target, rd)
-ok(t, err)
+OK(t, err)
-assert(t, n == int64(size)+int64(len(expectedHash)),
+Assert(t, n == int64(size)+int64(len(expectedHash)),
"HashAppendReader: invalid number of bytes read: got %d, expected %d",
n, size+len(expectedHash))
r := target.Bytes()
resultingHash := r[len(r)-len(expectedHash):]
-assert(t, bytes.Equal(expectedHash[:], resultingHash),
+Assert(t, bytes.Equal(expectedHash[:], resultingHash),
"HashAppendReader: hashes do not match: expected %02x, got %02x",
expectedHash, resultingHash)
// try to read again, must return io.EOF
n2, err := rd.Read(make([]byte, 100))
-assert(t, n2 == 0, "HashAppendReader returned %d additional bytes", n)
+Assert(t, n2 == 0, "HashAppendReader returned %d additional bytes", n)
-assert(t, err == io.EOF, "HashAppendReader returned %v instead of EOF", err)
+Assert(t, err == io.EOF, "HashAppendReader returned %v instead of EOF", err)
}
}
@@ -61,20 +62,20 @@ func TestHashingReader(t *testing.T) {
rd := backend.NewHashingReader(bytes.NewReader(data), sha256.New())
n, err := io.Copy(ioutil.Discard, rd)
-ok(t, err)
+OK(t, err)
-assert(t, n == int64(size),
+Assert(t, n == int64(size),
"HashAppendReader: invalid number of bytes read: got %d, expected %d",
n, size)
resultingHash := rd.Sum(nil)
-assert(t, bytes.Equal(expectedHash[:], resultingHash),
+Assert(t, bytes.Equal(expectedHash[:], resultingHash),
"HashAppendReader: hashes do not match: expected %02x, got %02x",
expectedHash, resultingHash)
// try to read again, must return io.EOF
n2, err := rd.Read(make([]byte, 100))
-assert(t, n2 == 0, "HashAppendReader returned %d additional bytes", n)
+Assert(t, n2 == 0, "HashAppendReader returned %d additional bytes", n)
-assert(t, err == io.EOF, "HashAppendReader returned %v instead of EOF", err)
+Assert(t, err == io.EOF, "HashAppendReader returned %v instead of EOF", err)
}
}

View File

@@ -7,16 +7,17 @@ import (
"testing"
"github.com/restic/restic/backend/sftp"
+. "github.com/restic/restic/test"
)
var sftpPath = flag.String("test.sftppath", "", "sftp binary path (default: empty)")
func setupSFTPBackend(t *testing.T) *sftp.SFTP {
tempdir, err := ioutil.TempDir("", "restic-test-")
-ok(t, err)
+OK(t, err)
b, err := sftp.Create(tempdir, *sftpPath)
-ok(t, err)
+OK(t, err)
t.Logf("created sftp backend locally at %s", tempdir)
@@ -30,7 +31,7 @@ func teardownSFTPBackend(t *testing.T, b *sftp.SFTP) {
}
err := os.RemoveAll(b.Location())
-ok(t, err)
+OK(t, err)
}
func TestSFTPBackend(t *testing.T) {

View File

@@ -9,6 +9,7 @@ import (
"testing"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
func TestHashAppendWriter(t *testing.T) {
@@ -27,22 +28,22 @@ func TestHashAppendWriter(t *testing.T) {
wr := backend.NewHashAppendWriter(target, sha256.New())
_, err = wr.Write(data)
-ok(t, err)
+OK(t, err)
-ok(t, wr.Close())
+OK(t, wr.Close())
-assert(t, len(target.Bytes()) == size+len(expectedHash),
+Assert(t, len(target.Bytes()) == size+len(expectedHash),
"HashAppendWriter: invalid number of bytes written: got %d, expected %d",
len(target.Bytes()), size+len(expectedHash))
r := target.Bytes()
resultingHash := r[len(r)-len(expectedHash):]
-assert(t, bytes.Equal(expectedHash[:], resultingHash),
+Assert(t, bytes.Equal(expectedHash[:], resultingHash),
"HashAppendWriter: hashes do not match: expected %02x, got %02x",
expectedHash, resultingHash)
// write again, this must return an error
_, err = wr.Write([]byte{23})
-assert(t, err != nil,
+Assert(t, err != nil,
"HashAppendWriter: Write() after Close() did not return an error")
}
}
@@ -62,18 +63,18 @@ func TestHashingWriter(t *testing.T) {
wr := backend.NewHashingWriter(ioutil.Discard, sha256.New())
n, err := io.Copy(wr, bytes.NewReader(data))
-ok(t, err)
+OK(t, err)
-assert(t, n == int64(size),
+Assert(t, n == int64(size),
"HashAppendWriter: invalid number of bytes written: got %d, expected %d",
n, size)
-assert(t, wr.Size() == size,
+Assert(t, wr.Size() == size,
"HashAppendWriter: invalid number of bytes returned: got %d, expected %d",
wr.Size, size)
resultingHash := wr.Sum(nil)
-assert(t, bytes.Equal(expectedHash[:], resultingHash),
+Assert(t, bytes.Equal(expectedHash[:], resultingHash),
"HashAppendWriter: hashes do not match: expected %02x, got %02x",
expectedHash, resultingHash)
}

View File

@@ -6,6 +6,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
func TestCache(t *testing.T) {
@@ -15,45 +16,45 @@ func TestCache(t *testing.T) {
server.SetKey(key)
cache, err := restic.NewCache(server)
-ok(t, err)
+OK(t, err)
arch, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
// archive some files, this should automatically cache all blobs from the snapshot
_, id, err := arch.Snapshot(nil, []string{*benchArchiveDirectory}, nil)
// try to load map from cache
rd, err := cache.Load(backend.Snapshot, "blobs", id)
-ok(t, err)
+OK(t, err)
dec := json.NewDecoder(rd)
m := &restic.Map{}
err = dec.Decode(m)
-ok(t, err)
+OK(t, err)
// remove cached blob list
-ok(t, cache.Purge(backend.Snapshot, "blobs", id))
+OK(t, cache.Purge(backend.Snapshot, "blobs", id))
// load map from cache again, this should fail
rd, err = cache.Load(backend.Snapshot, "blobs", id)
-assert(t, err != nil, "Expected failure did not occur")
+Assert(t, err != nil, "Expected failure did not occur")
// recreate cached blob list
err = cache.RefreshSnapshots(server, nil)
-ok(t, err)
+OK(t, err)
// load map from cache again
rd, err = cache.Load(backend.Snapshot, "blobs", id)
-ok(t, err)
+OK(t, err)
dec = json.NewDecoder(rd)
m2 := &restic.Map{}
err = dec.Decode(m2)
-ok(t, err)
+OK(t, err)
// compare maps
-assert(t, m.Equals(m2), "Maps are not equal")
+Assert(t, m.Equals(m2), "Maps are not equal")
}

View File

@@ -15,6 +15,7 @@ import (
"time"
"github.com/restic/restic/chunker"
+. "github.com/restic/restic/test"
)
var benchmarkFile = flag.String("bench.file", "", "read from this file for benchmark")
@@ -189,7 +190,7 @@ func TestChunkerWithRandomPolynomial(t *testing.T) {
// generate a new random polynomial
start := time.Now()
p, err := chunker.RandomPolynomial()
-ok(t, err)
+OK(t, err)
t.Logf("generating random polynomial took %v", time.Since(start))
start = time.Now()
@@ -199,11 +200,11 @@ func TestChunkerWithRandomPolynomial(t *testing.T) {
// make sure that first chunk is different
c, err := ch.Next()
-assert(t, c.Cut != chunks1[0].CutFP,
+Assert(t, c.Cut != chunks1[0].CutFP,
"Cut point is the same")
-assert(t, c.Length != chunks1[0].Length,
+Assert(t, c.Length != chunks1[0].Length,
"Length is the same")
-assert(t, !bytes.Equal(c.Digest, chunks1[0].Digest),
+Assert(t, !bytes.Equal(c.Digest, chunks1[0].Digest),
"Digest is the same")
}
@@ -327,7 +328,7 @@ func BenchmarkChunker(b *testing.B) {
func BenchmarkNewChunker(b *testing.B) {
p, err := chunker.RandomPolynomial()
-ok(b, err)
+OK(b, err)
b.ResetTimer()

View File

@@ -1,36 +0,0 @@
-package chunker_test
-import (
-"fmt"
-"path/filepath"
-"reflect"
-"runtime"
-"testing"
-)
-// assert fails the test if the condition is false.
-func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
-if !condition {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
-tb.FailNow()
-}
-}
-// ok fails the test if an err is not nil.
-func ok(tb testing.TB, err error) {
-if err != nil {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
-tb.FailNow()
-}
-}
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
-if !reflect.DeepEqual(exp, act) {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
-tb.FailNow()
-}
-}

View File

@@ -5,6 +5,7 @@ import (
"testing"
"github.com/restic/restic/chunker"
+. "github.com/restic/restic/test"
)
var polAddTests = []struct {
@@ -18,8 +19,8 @@ var polAddTests = []struct {
func TestPolAdd(t *testing.T) {
for _, test := range polAddTests {
-equals(t, test.sum, test.x.Add(test.y))
+Equals(t, test.sum, test.x.Add(test.y))
-equals(t, test.sum, test.y.Add(test.x))
+Equals(t, test.sum, test.y.Add(test.x))
}
}
@@ -77,11 +78,11 @@ var polMulTests = []struct {
func TestPolMul(t *testing.T) {
for i, test := range polMulTests {
m := test.x.Mul(test.y)
-assert(t, test.res == m,
+Assert(t, test.res == m,
"TestPolMul failed for test %d: %v * %v: want %v, got %v",
i, test.x, test.y, test.res, m)
m = test.y.Mul(test.x)
-assert(t, test.res == test.y.Mul(test.x),
+Assert(t, test.res == test.y.Mul(test.x),
"TestPolMul failed for %d: %v * %v: want %v, got %v",
i, test.x, test.y, test.res, m)
}
@@ -138,7 +139,7 @@ var polDivTests = []struct {
func TestPolDiv(t *testing.T) {
for i, test := range polDivTests {
m := test.x.Div(test.y)
-assert(t, test.res == m,
+Assert(t, test.res == m,
"TestPolDiv failed for test %d: %v * %v: want %v, got %v",
i, test.x, test.y, test.res, m)
}
@@ -175,7 +176,7 @@ var polModTests = []struct {
func TestPolModt(t *testing.T) {
for _, test := range polModTests {
-equals(t, test.res, test.x.Mod(test.y))
+Equals(t, test.res, test.x.Mod(test.y))
}
}
@@ -221,20 +222,20 @@ func BenchmarkPolDeg(t *testing.B) {
func TestRandomPolynomial(t *testing.T) {
_, err := chunker.RandomPolynomial()
-ok(t, err)
+OK(t, err)
}
func BenchmarkRandomPolynomial(t *testing.B) {
for i := 0; i < t.N; i++ {
_, err := chunker.RandomPolynomial()
-ok(t, err)
+OK(t, err)
}
}
func TestExpandPolynomial(t *testing.T) {
pol := chunker.Pol(0x3DA3358B4DC173)
s := pol.Expand()
-equals(t, "x^53+x^52+x^51+x^50+x^48+x^47+x^45+x^41+x^40+x^37+x^36+x^34+x^32+x^31+x^27+x^25+x^24+x^22+x^19+x^18+x^16+x^15+x^14+x^8+x^6+x^5+x^4+x+1", s)
+Equals(t, "x^53+x^52+x^51+x^50+x^48+x^47+x^45+x^41+x^40+x^37+x^36+x^34+x^32+x^31+x^27+x^25+x^24+x^22+x^19+x^18+x^16+x^15+x^14+x^8+x^6+x^5+x^4+x+1", s)
}
var polIrredTests = []struct {
@@ -269,7 +270,7 @@ var polIrredTests = []struct {
func TestPolIrreducible(t *testing.T) {
for _, test := range polIrredTests {
-assert(t, test.f.Irreducible() == test.irred,
+Assert(t, test.f.Irreducible() == test.irred,
"Irreducibility test for Polynomial %v failed: got %v, wanted %v",
test.f, test.f.Irreducible(), test.irred)
}
@@ -328,11 +329,11 @@ var polGCDTests = []struct {
func TestPolGCD(t *testing.T) {
for i, test := range polGCDTests {
gcd := test.f1.GCD(test.f2)
-assert(t, test.gcd == gcd,
+Assert(t, test.gcd == gcd,
"GCD test %d (%+v) failed: got %v, wanted %v",
i, test, gcd, test.gcd)
gcd = test.f2.GCD(test.f1)
-assert(t, test.gcd == gcd,
+Assert(t, test.gcd == gcd,
"GCD test %d (%+v) failed: got %v, wanted %v",
i, test, gcd, test.gcd)
}
@@ -361,7 +362,7 @@ var polMulModTests = []struct {
func TestPolMulMod(t *testing.T) {
for i, test := range polMulModTests {
mod := test.f1.MulMod(test.f2, test.g)
-assert(t, mod == test.mod,
+Assert(t, mod == test.mod,
"MulMod test %d (%+v) failed: got %v, wanted %v",
i, test, mod, test.mod)
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/chunker"
+. "github.com/restic/restic/test"
)
func TestEncryptDecrypt(t *testing.T) {
@@ -24,18 +25,18 @@ func TestEncryptDecrypt(t *testing.T) {
for _, size := range tests {
data := make([]byte, size)
_, err := io.ReadFull(randomReader(42, size), data)
-ok(t, err)
+OK(t, err)
ciphertext := restic.GetChunkBuf("TestEncryptDecrypt")
n, err := k.Encrypt(ciphertext, data)
-ok(t, err)
+OK(t, err)
plaintext, err := k.Decrypt(nil, ciphertext[:n])
-ok(t, err)
+OK(t, err)
restic.FreeChunkBuf("TestEncryptDecrypt", ciphertext)
-equals(t, plaintext, data)
+Equals(t, plaintext, data)
}
}
@@ -47,15 +48,15 @@ func TestSmallBuffer(t *testing.T) {
size := 600
data := make([]byte, size)
f, err := os.Open("/dev/urandom")
-ok(t, err)
+OK(t, err)
_, err = io.ReadFull(f, data)
-ok(t, err)
+OK(t, err)
ciphertext := make([]byte, size/2)
_, err = k.Encrypt(ciphertext, data)
// this must throw an error, since the target slice is too small
-assert(t, err != nil && err == restic.ErrBufferTooSmall,
+Assert(t, err != nil && err == restic.ErrBufferTooSmall,
"expected restic.ErrBufferTooSmall, got %#v", err)
}
@@ -71,19 +72,19 @@ func TestLargeEncrypt(t *testing.T) {
for _, size := range []int{chunker.MaxSize, chunker.MaxSize + 1, chunker.MaxSize + 1<<20} {
data := make([]byte, size)
f, err := os.Open("/dev/urandom")
-ok(t, err)
+OK(t, err)
_, err = io.ReadFull(f, data)
-ok(t, err)
+OK(t, err)
ciphertext := make([]byte, size+restic.CiphertextExtension)
n, err := k.Encrypt(ciphertext, data)
-ok(t, err)
+OK(t, err)
plaintext, err := k.Decrypt([]byte{}, ciphertext[:n])
-ok(t, err)
+OK(t, err)
-equals(t, plaintext, data)
+Equals(t, plaintext, data)
}
}
@@ -102,8 +103,8 @@ func BenchmarkEncryptWriter(b *testing.B) {
rd.Seek(0, 0)
wr := k.EncryptTo(ioutil.Discard)
_, err := io.Copy(wr, rd)
-ok(b, err)
+OK(b, err)
-ok(b, wr.Close())
+OK(b, wr.Close())
}
}
@@ -122,7 +123,7 @@ func BenchmarkEncrypt(b *testing.B) {
for i := 0; i < b.N; i++ {
_, err := k.Encrypt(buf, data)
-ok(b, err)
+OK(b, err)
}
}
@@ -136,7 +137,7 @@ func BenchmarkDecryptReader(b *testing.B) {
ciphertext := make([]byte, len(buf)+restic.CiphertextExtension)
_, err := k.Encrypt(ciphertext, buf)
-ok(b, err)
+OK(b, err)
rd := bytes.NewReader(ciphertext)
@@ -146,10 +147,10 @@ func BenchmarkDecryptReader(b *testing.B) {
for i := 0; i < b.N; i++ {
rd.Seek(0, 0)
decRd, err := k.DecryptFrom(rd)
-ok(b, err)
+OK(b, err)
_, err = io.Copy(ioutil.Discard, decRd)
-ok(b, err)
+OK(b, err)
}
}
@@ -170,14 +171,14 @@ func BenchmarkEncryptDecryptReader(b *testing.B) {
buf.Reset()
wr := k.EncryptTo(buf)
_, err := io.Copy(wr, rd)
-ok(b, err)
+OK(b, err)
-ok(b, wr.Close())
+OK(b, wr.Close())
r, err := k.DecryptFrom(buf)
-ok(b, err)
+OK(b, err)
_, err = io.Copy(ioutil.Discard, r)
-ok(b, err)
+OK(b, err)
}
restic.PoolAlloc()
@@ -197,14 +198,14 @@ func BenchmarkDecrypt(b *testing.B) {
defer restic.FreeChunkBuf("BenchmarkDecrypt", plaintext)
n, err := k.Encrypt(ciphertext, data)
-ok(b, err)
+OK(b, err)
b.ResetTimer()
b.SetBytes(int64(size))
for i := 0; i < b.N; i++ {
plaintext, err = k.Decrypt(plaintext, ciphertext[:n])
-ok(b, err)
+OK(b, err)
}
}
@@ -221,24 +222,24 @@ func TestEncryptStreamWriter(t *testing.T) {
for _, size := range tests {
data := make([]byte, size)
_, err := io.ReadFull(randomReader(42, size), data)
-ok(t, err)
+OK(t, err)
ciphertext := bytes.NewBuffer(nil)
wr := k.EncryptTo(ciphertext)
_, err = io.Copy(wr, bytes.NewReader(data))
-ok(t, err)
+OK(t, err)
-ok(t, wr.Close())
+OK(t, wr.Close())
l := len(data) + restic.CiphertextExtension
-assert(t, len(ciphertext.Bytes()) == l,
+Assert(t, len(ciphertext.Bytes()) == l,
"wrong ciphertext length: expected %d, got %d",
l, len(ciphertext.Bytes()))
// decrypt with default function
plaintext, err := k.Decrypt([]byte{}, ciphertext.Bytes())
-ok(t, err)
+OK(t, err)
-assert(t, bytes.Equal(data, plaintext),
+Assert(t, bytes.Equal(data, plaintext),
"wrong plaintext after decryption: expected %02x, got %02x",
data, plaintext)
}
@@ -257,24 +258,24 @@ func TestDecryptStreamReader(t *testing.T) {
for _, size := range tests {
data := make([]byte, size)
_, err := io.ReadFull(randomReader(42, size), data)
-ok(t, err)
+OK(t, err)
ciphertext := make([]byte, size+restic.CiphertextExtension)
// encrypt with default function
n, err := k.Encrypt(ciphertext, data)
-ok(t, err)
+OK(t, err)
-assert(t, n == len(data)+restic.CiphertextExtension,
+Assert(t, n == len(data)+restic.CiphertextExtension,
"wrong number of bytes returned after encryption: expected %d, got %d",
len(data)+restic.CiphertextExtension, n)
rd, err := k.DecryptFrom(bytes.NewReader(ciphertext))
-ok(t, err)
+OK(t, err)
plaintext, err := ioutil.ReadAll(rd)
-ok(t, err)
+OK(t, err)
-assert(t, bytes.Equal(data, plaintext),
+Assert(t, bytes.Equal(data, plaintext),
"wrong plaintext after decryption: expected %02x, got %02x",
data, plaintext)
}
@@ -293,26 +294,26 @@ func TestEncryptWriter(t *testing.T) {
for _, size := range tests {
data := make([]byte, size)
_, err := io.ReadFull(randomReader(42, size), data)
-ok(t, err)
+OK(t, err)
buf := bytes.NewBuffer(nil)
wr := k.EncryptTo(buf)
_, err = io.Copy(wr, bytes.NewReader(data))
-ok(t, err)
+OK(t, err)
-ok(t, wr.Close())
+OK(t, wr.Close())
ciphertext := buf.Bytes()
l := len(data) + restic.CiphertextExtension
-assert(t, len(ciphertext) == l,
+Assert(t, len(ciphertext) == l,
"wrong ciphertext length: expected %d, got %d",
l, len(ciphertext))
// decrypt with default function
plaintext, err := k.Decrypt([]byte{}, ciphertext)
-ok(t, err)
+OK(t, err)
-assert(t, bytes.Equal(data, plaintext),
+Assert(t, bytes.Equal(data, plaintext),
"wrong plaintext after decryption: expected %02x, got %02x",
data, plaintext)
}

View File

@@ -1,36 +0,0 @@
-package restic_test
-import (
-"fmt"
-"path/filepath"
-"reflect"
-"runtime"
-"testing"
-)
-// assert fails the test if the condition is false.
-func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
-if !condition {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
-tb.FailNow()
-}
-}
-// ok fails the test if an err is not nil.
-func ok(tb testing.TB, err error) {
-if err != nil {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
-tb.FailNow()
-}
-}
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
-if !reflect.DeepEqual(exp, act) {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
-tb.FailNow()
-}
-}

View File

@@ -9,6 +9,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/backend/local"
+. "github.com/restic/restic/test"
)
var testPassword = "foobar"
@@ -18,15 +19,15 @@ var testTempDir = flag.String("test.tempdir", "", "use this directory for tempor
func setupBackend(t testing.TB) restic.Server {
tempdir, err := ioutil.TempDir(*testTempDir, "restic-test-")
-ok(t, err)
+OK(t, err)
// create repository below temp dir
b, err := local.Create(filepath.Join(tempdir, "repo"))
-ok(t, err)
+OK(t, err)
// set cache dir
err = os.Setenv("RESTIC_CACHE", filepath.Join(tempdir, "cache"))
-ok(t, err)
+OK(t, err)
return restic.NewServer(b)
}
@@ -38,12 +39,12 @@ func teardownBackend(t testing.TB, s restic.Server) {
return
}
-ok(t, s.Delete())
+OK(t, s.Delete())
}
func setupKey(t testing.TB, s restic.Server, password string) *restic.Key {
k, err := restic.CreateKey(s, password)
-ok(t, err)
+OK(t, err)
return k
}

View File

@@ -12,6 +12,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
var maxWorkers = flag.Uint("workers", 20, "number of workers to test Map concurrent access against")
@@ -46,12 +47,12 @@ func TestMap(t *testing.T) {
}
b2, err := bl.Find(restic.Blob{ID: b.ID, Size: b.Size})
-ok(t, err)
+OK(t, err)
-assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
+Assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
b2, err = bl.FindID(b.ID)
-ok(t, err)
+OK(t, err)
-assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
+Assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
bl2 := restic.NewMap()
for i := 0; i < 1000; i++ {
@@ -59,7 +60,7 @@ func TestMap(t *testing.T) {
}
b2, err = bl2.Find(b)
-assert(t, err != nil, "found ID in restic that was never inserted: %v", b2)
+Assert(t, err != nil, "found ID in restic that was never inserted: %v", b2)
bl2.Merge(bl)
@@ -81,21 +82,21 @@ func TestMapJSON(t *testing.T) {
bl.Insert(b)
b2, err := bl.Find(b)
-ok(t, err)
+OK(t, err)
-assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
+Assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
buf, err := json.Marshal(bl)
-ok(t, err)
+OK(t, err)
bl2 := restic.Map{}
json.Unmarshal(buf, &bl2)
b2, err = bl2.Find(b)
-ok(t, err)
+OK(t, err)
-assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
+Assert(t, b2.Compare(b) == 0, "items are not equal: want %v, got %v", b, b2)
buf, err = json.Marshal(bl2)
-ok(t, err)
+OK(t, err)
}
// random insert/find access by several goroutines

View File

@@ -1,36 +0,0 @@
-package pipe_test
-import (
-"fmt"
-"path/filepath"
-"reflect"
-"runtime"
-"testing"
-)
-// assert fails the test if the condition is false.
-func assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
-if !condition {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
-tb.FailNow()
-}
-}
-// ok fails the test if an err is not nil.
-func ok(tb testing.TB, err error) {
-if err != nil {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
-tb.FailNow()
-}
-}
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
-if !reflect.DeepEqual(exp, act) {
-_, file, line, _ := runtime.Caller(1)
-fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
-tb.FailNow()
-}
-}

View File

@@ -9,6 +9,7 @@ import (
"time"
"github.com/restic/restic/pipe"
+. "github.com/restic/restic/test"
)
var testWalkerPath = flag.String("test.walkerpath", ".", "pipeline walker testpath (default: .)")
@@ -49,7 +50,7 @@ func TestPipelineWalkerWithSplit(t *testing.T) {
}
before, err := statPath(*testWalkerPath)
-ok(t, err)
+OK(t, err)
t.Logf("walking path %s with %d dirs, %d files", *testWalkerPath,
before.dirs, before.files)
@@ -120,7 +121,7 @@ func TestPipelineWalkerWithSplit(t *testing.T) {
resCh := make(chan pipe.Result, 1)
err = pipe.Walk([]string{*testWalkerPath}, done, jobs, resCh)
-ok(t, err)
+OK(t, err)
// wait for all workers to terminate
wg.Wait()
@@ -131,7 +132,7 @@ func TestPipelineWalkerWithSplit(t *testing.T) {
t.Logf("walked path %s with %d dirs, %d files", *testWalkerPath,
after.dirs, after.files)
-assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
+Assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
}
func TestPipelineWalker(t *testing.T) {
@@ -140,7 +141,7 @@ func TestPipelineWalker(t *testing.T) {
}
before, err := statPath(*testWalkerPath)
-ok(t, err)
+OK(t, err)
t.Logf("walking path %s with %d dirs, %d files", *testWalkerPath,
before.dirs, before.files)
@@ -160,7 +161,7 @@ func TestPipelineWalker(t *testing.T) {
// channel is closed
return
}
-assert(t, job != nil, "job is nil")
+Assert(t, job != nil, "job is nil")
switch j := job.(type) {
case pipe.Dir:
@@ -200,7 +201,7 @@ func TestPipelineWalker(t *testing.T) {
resCh := make(chan pipe.Result, 1)
err = pipe.Walk([]string{*testWalkerPath}, done, jobs, resCh)
-ok(t, err)
+OK(t, err)
// wait for all workers to terminate
wg.Wait()
@@ -211,7 +212,7 @@ func TestPipelineWalker(t *testing.T) {
t.Logf("walked path %s with %d dirs, %d files", *testWalkerPath,
after.dirs, after.files)
-assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
+Assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
}
func BenchmarkPipelineWalker(b *testing.B) {
@@ -300,7 +301,7 @@ func BenchmarkPipelineWalker(b *testing.B) {
resCh := make(chan pipe.Result, 1)
err := pipe.Walk([]string{*testWalkerPath}, done, jobs, resCh)
-ok(b, err)
+OK(b, err)
// wait for all workers to terminate
wg.Wait()
@@ -320,7 +321,7 @@ func TestPipelineWalkerMultiple(t *testing.T) {
paths, err := filepath.Glob(filepath.Join(*testWalkerPath, "*"))
before, err := statPath(*testWalkerPath)
-ok(t, err)
+OK(t, err)
t.Logf("walking paths %v with %d dirs, %d files", paths,
before.dirs, before.files)
@@ -337,7 +338,7 @@ func TestPipelineWalkerMultiple(t *testing.T) {
// channel is closed
return
}
-assert(t, job != nil, "job is nil")
+Assert(t, job != nil, "job is nil")
switch j := job.(type) {
case pipe.Dir:
@@ -377,7 +378,7 @@ func TestPipelineWalkerMultiple(t *testing.T) {
resCh := make(chan pipe.Result, 1)
err = pipe.Walk(paths, done, jobs, resCh)
-ok(t, err)
+OK(t, err)
// wait for all workers to terminate
wg.Wait()
@@ -387,5 +388,5 @@ func TestPipelineWalkerMultiple(t *testing.T) {
t.Logf("walked %d paths with %d dirs, %d files", len(paths), after.dirs, after.files)
-assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
+Assert(t, before == after, "stats do not match, expected %v, got %v", before, after)
}

View File

@@ -10,6 +10,7 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/backend"
+. "github.com/restic/restic/test"
)
type testJSONStruct struct {
@@ -30,14 +31,14 @@ func TestSaveJSON(t *testing.T) {
for _, obj := range serverTests {
data, err := json.Marshal(obj)
-ok(t, err)
+OK(t, err)
data = append(data, '\n')
h := sha256.Sum256(data)
blob, err := server.SaveJSON(backend.Tree, obj)
-ok(t, err)
+OK(t, err)
-assert(t, bytes.Equal(h[:], blob.ID),
+Assert(t, bytes.Equal(h[:], blob.ID),
"TestSaveJSON: wrong plaintext ID: expected %02x, got %02x",
h, blob.ID)
}
@@ -52,7 +53,7 @@ func BenchmarkSaveJSON(t *testing.B) {
obj := serverTests[0]
data, err := json.Marshal(obj)
-ok(t, err)
+OK(t, err)
data = append(data, '\n')
h := sha256.Sum256(data)
@@ -60,9 +61,9 @@ func BenchmarkSaveJSON(t *testing.B) {
for i := 0; i < t.N; i++ {
blob, err := server.SaveJSON(backend.Tree, obj)
-ok(t, err)
+OK(t, err)
-assert(t, bytes.Equal(h[:], blob.ID),
+Assert(t, bytes.Equal(h[:], blob.ID),
"TestSaveJSON: wrong plaintext ID: expected %02x, got %02x",
h, blob.ID)
}
@@ -79,22 +80,22 @@ func TestSaveFrom(t *testing.T) {
for _, size := range testSizes {
data := make([]byte, size)
_, err := io.ReadFull(rand.Reader, data)
-ok(t, err)
+OK(t, err)
id := sha256.Sum256(data)
// save
blob, err := server.SaveFrom(backend.Data, id[:], uint(size), bytes.NewReader(data))
-ok(t, err)
+OK(t, err)
// read back
buf, err := server.Load(backend.Data, blob)
-assert(t, len(buf) == len(data),
+Assert(t, len(buf) == len(data),
"number of bytes read back does not match: expected %d, got %d",
len(data), len(buf))
-assert(t, bytes.Equal(buf, data),
+Assert(t, bytes.Equal(buf, data),
"data does not match: expected %02x, got %02x",
data, buf)
}
@@ -110,7 +111,7 @@ func BenchmarkSaveFrom(t *testing.B) {
data := make([]byte, size)
_, err := io.ReadFull(rand.Reader, data)
-ok(t, err)
+OK(t, err)
id := sha256.Sum256(data)
@@ -120,7 +121,7 @@ func BenchmarkSaveFrom(t *testing.B) {
for i := 0; i < t.N; i++ {
// save
_, err := server.SaveFrom(backend.Data, id[:], uint(size), bytes.NewReader(data))
-ok(t, err)
+OK(t, err)
}
}
@@ -139,7 +140,7 @@ func TestServerStats(t *testing.T) {
t.Logf("archived snapshot %v", sn.ID())
stats, err := server.Stats()
-ok(t, err)
+OK(t, err)
t.Logf("stats: %v", stats)
}
@@ -160,15 +161,15 @@ func TestLoadJSONID(t *testing.T) {
// benchmark loading first tree
done := make(chan struct{})
first, found := <-server.List(backend.Tree, done)
-assert(t, found, "no Trees in repository found")
+Assert(t, found, "no Trees in repository found")
close(done)
id, err := backend.ParseID(first)
-ok(t, err)
+OK(t, err)
tree := restic.NewTree()
err = server.LoadJSONID(backend.Tree, id, &tree)
-ok(t, err)
+OK(t, err)
}
func BenchmarkLoadJSONID(t *testing.B) {
@@ -191,8 +192,8 @@ func BenchmarkLoadJSONID(t *testing.B) {
for i := 0; i < t.N; i++ {
for name := range server.List(backend.Tree, nil) {
id, err := backend.ParseID(name)
-ok(t, err)
+OK(t, err)
-ok(t, server.LoadJSONID(backend.Tree, id, &tree))
+OK(t, server.LoadJSONID(backend.Tree, id, &tree))
}
}
}

View File

@@ -5,16 +5,17 @@ import (
"time"
"github.com/restic/restic"
+. "github.com/restic/restic/test"
)
func testSnapshot(t *testing.T, s restic.Server) {
var err error
sn, err := restic.NewSnapshot([]string{"/home/foobar"})
-ok(t, err)
+OK(t, err)
// sn.Tree, err = restic.Blob{ID: backend.ParseID("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2")}
// ok(t, err)
sn.Time, err = time.Parse(time.RFC3339Nano, "2014-08-03T17:49:05.378595539+02:00")
-ok(t, err)
+OK(t, err)
// _, err = sn.Save(be)
// ok(t, err)

View File

@@ -8,6 +8,7 @@ import (
"testing"
"github.com/restic/restic"
+. "github.com/restic/restic/test"
)
var testFiles = []struct {
@@ -22,24 +23,24 @@ var testFiles = []struct {
// prepareDir creates a temporary directory and returns it.
func prepareDir(t *testing.T) string {
tempdir, err := ioutil.TempDir(*testTempDir, "restic-test-")
-ok(t, err)
+OK(t, err)
for _, test := range testFiles {
file := filepath.Join(tempdir, test.name)
dir := filepath.Dir(file)
if dir != "." {
-ok(t, os.MkdirAll(dir, 0755))
+OK(t, os.MkdirAll(dir, 0755))
}
f, err := os.Create(file)
defer func() {
-ok(t, f.Close())
+OK(t, f.Close())
}()
-ok(t, err)
+OK(t, err)
_, err = f.Write(test.content)
-ok(t, err)
+OK(t, err)
}
return tempdir
@@ -49,7 +50,7 @@ func TestTree(t *testing.T) {
dir := prepareDir(t)
defer func() {
if *testCleanup {
-ok(t, os.RemoveAll(dir))
+OK(t, os.RemoveAll(dir))
}
}()
}
@@ -65,11 +66,11 @@ var testNodes = []restic.Node{
func TestNodeMarshal(t *testing.T) {
for i, n := range testNodes {
data, err := json.Marshal(&n)
-ok(t, err)
+OK(t, err)
var node restic.Node
err = json.Unmarshal(data, &node)
-ok(t, err)
+OK(t, err)
if n.Name != node.Name {
t.Fatalf("Node %d: Names are not equal, want: %q got: %q", i, n.Name, node.Name)
@@ -79,14 +80,14 @@ func TestNodeMarshal(t *testing.T) {
func TestNodeComparison(t *testing.T) {
fi, err := os.Lstat("tree_test.go")
-ok(t, err)
+OK(t, err)
node, err := restic.NodeFromFileInfo("foo", fi)
-ok(t, err)
+OK(t, err)
n2 := *node
-assert(t, node.Equals(n2), "nodes aren't equal")
+Assert(t, node.Equals(n2), "nodes aren't equal")
n2.Size -= 1
-assert(t, !node.Equals(n2), "nodes are equal")
+Assert(t, !node.Equals(n2), "nodes are equal")
}

View File

@@ -7,13 +7,14 @@ import (
"github.com/restic/restic"
"github.com/restic/restic/pipe"
+. "github.com/restic/restic/test"
)
var testWalkDirectory = flag.String("test.walkdir", ".", "test walking a directory (globbing pattern, default: .)")
func TestWalkTree(t *testing.T) {
dirs, err := filepath.Glob(*testWalkDirectory)
-ok(t, err)
+OK(t, err)
server := setupBackend(t)
defer teardownBackend(t, server)
@@ -22,9 +23,9 @@ func TestWalkTree(t *testing.T) {
// archive a few files
arch, err := restic.NewArchiver(server)
-ok(t, err)
+OK(t, err)
sn, _, err := arch.Snapshot(nil, dirs, nil)
-ok(t, err)
+OK(t, err)
// start benchmark
// t.ResetTimer()
@@ -45,7 +46,7 @@ func TestWalkTree(t *testing.T) {
for {
// receive fs job
fsJob, fsChOpen := <-fsJobs
-assert(t, !fsChOpen || fsJob != nil,
+Assert(t, !fsChOpen || fsJob != nil,
"received nil job from filesystem: %v %v", fsJob, fsChOpen)
var path string
@@ -66,7 +67,7 @@ func TestWalkTree(t *testing.T) {
treeEntries = len(treeJob.Tree.Nodes)
}
-assert(t, fsChOpen == treeChOpen,
+Assert(t, fsChOpen == treeChOpen,
"one channel closed too early: fsChOpen %v, treeChOpen %v",
fsChOpen, treeChOpen)
@@ -74,10 +75,10 @@ func TestWalkTree(t *testing.T) {
break
}
-assert(t, filepath.Base(path) == filepath.Base(treeJob.Path),
+Assert(t, filepath.Base(path) == filepath.Base(treeJob.Path),
"paths do not match: %q != %q", filepath.Base(path), filepath.Base(treeJob.Path))
-assert(t, fsEntries == treeEntries,
+Assert(t, fsEntries == treeEntries,
"wrong number of entries: %v != %v", fsEntries, treeEntries)
}
// }