tests: Add benchmarks for partial file (+offset)
Commit 8fc25cc567 (parent 77ebb95d3d)
@@ -8,24 +8,29 @@ import (
 	"testing"
 )
 
-// BackendBenchmarkLoad benchmarks the backend's Load function.
-func BackendBenchmarkLoadFile(t *testing.B, s *Suite) {
-	be := s.open(t)
-	defer s.close(t, be)
-
-	length := 1<<24 + 2123
+func saveRandomFile(t testing.TB, be restic.Backend, length int) ([]byte, restic.Handle) {
 	data := test.Random(23, length)
 	id := restic.Hash(data)
 	handle := restic.Handle{Type: restic.DataFile, Name: id.String()}
 	if err := be.Save(handle, bytes.NewReader(data)); err != nil {
 		t.Fatalf("Save() error: %+v", err)
 	}
+	return data, handle
+}
 
-	defer func() {
-		if err := be.Remove(handle); err != nil {
-			t.Fatalf("Remove() returned error: %v", err)
-		}
-	}()
+func remove(t testing.TB, be restic.Backend, h restic.Handle) {
+	if err := be.Remove(h); err != nil {
+		t.Fatalf("Remove() returned error: %v", err)
+	}
+}
+
+func BackendBenchmarkLoadFile(t *testing.B, s *Suite) {
+	be := s.open(t)
+	defer s.close(t, be)
+
+	length := 1<<24 + 2123
+	data, handle := saveRandomFile(t, be, length)
+	defer remove(t, be, handle)
 
 	buf := make([]byte, length)
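The hunk above pulls fixture setup and teardown out of the benchmark body: saveRandomFile generates test data with test.Random(23, length) (presumably a fixed-seed pseudo-random generator, so each run measures identical content), saves it, and returns the data plus its handle; remove is meant to be deferred for cleanup. Both take testing.TB so they can be reused from tests and benchmarks alike. As a rough, self-contained sketch of the same pattern outside restic (the temp-file helpers below are illustrative assumptions, not part of this commit):

	// Sketch only: the saveRandomFile/remove pattern against a plain temp
	// file instead of a restic.Backend. Helper names here are hypothetical.
	package example

	import (
		"math/rand"
		"os"
		"testing"
	)

	func saveRandomTempFile(t testing.TB, length int) ([]byte, string) {
		data := make([]byte, length)
		rand.New(rand.NewSource(23)).Read(data) // fixed seed: reproducible fixture

		f, err := os.CreateTemp("", "bench-")
		if err != nil {
			t.Fatalf("CreateTemp() error: %v", err)
		}
		if _, err := f.Write(data); err != nil {
			t.Fatalf("Write() error: %v", err)
		}
		if err := f.Close(); err != nil {
			t.Fatalf("Close() error: %v", err)
		}
		return data, f.Name()
	}

	func removeTempFile(t testing.TB, path string) { // meant to be deferred
		if err := os.Remove(path); err != nil {
			t.Fatalf("Remove() returned error: %v", err)
		}
	}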
@@ -54,6 +59,88 @@ func BackendBenchmarkLoadFile(t *testing.B, s *Suite) {
 		if !bytes.Equal(data, buf) {
 			t.Fatalf("wrong bytes returned")
 		}
 	}
 }
+
+func BackendBenchmarkLoadPartialFile(t *testing.B, s *Suite) {
+	be := s.open(t)
+	defer s.close(t, be)
+
+	datalength := 1<<24 + 2123
+	data, handle := saveRandomFile(t, be, datalength)
+	defer remove(t, be, handle)
+
+	testLength := datalength/4 + 555
+
+	buf := make([]byte, testLength)
+
+	t.SetBytes(int64(testLength))
+	t.ResetTimer()
+
+	for i := 0; i < t.N; i++ {
+		rd, err := be.Load(handle, testLength, 0)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		n, err := io.ReadFull(rd, buf)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		if err = rd.Close(); err != nil {
+			t.Fatalf("Close() returned error: %v", err)
+		}
+
+		if n != testLength {
+			t.Fatalf("wrong number of bytes read: want %v, got %v", testLength, n)
+		}
+
+		if !bytes.Equal(data[:testLength], buf) {
+			t.Fatalf("wrong bytes returned")
+		}
+
+	}
+}
+
+func BackendBenchmarkLoadPartialFileOffset(t *testing.B, s *Suite) {
+	be := s.open(t)
+	defer s.close(t, be)
+
+	datalength := 1<<24 + 2123
+	data, handle := saveRandomFile(t, be, datalength)
+	defer remove(t, be, handle)
+
+	testLength := datalength/4 + 555
+	testOffset := 8273
+
+	buf := make([]byte, testLength)
+
+	t.SetBytes(int64(testLength))
+	t.ResetTimer()
+
+	for i := 0; i < t.N; i++ {
+		rd, err := be.Load(handle, testLength, int64(testOffset))
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		n, err := io.ReadFull(rd, buf)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		if err = rd.Close(); err != nil {
+			t.Fatalf("Close() returned error: %v", err)
+		}
+
+		if n != testLength {
+			t.Fatalf("wrong number of bytes read: want %v, got %v", testLength, n)
+		}
+
+		if !bytes.Equal(data[testOffset:testOffset+testLength], buf) {
+			t.Fatalf("wrong bytes returned")
+		}
+
+	}
+}
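Both new benchmarks follow the standard testing.B throughput pattern: the fixture is created before t.ResetTimer so setup is excluded from the measurement, t.SetBytes(int64(testLength)) makes `go test -bench` report throughput (MB/s) per iteration, and io.ReadFull guarantees the buffer is filled completely or the iteration fails. A minimal standalone sketch of that pattern, benchmarking a partial read from an in-memory bytes.Reader rather than a restic backend (the benchmark name is illustrative):

	package example

	import (
		"bytes"
		"io"
		"testing"
	)

	func BenchmarkPartialRead(b *testing.B) {
		data := make([]byte, 1<<24+2123) // fixture setup, not measured
		testLength := len(data)/4 + 555
		buf := make([]byte, testLength)

		b.SetBytes(int64(testLength)) // report MB/s for the measured loop
		b.ResetTimer()                // exclude the setup above from timing

		for i := 0; i < b.N; i++ {
			rd := bytes.NewReader(data[:testLength])
			if _, err := io.ReadFull(rd, buf); err != nil {
				b.Fatal(err)
			}
		}
	}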
@@ -25,4 +25,6 @@ var benchmarkFunctions = []struct {
 	Fn func(*testing.B, *Suite)
 }{
 	{"LoadFile", BackendBenchmarkLoadFile},
+	{"LoadPartialFile", BackendBenchmarkLoadPartialFile},
+	{"LoadPartialFileOffset", BackendBenchmarkLoadPartialFileOffset},
 }
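The final hunk registers the two new functions in the benchmarkFunctions table next to the existing LoadFile entry. The runner that consumes this table is not part of the diff; a typical shape, sketched here as an assumption (the RunBenchmarks method and the Name field are hypothetical), iterates the table and launches each entry as a sub-benchmark via b.Run:

	// Hypothetical sketch of a table-driven benchmark runner; not part of
	// this commit. Assumes the table's string field is called Name.
	func (s *Suite) RunBenchmarks(b *testing.B) {
		for _, bench := range benchmarkFunctions {
			bench := bench // capture loop variable for the closure
			b.Run(bench.Name, func(b *testing.B) {
				bench.Fn(b, s)
			})
		}
	}

With sub-benchmarks like this, individual cases can then be selected on the command line with go test's -bench flag (for example -bench=LoadPartialFile).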