Update code to use the new Chunker interface

commit 2ce49ea0ee (parent 3db569c45a)
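This commit drops the old Chunker API, in which chunker.New took a hash.Hash and each chunk carried a Digest plus a Reader over the source file, in favor of the new one: New(rd, pol) only needs the reader and the polynomial, Next(buf) takes a caller-supplied buffer, and the returned chunker.Chunk is a value whose bytes are already in chunk.Data. The sketch below contrasts the two call patterns as they appear in the hunks; it is an illustration distilled from this diff, not code from the commit, and the old-interface lines are shown only as comments.

package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"
	"io"

	"github.com/restic/chunker"
)

func main() {
	pol := chunker.Pol(0x3DA3358B4DC173) // test polynomial used in this diff
	rd := bytes.NewReader(make([]byte, 1<<20))

	// Old interface, removed by this commit:
	//   ch := chunker.New(rd, pol, sha256.New())
	//   chunk, err := ch.Next()        // *chunker.Chunk
	//   id := chunk.Digest             // digest computed by the chunker
	//   rdr := chunk.Reader(rd)        // bytes re-read from the source
	//
	// New interface: the caller supplies a buffer, the chunk comes back by
	// value, and its bytes are already in chunk.Data.
	ch := chunker.New(rd, pol)
	buf := make([]byte, chunker.MinSize)

	for {
		chunk, err := ch.Next(buf)
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		// The chunker no longer hashes; callers digest chunk.Data themselves
		// (the archiver uses backend.Hash for this, see saveChunk below).
		fmt.Printf("chunk: %d bytes, sha256 %x\n", chunk.Length, sha256.Sum256(chunk.Data))
	}
}

Reusing one buffer across iterations is only safe because each chunk is consumed before the next call to Next; the archiver instead draws a fresh pooled buffer per chunk (see buffer_pool.go below) so chunks can be processed concurrently.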
@@ -1,7 +1,7 @@
 package restic
 
 import (
-	"crypto/sha256"
+	"bytes"
 	"encoding/json"
 	"fmt"
 	"io"
@@ -11,13 +11,14 @@ import (
 	"sync"
 	"time"
 
-	"github.com/restic/chunker"
 	"restic/backend"
 	"restic/debug"
 	"restic/pack"
 	"restic/pipe"
 	"restic/repository"
 
+	"github.com/restic/chunker"
+
 	"github.com/juju/errors"
 )
 
@@ -154,12 +155,11 @@ type saveResult struct {
 	bytes uint64
 }
 
-func (arch *Archiver) saveChunk(chunk *chunker.Chunk, p *Progress, token struct{}, file *os.File, resultChannel chan<- saveResult) {
-	hash := chunk.Digest
-	id := backend.ID{}
-	copy(id[:], hash)
+func (arch *Archiver) saveChunk(chunk chunker.Chunk, p *Progress, token struct{}, file *os.File, resultChannel chan<- saveResult) {
+	defer freeBuf(chunk.Data)
 
-	err := arch.Save(pack.Data, id, chunk.Length, chunk.Reader(file))
+	id := backend.Hash(chunk.Data)
+	err := arch.Save(pack.Data, id, chunk.Length, bytes.NewReader(chunk.Data))
 	// TODO handle error
 	if err != nil {
 		panic(err)
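With the chunker no longer hashing, saveChunk derives the blob ID from the in-memory data via backend.Hash(chunk.Data) instead of copying chunk.Digest into a backend.ID. A minimal stand-in for that helper is sketched below, assuming backend.Hash is simply a SHA-256 over the buffer returning a fixed-size ID, which is how the surrounding code treats it; ID and hash here are local names for illustration, not restic's actual definitions.

package main

import (
	"crypto/sha256"
	"fmt"
)

// ID stands in for restic's backend.ID here; the blob ID is a SHA-256 sum.
type ID [sha256.Size]byte

// hash mimics what the hunk above calls as backend.Hash(chunk.Data): the ID is
// computed from the in-memory chunk rather than copied from a chunker digest.
func hash(data []byte) ID {
	return ID(sha256.Sum256(data))
}

func main() {
	id := hash([]byte("example chunk payload"))
	fmt.Printf("%x\n", id[:])
}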
@@ -220,11 +220,11 @@ func (arch *Archiver) SaveFile(p *Progress, node *Node) error {
 		return err
 	}
 
-	chnker := chunker.New(file, arch.repo.Config.ChunkerPolynomial, sha256.New())
+	chnker := chunker.New(file, arch.repo.Config.ChunkerPolynomial)
 	resultChannels := [](<-chan saveResult){}
 
 	for {
-		chunk, err := chnker.Next()
+		chunk, err := chnker.Next(getBuf())
 		if err == io.EOF {
 			break
 		}
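SaveFile now hands a pooled buffer to every Next call and leaves releasing it to the consumer: saveChunk (above) runs defer freeBuf(chunk.Data). The self-contained sketch below reproduces that ownership handoff with the same getBuf/freeBuf helpers the commit adds in buffer_pool.go further down; the data source and the worker body are placeholders.

package main

import (
	"bytes"
	"fmt"
	"io"
	"sync"

	"github.com/restic/chunker"
)

// Pooled buffers, mirroring getBuf/freeBuf from buffer_pool.go in this commit.
var bufPool = sync.Pool{
	New: func() interface{} { return make([]byte, chunker.MinSize) },
}

func getBuf() []byte   { return bufPool.Get().([]byte) }
func freeBuf(b []byte) { bufPool.Put(b) }

func main() {
	pol := chunker.Pol(0x3DA3358B4DC173) // test polynomial from this diff
	ch := chunker.New(bytes.NewReader(make([]byte, 4<<20)), pol)

	var wg sync.WaitGroup
	for {
		// Each iteration hands a fresh pooled buffer to Next, so the returned
		// chunk owns its Data and can safely be processed concurrently.
		chunk, err := ch.Next(getBuf())
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}

		wg.Add(1)
		go func(c chunker.Chunk) {
			defer wg.Done()
			defer freeBuf(c.Data) // the consumer releases the buffer, as saveChunk does
			fmt.Printf("processed chunk of %d bytes\n", len(c.Data))
		}(chunk)
	}
	wg.Wait()
}

Because each chunk owns a distinct buffer, the next Next call cannot overwrite data a worker is still hashing, encrypting, or saving.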
@@ -2,12 +2,10 @@ package restic_test
 
 import (
 	"bytes"
-	"crypto/sha256"
 	"io"
 	"testing"
 	"time"
 
-	"github.com/restic/chunker"
 	"restic"
 	"restic/backend"
 	"restic/checker"
@@ -15,6 +13,8 @@ import (
 	"restic/pack"
 	"restic/repository"
 	. "restic/test"
+
+	"github.com/restic/chunker"
 )
 
 var testPol = chunker.Pol(0x3DA3358B4DC173)
@@ -24,17 +24,12 @@ type Rdr interface {
 	io.ReaderAt
 }
 
-type chunkedData struct {
-	buf    []byte
-	chunks []*chunker.Chunk
-}
-
 func benchmarkChunkEncrypt(b testing.TB, buf, buf2 []byte, rd Rdr, key *crypto.Key) {
 	rd.Seek(0, 0)
-	ch := chunker.New(rd, testPol, sha256.New())
+	ch := chunker.New(rd, testPol)
 
 	for {
-		chunk, err := ch.Next()
+		chunk, err := ch.Next(buf)
 
 		if err == io.EOF {
 			break
@@ -43,12 +38,10 @@ func benchmarkChunkEncrypt(b testing.TB, buf, buf2 []byte, rd Rdr, key *crypto.Key) {
 		OK(b, err)
 
 		// reduce length of buf
-		buf = buf[:chunk.Length]
-		n, err := io.ReadFull(chunk.Reader(rd), buf)
-		OK(b, err)
-		Assert(b, uint(n) == chunk.Length, "invalid length: got %d, expected %d", n, chunk.Length)
+		Assert(b, uint(len(chunk.Data)) == chunk.Length,
+			"invalid length: got %d, expected %d", len(chunk.Data), chunk.Length)
 
-		_, err = crypto.Encrypt(key, buf2, buf)
+		_, err = crypto.Encrypt(key, buf2, chunk.Data)
 		OK(b, err)
 	}
 }
@@ -72,18 +65,16 @@ func BenchmarkChunkEncrypt(b *testing.B) {
 }
 
 func benchmarkChunkEncryptP(b *testing.PB, buf []byte, rd Rdr, key *crypto.Key) {
-	ch := chunker.New(rd, testPol, sha256.New())
+	ch := chunker.New(rd, testPol)
 
 	for {
-		chunk, err := ch.Next()
+		chunk, err := ch.Next(buf)
		if err == io.EOF {
			break
		}
 
 		// reduce length of chunkBuf
-		buf = buf[:chunk.Length]
-		io.ReadFull(chunk.Reader(rd), buf)
-		crypto.Encrypt(key, buf, buf)
+		crypto.Encrypt(key, chunk.Data, chunk.Data)
 	}
 }
 
@@ -258,8 +249,7 @@ func testParallelSaveWithDuplication(t *testing.T, seed int) {
 	duplication := 7
 
 	arch := restic.NewArchiver(repo)
-	data, chunks := getRandomData(seed, dataSizeMb*1024*1024)
-	reader := bytes.NewReader(data)
+	chunks := getRandomData(seed, dataSizeMb*1024*1024)
 
 	errChannels := [](<-chan error){}
 
@@ -272,18 +262,15 @@ func testParallelSaveWithDuplication(t *testing.T, seed int) {
 			errChan := make(chan error)
 			errChannels = append(errChannels, errChan)
 
-			go func(reader *bytes.Reader, c *chunker.Chunk, errChan chan<- error) {
+			go func(c chunker.Chunk, errChan chan<- error) {
 				barrier <- struct{}{}
 
-				hash := c.Digest
-				id := backend.ID{}
-				copy(id[:], hash)
-
-				time.Sleep(time.Duration(hash[0]))
-				err := arch.Save(pack.Data, id, c.Length, c.Reader(reader))
+				id := backend.Hash(c.Data)
+				time.Sleep(time.Duration(id[0]))
+				err := arch.Save(pack.Data, id, c.Length, bytes.NewReader(c.Data))
 				<-barrier
 				errChan <- err
-			}(reader, c, errChan)
+			}(c, errChan)
 		}
 	}
 
@@ -298,20 +285,20 @@ func testParallelSaveWithDuplication(t *testing.T, seed int) {
 	assertNoUnreferencedPacks(t, chkr)
 }
 
-func getRandomData(seed int, size int) ([]byte, []*chunker.Chunk) {
+func getRandomData(seed int, size int) []chunker.Chunk {
 	buf := Random(seed, size)
-	chunks := []*chunker.Chunk{}
-	chunker := chunker.New(bytes.NewReader(buf), testPol, sha256.New())
+	var chunks []chunker.Chunk
+	chunker := chunker.New(bytes.NewReader(buf), testPol)
 
 	for {
-		c, err := chunker.Next()
+		c, err := chunker.Next(nil)
 		if err == io.EOF {
 			break
 		}
 		chunks = append(chunks, c)
 	}
 
-	return buf, chunks
+	return chunks
 }
 
 func createAndInitChecker(t *testing.T, repo *repository.Repository) *checker.Checker {
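getRandomData keeps every chunk it reads, so it now returns []chunker.Chunk and passes nil to Next rather than a reusable buffer; the chunker then presumably allocates storage for each chunk (append on a nil slice), which is what a helper that retains all chunks needs, since a shared buffer would be overwritten on the following call. Below is a small stand-alone version of that pattern, under the assumption that Next(nil) allocates as just described; collectChunks is a hypothetical helper, not part of the commit.

package main

import (
	"bytes"
	"fmt"
	"io"
	"math/rand"

	"github.com/restic/chunker"
)

var testPol = chunker.Pol(0x3DA3358B4DC173) // same test polynomial as the diff

// collectChunks mirrors getRandomData's loop: because every chunk is kept,
// it passes nil so each chunk gets its own freshly allocated Data slice.
func collectChunks(buf []byte) []chunker.Chunk {
	var chunks []chunker.Chunk
	ch := chunker.New(bytes.NewReader(buf), testPol)
	for {
		c, err := ch.Next(nil)
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		chunks = append(chunks, c)
	}
	return chunks
}

func main() {
	buf := make([]byte, 4*1024*1024)
	rand.New(rand.NewSource(23)).Read(buf) // deterministic pseudo-random data
	for i, c := range collectChunks(buf) {
		fmt.Printf("chunk %d: %d bytes\n", i, len(c.Data))
	}
}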
src/restic/buffer_pool.go (new file, 21 lines)
@@ -0,0 +1,21 @@
+package restic
+
+import (
+	"sync"
+
+	"github.com/restic/chunker"
+)
+
+var bufPool = sync.Pool{
+	New: func() interface{} {
+		return make([]byte, chunker.MinSize)
+	},
+}
+
+func getBuf() []byte {
+	return bufPool.Get().([]byte)
+}
+
+func freeBuf(data []byte) {
+	bufPool.Put(data)
+}
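The new buffer_pool.go backs getBuf/freeBuf with a sync.Pool whose New function allocates chunker.MinSize-byte slices. The snippet below only exercises that pool in isolation to show the round trip; the grown slice is a stand-in for a buffer the chunker enlarged past MinSize, which freeBuf recycles just the same since sync.Pool places no constraints on what is put back.

package main

import (
	"fmt"
	"sync"

	"github.com/restic/chunker"
)

var bufPool = sync.Pool{
	New: func() interface{} {
		return make([]byte, chunker.MinSize)
	},
}

func getBuf() []byte   { return bufPool.Get().([]byte) }
func freeBuf(b []byte) { bufPool.Put(b) }

func main() {
	buf := getBuf()
	fmt.Println("fresh buffer:", len(buf)) // chunker.MinSize bytes

	// A buffer grown past MinSize is recycled just the same.
	grown := append(buf[:0], make([]byte, 2*chunker.MinSize)...)
	freeBuf(grown)

	// Get may hand back the recycled slice or a fresh MinSize one; the pool
	// gives no guarantee either way.
	fmt.Println("next buffer:", len(getBuf()))
}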
@@ -3,7 +3,6 @@ package repository
 import (
 	"sync"
 
-	"github.com/restic/chunker"
 	"restic/backend"
 	"restic/crypto"
 	"restic/debug"
@@ -18,8 +17,8 @@ type packerManager struct {
 	packs []*pack.Packer
 }
 
-const minPackSize = 4 * chunker.MiB
-const maxPackSize = 16 * chunker.MiB
+const minPackSize = 4 * 1024 * 1024
+const maxPackSize = 16 * 1024 * 1024
 const maxPackers = 200
 
 // findPacker returns a packer for a new blob of size bytes. Either a new one is