
Finalize repacker

Alexander Neumann 2015-11-02 19:28:30 +01:00
parent 1fc0d78913
commit db41102bfa
4 changed files with 81 additions and 18 deletions

View File

@@ -12,16 +12,15 @@ import (
 // new packs.
 type Repacker struct {
     unusedBlobs backend.IDSet
-    src, dst    *repository.Repository
+    repo        *repository.Repository
 }
 
 // NewRepacker returns a new repacker that (when Repack() in run) cleans up the
 // repository and creates new packs and indexs so that all blobs in unusedBlobs
 // aren't used any more.
-func NewRepacker(src, dst *repository.Repository, unusedBlobs backend.IDSet) *Repacker {
+func NewRepacker(repo *repository.Repository, unusedBlobs backend.IDSet) *Repacker {
     return &Repacker{
-        src:         src,
-        dst:         dst,
+        repo:        repo,
         unusedBlobs: unusedBlobs,
     }
 }
@@ -31,14 +30,14 @@ func NewRepacker(src, dst *repository.Repository, unusedBlobs backend.IDSet) *Re
 func (r *Repacker) Repack() error {
     debug.Log("Repacker.Repack", "searching packs for %v", r.unusedBlobs)
 
-    packs, err := FindPacksForBlobs(r.src, r.unusedBlobs)
+    unneededPacks, err := FindPacksForBlobs(r.repo, r.unusedBlobs)
     if err != nil {
         return err
     }
 
-    debug.Log("Repacker.Repack", "found packs: %v", packs)
+    debug.Log("Repacker.Repack", "found packs: %v", unneededPacks)
 
-    blobs, err := FindBlobsForPacks(r.src, packs)
+    blobs, err := FindBlobsForPacks(r.repo, unneededPacks)
     if err != nil {
         return err
     }
@@ -52,19 +51,39 @@ func (r *Repacker) Repack() error {
 
     debug.Log("Repacker.Repack", "need to repack blobs: %v", blobs)
 
-    err = RepackBlobs(r.src, r.dst, blobs)
+    err = RepackBlobs(r.repo, r.repo, blobs)
     if err != nil {
         return err
     }
 
-    debug.Log("Repacker.Repack", "remove unneeded packs: %v", packs)
-    for packID := range packs {
-        err = r.src.Backend().Remove(backend.Data, packID.String())
+    debug.Log("Repacker.Repack", "remove unneeded packs: %v", unneededPacks)
+    for packID := range unneededPacks {
+        err = r.repo.Backend().Remove(backend.Data, packID.String())
         if err != nil {
             return err
         }
     }
 
+    debug.Log("Repacker.Repack", "rebuild index")
+    idx, err := r.repo.Index().RebuildIndex(unneededPacks)
+
+    newIndexID, err := repository.SaveIndex(r.repo, idx)
+    debug.Log("Repacker.Repack", "saved new index at %v, err %v", newIndexID, err)
+    if err != nil {
+        return err
+    }
+
+    debug.Log("Repacker.Repack", "remove old indexes: %v", idx.Supersedes())
+    for _, id := range idx.Supersedes() {
+        err = r.repo.Backend().Remove(backend.Index, id.String())
+        if err != nil {
+            debug.Log("Repacker.Repack", "error removing index %v: %v", id.Str(), err)
+            return err
+        }
+        debug.Log("Repacker.Repack", "removed index %v", id.Str())
+    }
+
     return nil
 }
@@ -140,10 +159,5 @@ func RepackBlobs(src, dst *repository.Repository, blobIDs backend.IDSet) (err er
         return err
     }
 
-    err = dst.SaveIndex()
-    if err != nil {
-        return err
-    }
-
     return nil
 }
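
For context, a minimal usage sketch of the changed API (not part of this commit; the package import paths are assumed from the identifiers in the diff). After this change the repacker works on a single repository rather than a src/dst pair, and Repack() rebuilds and saves the index itself, which is why RepackBlobs no longer calls SaveIndex.

// Illustrative sketch only (not part of this commit); import paths are
// assumed from the package names used in the diff.
package repackexample

import (
    "github.com/restic/restic/backend"
    "github.com/restic/restic/checker"
    "github.com/restic/restic/repository"
)

// RepackUnused removes the given unused blobs from repo by repacking the
// packs that contain them. After this commit a single repository is enough;
// Repack() also rebuilds the index and deletes superseded index files.
func RepackUnused(repo *repository.Repository, unusedBlobs backend.IDSet) error {
    repacker := checker.NewRepacker(repo, unusedBlobs)
    return repacker.Repack()
}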

View File

@@ -111,7 +111,7 @@ func TestRepacker(t *testing.T) {
         t.Fatalf("expected unused blobs:\n  %v\ngot:\n  %v", unusedBlobs, list)
     }
 
-    repacker := checker.NewRepacker(repo, repo, unusedBlobs)
+    repacker := checker.NewRepacker(repo, unusedBlobs)
     OK(t, repacker.Repack())
 
     chkr = checker.New(repo)

View File

@@ -439,7 +439,12 @@ func (idx *Index) Dump(w io.Writer) error {
         return err
     }
 
-    buf, err := json.MarshalIndent(list, "", " ")
+    outer := jsonIndex{
+        Supersedes: idx.Supersedes(),
+        Packs:      list,
+    }
+
+    buf, err := json.MarshalIndent(outer, "", " ")
     if err != nil {
         return err
     }
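
With this change a dumped index carries its supersedes list alongside the pack list. A small sketch of calling it (hypothetical helper, import path assumed; the exact JSON field names depend on the jsonIndex struct tags, which this diff does not show):

// Illustrative sketch only. dumpIndex writes idx as indented JSON to stdout;
// after this commit the output object includes the superseded index IDs as
// well as the packs.
package repackexample

import (
    "os"

    "github.com/restic/restic/repository" // import path assumed
)

func dumpIndex(idx *repository.Index) error {
    return idx.Dump(os.Stdout)
}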

View File

@@ -240,3 +240,47 @@ func (mi *MasterIndex) All() []*Index {
 
     return mi.idx
 }
+
+// RebuildIndex combines all known indexes to a new index, leaving out any
+// packs whose ID is contained in packBlacklist. The new index contains the IDs
+// of all known indexes in the "supersedes" field.
+func (mi *MasterIndex) RebuildIndex(packBlacklist backend.IDSet) (*Index, error) {
+    mi.idxMutex.Lock()
+    defer mi.idxMutex.Unlock()
+
+    debug.Log("MasterIndex.RebuildIndex", "start rebuilding index, blob blacklist: %v", packBlacklist)
+
+    newIndex := NewIndex()
+    done := make(chan struct{})
+    defer close(done)
+
+    for i, idx := range mi.idx {
+        debug.Log("MasterIndex.RebuildIndex", "adding %d index ", i)
+
+        for pb := range idx.Each(done) {
+            if packBlacklist.Has(pb.PackID) {
+                continue
+            }
+
+            newIndex.Store(pb)
+        }
+
+        if !idx.Final() {
+            continue
+        }
+
+        id, err := idx.ID()
+        if err != nil {
+            return nil, err
+        }
+
+        debug.Log("MasterIndex.RebuildIndex", "adding index id %v to supersedes field", id)
+        err = newIndex.AddToSupersedes(id)
+        if err != nil {
+            return nil, err
+        }
+    }
+
+    return newIndex, nil
+}
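
Taken together with the Repack() changes above, the intended flow is: rebuild a combined index that omits the removed packs, save it as a new index file, then delete the index files it supersedes. A condensed sketch of that flow, using only identifiers that appear in this diff (not code from the commit; unlike the commit, it also checks the error returned by RebuildIndex):

// Condensed restatement of the index handling Repack() performs in this
// commit; a sketch for illustration, with import paths assumed.
package repackexample

import (
    "github.com/restic/restic/backend"
    "github.com/restic/restic/repository"
)

func rebuildAndSwapIndex(repo *repository.Repository, removedPacks backend.IDSet) error {
    // Combine all known indexes, leaving out blobs stored in removedPacks.
    idx, err := repo.Index().RebuildIndex(removedPacks)
    if err != nil {
        return err
    }

    // Persist the combined index as a new index file in the repository.
    if _, err := repository.SaveIndex(repo, idx); err != nil {
        return err
    }

    // Remove the index files that the new index supersedes.
    for _, id := range idx.Supersedes() {
        if err := repo.Backend().Remove(backend.Index, id.String()); err != nil {
            return err
        }
    }

    return nil
}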