
Merge branch 'list-snapshots'

Alexander Neumann 2014-11-24 22:18:16 +01:00
commit 08690b99bf
9 changed files with 223 additions and 21 deletions

View File

@@ -4,12 +4,24 @@ import (
"bytes"
"compress/zlib"
"crypto/sha256"
"encoding/hex"
"errors"
"io/ioutil"
"sort"
"sync"
)
const (
MinPrefixLength = 4
)
var idPool = sync.Pool{New: func() interface{} { return ID(make([]byte, IDSize)) }}
var (
ErrNoIDPrefixFound = errors.New("no ID found")
ErrMultipleIDMatches = errors.New("multiple IDs with prefix found")
)
// Each lists all entries of type t in the backend and calls function f() with
// the id and data.
func Each(be Server, t Type, f func(id ID, data []byte, err error)) error {
@@ -85,3 +97,83 @@ func Hash(data []byte) ID {
copy(id, h[:])
return id
}
// Find loads the list of all blobs of type t and searches for IDs which start
// with prefix. If none is found, nil and ErrNoIDPrefixFound is returned. If
// more than one is found, nil and ErrMultipleIDMatches is returned.
func Find(be Server, t Type, prefix string) (ID, error) {
p, err := hex.DecodeString(prefix)
if err != nil {
return nil, err
}
list, err := be.List(t)
if err != nil {
return nil, err
}
match := ID(nil)
// TODO: optimize by sorting list etc.
for _, id := range list {
if bytes.Equal(p, id[:len(p)]) {
if match == nil {
match = id
} else {
return nil, ErrMultipleIDMatches
}
}
}
if match != nil {
return match, nil
}
return nil, ErrNoIDPrefixFound
}
// FindSnapshot takes a string and tries to find a snapshot whose ID matches
// the string as closely as possible.
func FindSnapshot(be Server, s string) (ID, error) {
// parse ID directly
if id, err := ParseID(s); err == nil {
return id, nil
}
// find snapshot id with prefix
id, err := Find(be, Snapshot, s)
if err != nil {
return nil, err
}
return id, nil
}
// PrefixLength returns the number of bytes required so that all prefixes of
// all IDs of type t are unique.
func PrefixLength(be Lister, t Type) (int, error) {
// load all IDs of the given type
list, err := be.List(t)
if err != nil {
return 0, err
}
sort.Sort(list)
// select prefixes of length l, test if the last one is the same as the current one
outer:
for l := MinPrefixLength; l < IDSize; l++ {
var last ID
for _, id := range list {
if bytes.Equal(last, id[:l]) {
continue outer
}
last = id[:l]
}
return l, nil
}
return IDSize, nil
}
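The prefix lookup in Find scans the whole ID list linearly (the TODO above notes that sorting would allow something better). As a rough standalone illustration of the same rule — exactly one candidate may start with the prefix — here is a sketch over plain hex strings; the names and the string-based IDs are illustrative only, not part of the khepri backend API.

package main

import (
	"errors"
	"fmt"
	"strings"
)

var (
	errNotFound  = errors.New("no ID found")
	errAmbiguous = errors.New("multiple IDs with prefix found")
)

// findByPrefix mirrors the shape of Find above: it succeeds only if exactly
// one candidate starts with the given prefix.
func findByPrefix(ids []string, prefix string) (string, error) {
	match := ""
	for _, id := range ids {
		if strings.HasPrefix(id, prefix) {
			if match != "" {
				return "", errAmbiguous
			}
			match = id
		}
	}
	if match == "" {
		return "", errNotFound
	}
	return match, nil
}

func main() {
	ids := []string{"20bdc140aa", "20ff988bef", "326cb59dfe"}

	id, err := findByPrefix(ids, "20b")
	fmt.Println(id, err) // 20bdc140aa <nil>

	_, err = findByPrefix(ids, "20")
	fmt.Println(err) // multiple IDs with prefix found
}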

View File

@@ -6,6 +6,8 @@ import (
"reflect"
"runtime"
"testing"
"github.com/fd0/khepri/backend"
)
// assert fails the test if the condition is false.
@@ -34,3 +36,43 @@ func equals(tb testing.TB, exp, act interface{}) {
tb.FailNow()
}
}
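// str2id converts a fixed hex string into a backend.ID and panics on parse
// errors; it is only meant for the test fixtures below.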
func str2id(s string) backend.ID {
id, err := backend.ParseID(s)
if err != nil {
panic(err)
}
return id
}
type IDList backend.IDs
var samples = IDList{
str2id("20bdc1402a6fc9b633aaffffffffffffffffffffffffffffffffffffffffffff"),
str2id("20bdc1402a6fc9b633ccd578c4a92d0f4ef1a457fa2e16c596bc73fb409d6cc0"),
str2id("20bdc1402a6fc9b633ffffffffffffffffffffffffffffffffffffffffffffff"),
str2id("20ff988befa5fc40350f00d531a767606efefe242c837aaccb80673f286be53d"),
str2id("326cb59dfe802304f96ee9b5b9af93bdee73a30f53981e5ec579aedb6f1d0f07"),
str2id("86b60b9594d1d429c4aa98fa9562082cabf53b98c7dc083abe5dae31074dd15a"),
str2id("96c8dbe225079e624b5ce509f5bd817d1453cd0a85d30d536d01b64a8669aeae"),
str2id("fa31d65b87affcd167b119e9d3d2a27b8236ca4836cb077ed3e96fcbe209b792"),
}
func (l IDList) List(backend.Type) (backend.IDs, error) {
return backend.IDs(l), nil
}
func TestPrefixLength(t *testing.T) {
l, err := backend.PrefixLength(samples, backend.Snapshot)
ok(t, err)
equals(t, 10, l)
l, err = backend.PrefixLength(samples[:3], backend.Snapshot)
ok(t, err)
equals(t, 10, l)
l, err = backend.PrefixLength(samples[3:], backend.Snapshot)
ok(t, err)
equals(t, 4, l)
}
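The expected values follow from the fixtures: the first three sample IDs share their first 9 bytes (18 hex digits), so 10 bytes are needed to tell them apart, while the remaining IDs already differ inside the 4-byte MinPrefixLength. A throwaway check of that shared length (not part of the test suite):

package main

import (
	"encoding/hex"
	"fmt"
)

func main() {
	a, _ := hex.DecodeString("20bdc1402a6fc9b633aaffffffffffffffffffffffffffffffffffffffffffff")
	b, _ := hex.DecodeString("20bdc1402a6fc9b633ccd578c4a92d0f4ef1a457fa2e16c596bc73fb409d6cc0")

	// count how many leading bytes the two IDs have in common
	shared := 0
	for shared < len(a) && a[shared] == b[shared] {
		shared++
	}
	fmt.Println(shared) // 9, so a unique prefix needs at least 10 bytes
}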

View File

@@ -21,10 +21,14 @@ var (
ErrAlreadyPresent = errors.New("blob is already present in backend")
)
type Lister interface {
List(Type) (IDs, error)
}
type Server interface {
Create(Type, []byte) (ID, error)
Get(Type, ID) ([]byte, error)
-List(Type) (IDs, error)
Lister
Test(Type, ID) (bool, error)
Remove(Type, ID) error
Version() uint
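Pulling List out into the small Lister interface is what lets TestPrefixLength above feed PrefixLength a plain slice of IDs instead of a full storage backend. A minimal sketch of that pattern with stand-in types (not the actual khepri definitions):

package main

import "fmt"

// Lister is the narrow read-only view a consumer actually needs.
type Lister interface {
	List() ([]string, error)
}

// fakeLister satisfies Lister with a fixed slice; no real backend required.
type fakeLister []string

func (f fakeLister) List() ([]string, error) { return f, nil }

// count depends only on Lister, so tests can pass a fakeLister and
// production code can pass the full server.
func count(l Lister) (int, error) {
	ids, err := l.List()
	if err != nil {
		return 0, err
	}
	return len(ids), nil
}

func main() {
	n, _ := count(fakeLister{"a", "b", "c"})
	fmt.Println(n) // 3
}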

View File

@@ -19,7 +19,17 @@ func commandCat(be backend.Server, key *khepri.Key, args []string) error {
id, err := backend.ParseID(args[1])
if err != nil {
-return err
id = nil
if tpe != "snapshot" {
return err
}
// find snapshot id with prefix
id, err = backend.Find(be, backend.Snapshot, args[1])
if err != nil {
return err
}
}
ch, err := khepri.NewContentHandler(be, key)
@@ -105,7 +115,8 @@ func commandCat(be backend.Server, key *khepri.Key, args []string) error {
return nil
case "snapshot":
var sn khepri.Snapshot
-err := ch.LoadJSONRaw(backend.Snapshot, id, &sn)
err = ch.LoadJSONRaw(backend.Snapshot, id, &sn)
if err != nil {
return err
}

View File

@@ -53,7 +53,7 @@ func commandLs(be backend.Server, key *khepri.Key, args []string) error {
return errors.New("usage: ls SNAPSHOT_ID [dir]")
}
-id, err := backend.ParseID(args[0])
id, err := backend.FindSnapshot(be, args[0])
if err != nil {
return err
}

View File

@@ -14,7 +14,7 @@ func commandRestore(be backend.Server, key *khepri.Key, args []string) error {
return errors.New("usage: restore ID dir")
}
-id, err := backend.ParseID(args[0])
id, err := backend.FindSnapshot(be, args[0])
if err != nil {
errx(1, "invalid id %q: %v", args[0], err)
}

View File

@@ -3,34 +3,83 @@ package main
import (
"errors"
"fmt"
"os"
"sort"
"strings"
"time"
"github.com/fd0/khepri"
"github.com/fd0/khepri/backend"
)
const TimeFormat = "02.01.2006 15:04:05 -0700"
const (
minute = 60
hour = 60 * minute
day = 24 * hour
week = 7 * day
)
const TimeFormat = "2006-01-02 15:04:05"
func reltime(t time.Time) string {
sec := uint64(time.Since(t).Seconds())
switch {
case sec > week:
return t.Format(TimeFormat)
case sec > day:
return fmt.Sprintf("%d days ago", sec/day)
case sec > hour:
return fmt.Sprintf("%d hours ago", sec/hour)
case sec > minute:
return fmt.Sprintf("%d minutes ago", sec/minute)
default:
return fmt.Sprintf("%d seconds ago", sec)
}
}
func commandSnapshots(be backend.Server, key *khepri.Key, args []string) error {
if len(args) != 0 {
return errors.New("usage: snapshots")
}
-// ch, err := khepri.NewContentHandler(be, key)
-// if err != nil {
-// return err
-// }
ch, err := khepri.NewContentHandler(be, key)
if err != nil {
return err
}
fmt.Printf("%-8s %-19s %-10s %s\n", "ID", "Date", "Source", "Directory")
fmt.Printf("%s\n", strings.Repeat("-", 80))
list := []*khepri.Snapshot{}
plen, err := backend.PrefixLength(be, backend.Snapshot)
if err != nil {
return err
}
backend.EachID(be, backend.Snapshot, func(id backend.ID) {
-// sn, err := ch.LoadSnapshot(id)
-// if err != nil {
-// fmt.Fprintf(os.Stderr, "error loading snapshot %s: %v\n", id, err)
-// return
-// }
sn, err := ch.LoadSnapshot(id)
if err != nil {
fmt.Fprintf(os.Stderr, "error loading snapshot %s: %v\n", id, err)
return
}
-// fmt.Printf("snapshot %s\n %s at %s by %s\n",
-// id, sn.Dir, sn.Time, sn.Username)
-fmt.Println(id)
pos := sort.Search(len(list), func(i int) bool {
return list[i].Time.After(sn.Time)
})
if pos < len(list) {
list = append(list, nil)
copy(list[pos+1:], list[pos:])
list[pos] = sn
} else {
list = append(list, sn)
}
})
for _, sn := range list {
fmt.Printf("%-8s %-19s %-10s %s\n", sn.ID()[:plen], sn.Time.Format(TimeFormat), sn.Hostname, sn.Dir)
}
return nil
}
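commandSnapshots keeps its list ordered by time even though EachID delivers snapshots in backend order: sort.Search finds the insertion point and the tail is shifted one slot to the right. The same pattern on ints, as a self-contained sketch (the helper name is illustrative):

package main

import (
	"fmt"
	"sort"
)

// insertSorted inserts x into the ascending slice xs, keeping it sorted,
// mirroring how commandSnapshots inserts snapshots by Time.
func insertSorted(xs []int, x int) []int {
	pos := sort.Search(len(xs), func(i int) bool { return xs[i] > x })
	xs = append(xs, 0)         // grow by one element
	copy(xs[pos+1:], xs[pos:]) // shift the tail right
	xs[pos] = x
	return xs
}

func main() {
	var xs []int
	for _, x := range []int{5, 1, 4, 2} {
		xs = insertSorted(xs, x)
	}
	fmt.Println(xs) // [1 2 4 5]
}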

View File

@@ -51,7 +51,7 @@ func NewSnapshot(dir string) *Snapshot {
}
func LoadSnapshot(ch *ContentHandler, id backend.ID) (*Snapshot, error) {
sn := &Snapshot{}
sn := &Snapshot{id: id}
err := ch.LoadJSON(backend.Snapshot, id, sn)
if err != nil {
return nil, err
@@ -60,6 +60,10 @@ func LoadSnapshot(ch *ContentHandler, id backend.ID) (*Snapshot, error) {
return sn, nil
}
-func (sn *Snapshot) String() string {
func (sn Snapshot) String() string {
return fmt.Sprintf("<Snapshot %q at %s>", sn.Dir, sn.Time)
}
func (sn Snapshot) ID() backend.ID {
return sn.id
}

View File

@@ -3,6 +3,6 @@
prepare
run khepri init
run khepri backup "${BASE}/fake-data"
run khepri restore "$(khepri snapshots)" "${BASE}/fake-data-restore"
run khepri restore "$(basename "$KHEPRI_REPOSITORY"/snapshots/*)" "${BASE}/fake-data-restore"
dirdiff "${BASE}/fake-data" "${BASE}/fake-data-restore/fake-data"
cleanup