
Move SaveFile to Archiver, add blobs status

Alexander Neumann 2014-11-17 23:28:51 +01:00
parent 94d1482888
commit fe92062735
2 changed files with 51 additions and 60 deletions

Changed file 1 of 2:

@@ -1,11 +1,14 @@
 package khepri
 import (
+	"io"
+	"io/ioutil"
 	"os"
 	"path/filepath"
 	"sync"
 	"github.com/fd0/khepri/backend"
+	"github.com/fd0/khepri/chunker"
 )
 const (
@@ -111,10 +114,55 @@ func (arch *Archiver) SaveJSON(t backend.Type, item interface{}) (*Blob, error)
 	return blob, nil
 }
+// SaveFile stores the content of the file on the backend as a Blob by calling
+// Save for each chunk.
 func (arch *Archiver) SaveFile(node *Node) error {
-	blobs, err := arch.ch.SaveFile(node.path, uint(node.Size))
+	file, err := os.Open(node.path)
+	defer file.Close()
 	if err != nil {
-		return arch.Error(node.path, nil, err)
+		return err
 	}
+	var blobs Blobs
+	// if the file is small enough, store it directly
+	if node.Size < chunker.MinSize {
+		buf, err := ioutil.ReadAll(file)
+		if err != nil {
+			return err
+		}
+		blob, err := arch.ch.Save(backend.Data, buf)
+		if err != nil {
+			return err
+		}
+		arch.saveUpdate(Stats{Bytes: blob.Size})
+		blobs = Blobs{blob}
+	} else {
+		// else store all chunks
+		chunker := chunker.New(file)
+		for {
+			chunk, err := chunker.Next()
+			if err == io.EOF {
+				break
+			}
+			if err != nil {
+				return err
+			}
+			blob, err := arch.ch.Save(backend.Data, chunk.Data)
+			if err != nil {
+				return err
+			}
+			arch.saveUpdate(Stats{Bytes: blob.Size})
+			blobs = append(blobs, blob)
+		}
+	}
 	node.Content = make([]backend.ID, len(blobs))
@@ -233,13 +281,9 @@ func (arch *Archiver) saveTree(t *Tree) (*Blob, error) {
 					arch.fileToken <- token
 				}()
-				// debug("start: %s", n.path)
 				// TODO: handle error
 				arch.SaveFile(n)
-				arch.saveUpdate(Stats{Files: 1, Bytes: n.Size})
+				arch.saveUpdate(Stats{Files: 1})
-				// debug("done: %s", n.path)
 			}(node)
 		} else {
 			arch.saveUpdate(Stats{Other: 1})
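
The new Archiver.SaveFile above stores a small file as a single blob and splits larger files into chunks, calling saveUpdate(Stats{Bytes: blob.Size}) after each blob it writes. The following self-contained sketch illustrates only that control flow; it is not khepri code. Fixed-size chunks stand in for khepri's content-defined chunker, an in-memory store stands in for the backend and ContentHandler, and the names store, saveFile, minSize, and chunkSize are illustrative.

package main

import (
	"bytes"
	"fmt"
	"io"
)

// store is a stand-in for the backend: it collects saved blobs and
// keeps a running count of the bytes written, analogous to the
// per-blob saveUpdate(Stats{Bytes: ...}) calls in the commit.
type store struct {
	blobs      [][]byte
	savedBytes int
}

// save records one blob and updates the byte counter.
func (s *store) save(data []byte) {
	blob := make([]byte, len(data))
	copy(blob, data)
	s.blobs = append(s.blobs, blob)
	s.savedBytes += len(blob)
}

const (
	minSize   = 512 * 1024  // files below this are stored as one blob (illustrative value)
	chunkSize = 1024 * 1024 // fixed chunk size; khepri uses content-defined chunking instead
)

// saveFile stores r either directly (small input) or as a sequence of
// chunks, returning the number of blobs written.
func saveFile(s *store, r io.Reader, size int) (int, error) {
	// small enough: read everything and store it as a single blob
	if size < minSize {
		buf, err := io.ReadAll(r)
		if err != nil {
			return 0, err
		}
		s.save(buf)
		return 1, nil
	}

	// otherwise store the input chunk by chunk
	n := 0
	buf := make([]byte, chunkSize)
	for {
		m, err := io.ReadFull(r, buf)
		if m > 0 {
			s.save(buf[:m])
			n++
		}
		if err == io.EOF || err == io.ErrUnexpectedEOF {
			break
		}
		if err != nil {
			return n, err
		}
	}
	return n, nil
}

func main() {
	data := bytes.Repeat([]byte("x"), 3*1024*1024)
	s := &store{}
	n, err := saveFile(s, bytes.NewReader(data), len(data))
	if err != nil {
		panic(err)
	}
	fmt.Printf("stored %d blobs, %d bytes\n", n, s.savedBytes)
}

Running the sketch prints the blob count and total bytes written, the same two quantities the commit begins tracking per saved blob.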

Changed file 2 of 2:

@@ -3,13 +3,9 @@ package khepri
 import (
 	"encoding/json"
 	"errors"
-	"io"
-	"io/ioutil"
-	"os"
 	"sync"
 	"github.com/fd0/khepri/backend"
-	"github.com/fd0/khepri/chunker"
 )
 type ContentHandler struct {
@@ -118,55 +114,6 @@ func (ch *ContentHandler) SaveJSON(t backend.Type, item interface{}) (*Blob, error)
 	return ch.Save(t, backend.Compress(data))
 }
-// SaveFile stores the content of the file on the backend as a Blob by calling
-// Save for each chunk.
-func (ch *ContentHandler) SaveFile(filename string, size uint) (Blobs, error) {
-	file, err := os.Open(filename)
-	defer file.Close()
-	if err != nil {
-		return nil, err
-	}
-	// if the file is small enough, store it directly
-	if size < chunker.MinSize {
-		buf, err := ioutil.ReadAll(file)
-		if err != nil {
-			return nil, err
-		}
-		blob, err := ch.Save(backend.Data, buf)
-		if err != nil {
-			return nil, err
-		}
-		return Blobs{blob}, nil
-	}
-	// else store all chunks
-	blobs := Blobs{}
-	chunker := chunker.New(file)
-	for {
-		chunk, err := chunker.Next()
-		if err == io.EOF {
-			break
-		}
-		if err != nil {
-			return nil, err
-		}
-		blob, err := ch.Save(backend.Data, chunk.Data)
-		if err != nil {
-			return nil, err
-		}
-		blobs = append(blobs, blob)
-	}
-	return blobs, nil
-}
 // Load tries to load and decrypt content identified by t and id from the backend.
 func (ch *ContentHandler) Load(t backend.Type, id backend.ID) ([]byte, error) {
 	if t == backend.Snapshot {