mirror of https://github.com/octoleo/restic.git
synced 2024-11-21 20:35:12 +00:00

Restructured repository

parent 6e17708dc0
commit 13bb557cdc
@@ -10,10 +10,10 @@ import (
 	"os"
 	"path/filepath"
 
-	"github.com/fd0/khepri/storage"
+	"github.com/fd0/khepri"
 )
 
-func hash(filename string) (storage.ID, error) {
+func hash(filename string) (khepri.ID, error) {
 	h := sha256.New()
 	f, err := os.Open(filename)
 	if err != nil {
@@ -24,7 +24,7 @@ func hash(filename string) (storage.ID, error) {
 	return h.Sum([]byte{}), nil
 }
 
-func archive_dir(repo *storage.DirRepository, path string) (storage.ID, error) {
+func archive_dir(repo *khepri.DirRepository, path string) (khepri.ID, error) {
 	log.Printf("archiving dir %q", path)
 	dir, err := os.Open(path)
 	if err != nil {
@@ -43,11 +43,11 @@ func archive_dir(repo *storage.DirRepository, path string) (storage.ID, error) {
 		return nil, nil
 	}
 
-	t := storage.NewTree()
+	t := khepri.NewTree()
 	for _, e := range entries {
-		node := storage.NodeFromFileInfo(e)
+		node := khepri.NodeFromFileInfo(e)
 
-		var id storage.ID
+		var id khepri.ID
 		var err error
 
 		if e.IsDir() {
@@ -81,7 +81,7 @@ func archive_dir(repo *storage.DirRepository, path string) (storage.ID, error) {
 	return id, nil
 }
 
-func commandBackup(repo *storage.DirRepository, args []string) error {
+func commandBackup(repo *khepri.DirRepository, args []string) error {
 	if len(args) != 1 {
 		return errors.New("usage: backup dir")
 	}
@@ -7,10 +7,10 @@ import (
 	"os"
 	"path"
 
-	"github.com/fd0/khepri/storage"
+	"github.com/fd0/khepri"
 )
 
-func restore_file(repo *storage.DirRepository, node storage.Node, target string) error {
+func restore_file(repo *khepri.DirRepository, node khepri.Node, target string) error {
 	fmt.Printf(" restore file %q\n", target)
 
 	rd, err := repo.Get(node.Content)
@@ -47,14 +47,14 @@ func restore_file(repo *storage.DirRepository, node storage.Node, target string)
 	return nil
 }
 
-func restore_dir(repo *storage.DirRepository, id storage.ID, target string) error {
+func restore_dir(repo *khepri.DirRepository, id khepri.ID, target string) error {
 	fmt.Printf(" restore dir %q\n", target)
 	rd, err := repo.Get(id)
 	if err != nil {
 		return err
 	}
 
-	t := storage.NewTree()
+	t := khepri.NewTree()
 	err = t.Restore(rd)
 	if err != nil {
 		return err
@@ -104,12 +104,12 @@ func restore_dir(repo *storage.DirRepository, id storage.ID, target string) erro
 	return nil
 }
 
-func commandRestore(repo *storage.DirRepository, args []string) error {
+func commandRestore(repo *khepri.DirRepository, args []string) error {
 	if len(args) != 2 {
 		return errors.New("usage: restore ID dir")
 	}
 
-	id, err := storage.ParseID(args[0])
+	id, err := khepri.ParseID(args[0])
 	if err != nil {
 		errmsg(1, "invalid id %q: %v", args[0], err)
 	}
@@ -6,7 +6,7 @@ import (
 	"sort"
 	"strings"
 
-	"github.com/fd0/khepri/storage"
+	"github.com/fd0/khepri"
 	"github.com/jessevdk/go-flags"
 )
 
@@ -22,7 +22,7 @@ func errmsg(code int, format string, data ...interface{}) {
 	os.Exit(code)
 }
 
-type commandFunc func(*storage.DirRepository, []string) error
+type commandFunc func(*khepri.DirRepository, []string) error
 
 var commands map[string]commandFunc
 
@@ -60,7 +60,7 @@ func main() {
 		errmsg(1, "unknown command: %q\n", cmd)
 	}
 
-	repo, err := storage.NewDirRepository(Opts.Repo)
+	repo, err := khepri.NewDirRepository(Opts.Repo)
 
 	if err != nil {
 		errmsg(1, "unable to create/open repo: %v", err)
hashing.go (new file, 84 lines)
@@ -0,0 +1,84 @@
+package khepri
+
+import (
+	"hash"
+	"io"
+)
+
+// HashingReader is the interfaces that wraps a normal reader. When Hash() is called,
+// it returns the hash for all data that has been read so far.
+type HashingReader interface {
+	io.Reader
+	Hash() []byte
+}
+
+// HashingWriter is the interfaces that wraps a normal writer. When Hash() is called,
+// it returns the hash for all data that has been written so far.
+type HashingWriter interface {
+	io.Writer
+	Hash() []byte
+}
+
+type reader struct {
+	reader io.Reader
+	hash   hash.Hash
+}
+
+// NewHashingReader wraps an io.Reader and in addition feeds all data read through the
+// given hash.
+func NewHashingReader(r io.Reader, h func() hash.Hash) *reader {
+	return &reader{
+		reader: r,
+		hash:   h(),
+	}
+}
+
+func (h *reader) Read(p []byte) (int, error) {
+	// call original reader
+	n, err := h.reader.Read(p)
+
+	// hash bytes
+	if n > 0 {
+		// hash
+		h.hash.Write(p[0:n])
+	}
+
+	// return result
+	return n, err
+}
+
+func (h *reader) Hash() []byte {
+	return h.hash.Sum([]byte{})
+}
+
+type writer struct {
+	writer io.Writer
+	hash   hash.Hash
+}
+
+// NewHashingWriter wraps an io.Reader and in addition feeds all data written through
+// the given hash.
+func NewHashingWriter(w io.Writer, h func() hash.Hash) *writer {
+	return &writer{
+		writer: w,
+		hash:   h(),
+	}
+}
+
+func (h *writer) Write(p []byte) (int, error) {
+	// call original writer
+	n, err := h.writer.Write(p)
+
+	// hash bytes
+	if n > 0 {
+		// hash
+		h.hash.Write(p[0:n])
+	}
+
+	// return result
+	return n, err
+}
+
+func (h *writer) Hash() []byte {
+	return h.hash.Sum([]byte{})
+}
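The new file above moves the hashing helpers into the main package. Below is a minimal usage sketch, not part of the commit, assuming only the `github.com/fd0/khepri` import path and the two constructors shown in the hunk above; the input string and the choice of SHA-256 are illustrative, and it mirrors the Example tests further down in this diff. Both wrappers simply tee every byte through the supplied hash.Hash, so the digest reflects exactly what was read or written.

	package main

	import (
		"bytes"
		"crypto/sha256"
		"fmt"
		"io"
		"io/ioutil"
		"strings"

		"github.com/fd0/khepri"
	)

	func main() {
		// Hash data transparently while it is being read.
		rd := khepri.NewHashingReader(strings.NewReader("illustrative data"), sha256.New)
		if _, err := io.Copy(ioutil.Discard, rd); err != nil {
			panic(err)
		}
		fmt.Printf("read hash:  %x\n", rd.Hash())

		// Hash data transparently while it is being written.
		var buf bytes.Buffer
		wr := khepri.NewHashingWriter(&buf, sha256.New)
		if _, err := wr.Write([]byte("illustrative data")); err != nil {
			panic(err)
		}
		fmt.Printf("write hash: %x\n", wr.Hash())
	}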
hashing/impl.go (deleted, 109 lines)
@@ -1,109 +0,0 @@
-// Copyright (c) 2014, Alexander Neumann <alexander@bumpern.de>
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are met:
-//
-// 1. Redistributions of source code must retain the above copyright notice, this
-// list of conditions and the following disclaimer.
-//
-// 2. Redistributions in binary form must reproduce the above copyright notice,
-// this list of conditions and the following disclaimer in the documentation
-// and/or other materials provided with the distribution.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Packgae hashing provides hashing readers and writers.
-package hashing
-
-import (
-	"hash"
-	"io"
-)
-
-// Reader is the interfaces that wraps a normal reader. When Hash() is called,
-// it returns the hash for all data that has been read so far.
-type Reader interface {
-	io.Reader
-	Hash() []byte
-}
-
-// Writer is the interfaces that wraps a normal writer. When Hash() is called,
-// it returns the hash for all data that has been written so far.
-type Writer interface {
-	io.Writer
-	Hash() []byte
-}
-
-type reader struct {
-	reader io.Reader
-	hash   hash.Hash
-}
-
-// NewReader wraps an io.Reader and in addition feeds all data read through the
-// given hash.
-func NewReader(r io.Reader, h func() hash.Hash) *reader {
-	return &reader{
-		reader: r,
-		hash:   h(),
-	}
-}
-
-func (h *reader) Read(p []byte) (int, error) {
-	// call original reader
-	n, err := h.reader.Read(p)
-
-	// hash bytes
-	if n > 0 {
-		// hash
-		h.hash.Write(p[0:n])
-	}
-
-	// return result
-	return n, err
-}
-
-func (h *reader) Hash() []byte {
-	return h.hash.Sum([]byte{})
-}
-
-type writer struct {
-	writer io.Writer
-	hash   hash.Hash
-}
-
-// NewWriter wraps an io.Reader and in addition feeds all data written through
-// the given hash.
-func NewWriter(w io.Writer, h func() hash.Hash) *writer {
-	return &writer{
-		writer: w,
-		hash:   h(),
-	}
-}
-
-func (h *writer) Write(p []byte) (int, error) {
-	// call original writer
-	n, err := h.writer.Write(p)
-
-	// hash bytes
-	if n > 0 {
-		// hash
-		h.hash.Write(p[0:n])
-	}
-
-	// return result
-	return n, err
-}
-
-func (h *writer) Hash() []byte {
-	return h.hash.Sum([]byte{})
-}
@@ -1,4 +1,4 @@
-package hashing_test
+package khepri_test
 
 import (
 	"bytes"
@@ -7,12 +7,12 @@ import (
 	"fmt"
 	"strings"
 
-	"github.com/fd0/khepri/hashing"
+	"github.com/fd0/khepri"
 )
 
 func ExampleReader() {
 	str := "foobar"
-	reader := hashing.NewReader(strings.NewReader(str), md5.New)
+	reader := khepri.NewHashingReader(strings.NewReader(str), md5.New)
 	buf := make([]byte, len(str))
 
 	reader.Read(buf)
@@ -25,7 +25,7 @@ func ExampleWriter() {
 	str := "foobar"
 	var buf bytes.Buffer
 
-	writer := hashing.NewWriter(&buf, sha1.New)
+	writer := khepri.NewHashingWriter(&buf, sha1.New)
 	writer.Write([]byte(str))
 
 	fmt.Printf("hash for %q is %02x", str, writer.Hash())
@@ -1,9 +1,9 @@
-package hashing_test
+package khepri_test
 
 import (
-	"testing"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
+	"testing"
 )
 
 func TestHashing(t *testing.T) {
@@ -1,4 +1,4 @@
-package hashing_test
+package khepri_test
 
 import (
 	"bytes"
@@ -7,7 +7,7 @@ import (
 	"encoding/hex"
 	"hash"
 
-	"github.com/fd0/khepri/hashing"
+	"github.com/fd0/khepri"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 )
@@ -28,7 +28,7 @@ var _ = Describe("Hashing", func() {
 	Context("Static Strings", func() {
 		It("Should compute digest", func() {
 			for _, t := range static_tests {
-				r := hashing.NewReader(bytes.NewBuffer([]byte(t.text)), t.hash)
+				r := khepri.NewHashingReader(bytes.NewBuffer([]byte(t.text)), t.hash)
 
 				n, err := r.Read(make([]byte, len(t.text)+1))
 
@@ -58,7 +58,7 @@ var _ = Describe("Hashing", func() {
 		It("Should compute digest", func() {
 			for _, t := range static_tests {
 				var buf bytes.Buffer
-				w := hashing.NewWriter(&buf, t.hash)
+				w := khepri.NewHashingWriter(&buf, t.hash)
 
 				n, err := w.Write([]byte(t.text))
 
@@ -1,4 +1,4 @@
-package storage
+package khepri
 
 import (
 	"bytes"
@@ -19,6 +19,7 @@ func ParseID(s string) (ID, error) {
 
 	return ID(b), nil
 }
 
 func (id ID) String() string {
 	return hex.EncodeToString(id)
 }
@@ -1,4 +1,4 @@
-package storage
+package khepri
 
 import (
 	"crypto/sha256"
@@ -10,8 +10,6 @@ import (
 	"net/url"
 	"os"
 	"path"
-
-	"github.com/fd0/khepri/hashing"
 )
 
 const (
@@ -89,7 +87,7 @@ func (r *DirRepository) Put(reader io.Reader) (ID, error) {
 		return nil, err
 	}
 
-	rd := hashing.NewReader(reader, r.hash)
+	rd := NewHashingReader(reader, r.hash)
 	_, err = io.Copy(file, rd)
 	if err != nil {
 		return nil, err
@@ -130,7 +128,7 @@ func (r *DirRepository) PutRaw(buf []byte) (ID, error) {
 		return nil, err
 	}
 
-	wr := hashing.NewWriter(file, r.hash)
+	wr := NewHashingWriter(file, r.hash)
 	_, err = wr.Write(buf)
 	if err != nil {
 		return nil, err
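With the storage package folded into the top level, the repository API is now reached as `khepri.DirRepository`. Below is a minimal round-trip sketch, not part of the commit, written under the assumptions the hunks above suggest: `NewDirRepository` opens a directory-backed repository, `Put` consumes an io.Reader and returns an `ID`, and `Get` returns a reader for the stored content. The temporary-directory handling and the stored string are illustrative.

	package main

	import (
		"fmt"
		"io/ioutil"
		"os"
		"strings"

		"github.com/fd0/khepri"
	)

	func main() {
		// Create (or open) a directory-backed repository in a temp dir.
		tempdir, err := ioutil.TempDir("", "khepri-example-")
		if err != nil {
			panic(err)
		}
		defer os.RemoveAll(tempdir)

		repo, err := khepri.NewDirRepository(tempdir)
		if err != nil {
			panic(err)
		}

		// Store some data; the repository hashes the content while writing it.
		id, err := repo.Put(strings.NewReader("illustrative content"))
		if err != nil {
			panic(err)
		}
		fmt.Printf("stored blob %v\n", id)

		// Read it back by ID.
		rd, err := repo.Get(id)
		if err != nil {
			panic(err)
		}
		buf, err := ioutil.ReadAll(rd)
		if err != nil {
			panic(err)
		}
		fmt.Printf("got back %q\n", buf)
	}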
@@ -1,4 +1,4 @@
-package storage_test
+package khepri_test
 
 import (
 	. "github.com/onsi/ginkgo"
@@ -1,4 +1,4 @@
-package storage_test
+package khepri_test
 
 import (
 	"bytes"
@@ -7,7 +7,7 @@ import (
 	"os"
 	"strings"
 
-	"github.com/fd0/khepri/storage"
+	"github.com/fd0/khepri"
 	. "github.com/onsi/ginkgo"
 	. "github.com/onsi/gomega"
 )
@@ -25,9 +25,9 @@ var TestStrings = []struct {
 var _ = Describe("Storage", func() {
 	var (
 		tempdir string
-		repo    *storage.DirRepository
+		repo    *khepri.DirRepository
 		err     error
-		id      storage.ID
+		id      khepri.ID
 	)
 
 	BeforeEach(func() {
@@ -35,7 +35,7 @@ var _ = Describe("Storage", func() {
 		if err != nil {
 			panic(err)
 		}
-		repo, err = storage.NewDirRepository(tempdir)
+		repo, err = khepri.NewDirRepository(tempdir)
 		if err != nil {
 			panic(err)
 		}
@@ -54,7 +54,7 @@ var _ = Describe("Storage", func() {
 	Context("File Operations", func() {
 		It("Should detect non-existing file", func() {
 			for _, test := range TestStrings {
-				id, err := storage.ParseID(test.id)
+				id, err := khepri.ParseID(test.id)
 				Expect(err).NotTo(HaveOccurred())
 
 				// try to get string out, should fail
@@ -1,4 +1,4 @@
-package storage
+package khepri
 
 import (
 	"encoding/json"
@@ -1,13 +1,13 @@
-package storage_test
+package khepri_test
 
 import (
 	"bytes"
+	. "github.com/onsi/ginkgo"
+	. "github.com/onsi/gomega"
 	"strings"
 	"time"
 
-	"github.com/fd0/khepri/storage"
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
+	"github.com/fd0/khepri"
 )
 
 func parseTime(str string) time.Time {
@@ -20,13 +20,13 @@ func parseTime(str string) time.Time {
 }
 
 var _ = Describe("Tree", func() {
-	var t *storage.Tree
+	var t *khepri.Tree
 	var raw string
 
 	BeforeEach(func() {
-		t = new(storage.Tree)
-		t.Nodes = []storage.Node{
-			storage.Node{
+		t = new(khepri.Tree)
+		t.Nodes = []khepri.Node{
+			khepri.Node{
 				Name:    "foobar",
 				Mode:    0755,
 				ModTime: parseTime("2014-04-20T22:16:54.161401+02:00"),
@@ -35,7 +35,7 @@ var _ = Describe("Tree", func() {
 				Group:   1001,
 				Content: []byte{0x41, 0x42, 0x43},
 			},
-			storage.Node{
+			khepri.Node{
 				Name: "baz",
 				Mode: 0755,
 				User: 1000,
@@ -54,7 +54,7 @@ var _ = Describe("Tree", func() {
 		t.Save(&buf)
 		Expect(strings.TrimRight(buf.String(), "\n")).To(Equal(raw))
 
-		t2 := new(storage.Tree)
+		t2 := new(khepri.Tree)
 		err := t2.Restore(&buf)
 		Expect(err).NotTo(HaveOccurred())
 
@@ -69,7 +69,7 @@ var _ = Describe("Tree", func() {
 
 	It("Should restore", func() {
 		buf := bytes.NewBufferString(raw)
-		t2 := new(storage.Tree)
+		t2 := new(khepri.Tree)
 		err := t2.Restore(buf)
 		Expect(err).NotTo(HaveOccurred())
 
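The tree test above also moves to the top-level package: trees and nodes are now `khepri.Tree` and `khepri.Node`. Below is a minimal sketch of the same round trip outside the test suite, not part of the commit, assuming only the fields and methods the test exercises (`Nodes`, `Save` to a writer, `Restore` from a reader); the node's field values and the use of time.Now() for ModTime are illustrative.

	package main

	import (
		"bytes"
		"fmt"
		"time"

		"github.com/fd0/khepri"
	)

	func main() {
		// Build a tree with a single node; field values are illustrative.
		t := khepri.NewTree()
		t.Nodes = []khepri.Node{
			khepri.Node{
				Name:    "foobar",
				Mode:    0755,
				ModTime: time.Now(),
				User:    1000,
				Group:   1001,
				Content: []byte{0x41, 0x42, 0x43},
			},
		}

		// Serialize the tree and read it back, as the test above does.
		var buf bytes.Buffer
		t.Save(&buf)

		t2 := new(khepri.Tree)
		if err := t2.Restore(&buf); err != nil {
			panic(err)
		}
		fmt.Printf("restored %d node(s)\n", len(t2.Nodes))
	}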