Mirror of https://github.com/octoleo/restic.git, synced 2024-11-25 06:07:44 +00:00
Merge pull request #691 from restic/fix-604
Correctly save modified files
commit 51cd78e16c
@@ -431,10 +431,9 @@ func runBackup(opts BackupOptions, gopts GlobalOptions, args []string) error {
     arch.Excludes = opts.Excludes
     arch.SelectFilter = selectFilter
 
-    arch.Error = func(dir string, fi os.FileInfo, err error) error {
+    arch.Warn = func(dir string, fi os.FileInfo, err error) {
         // TODO: make ignoring errors configurable
-        Warnf("%s\rerror for %s: %v\n", ClearLine(), dir, err)
-        return nil
+        Warnf("%s\rwarning for %s: %v\n", ClearLine(), dir, err)
     }
 
     _, id, err := arch.Snapshot(newArchiveProgress(gopts, stat), target, opts.Tags, parentSnapshotID)
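The change above swaps the archiver's abortable Error callback for a fire-and-forget Warn callback. The difference sits in the callback types: a handler that returns an error can stop the backup, while a handler with no return value can only report. A minimal, self-contained sketch of that pattern (illustrative only; walker and visit are made-up names, not restic's types):

package main

import (
	"errors"
	"fmt"
	"os"
)

// walker is a stand-in for the archiver: it exposes two optional callbacks.
type walker struct {
	// Error may abort the walk by returning a non-nil error.
	Error func(path string, err error) error
	// Warn only reports a problem; it cannot stop the walk.
	Warn func(path string, err error)
}

// visit shows how the two callbacks differ when a file looks suspect.
func (w *walker) visit(path string) error {
	problem := errors.New("file has changed")

	// Old style: the callee decides whether the problem is fatal.
	if w.Error != nil {
		if err := w.Error(path, problem); err != nil {
			return err // abort the walk
		}
	}

	// New style: report and keep going.
	if w.Warn != nil {
		w.Warn(path, problem)
	}
	return nil
}

func main() {
	w := &walker{
		Warn: func(path string, err error) {
			fmt.Fprintf(os.Stderr, "warning for %v: %v\n", path, err)
		},
	}
	if err := w.visit("testdata/changed-file"); err != nil {
		fmt.Fprintln(os.Stderr, "walk aborted:", err)
	}
}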
@@ -343,6 +343,52 @@ func TestBackupMissingFile2(t *testing.T) {
     })
 }
 
+func TestBackupChangedFile(t *testing.T) {
+    withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) {
+        datafile := filepath.Join("testdata", "backup-data.tar.gz")
+        fd, err := os.Open(datafile)
+        if os.IsNotExist(errors.Cause(err)) {
+            t.Skipf("unable to find data file %q, skipping", datafile)
+            return
+        }
+        OK(t, err)
+        OK(t, fd.Close())
+
+        SetupTarTestFixture(t, env.testdata, datafile)
+
+        testRunInit(t, gopts)
+
+        globalOptions.stderr = ioutil.Discard
+        defer func() {
+            globalOptions.stderr = os.Stderr
+        }()
+
+        modFile := filepath.Join(env.testdata, "0", "0", "6", "18")
+
+        ranHook := false
+        debug.Hook("archiver.SaveFile", func(context interface{}) {
+            pathname := context.(string)
+
+            if pathname != modFile {
+                return
+            }
+
+            t.Logf("in hook, modifying test file %v", modFile)
+            ranHook = true
+
+            OK(t, ioutil.WriteFile(modFile, []byte("modified"), 0600))
+        })
+
+        opts := BackupOptions{}
+
+        testRunBackup(t, []string{env.testdata}, opts, gopts)
+        testRunCheck(t, gopts)
+
+        Assert(t, ranHook, "hook did not run")
+        debug.RemoveHook("archiver.SaveFile")
+    })
+}
+
 func TestBackupDirectoryError(t *testing.T) {
     withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) {
         datafile := filepath.Join("testdata", "backup-data.tar.gz")
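The new test exercises the fix through a debug hook: the archiver announces the path it is about to save, and the test's hook rewrites that exact file on disk, forcing the "file has changed" path to run before the backup finishes. A rough sketch of such a hook registry (hypothetical, standalone code; restic's real implementation lives in its debug package):

package main

import (
	"fmt"
	"sync"
)

// A tiny hook registry: production code calls RunHook at interesting points,
// tests install a callback under the same name to interfere at exactly that point.
var (
	hookMu sync.Mutex
	hooks  = map[string]func(context interface{}){}
)

func Hook(name string, f func(context interface{})) {
	hookMu.Lock()
	defer hookMu.Unlock()
	hooks[name] = f
}

func RemoveHook(name string) {
	hookMu.Lock()
	defer hookMu.Unlock()
	delete(hooks, name)
}

func RunHook(name string, context interface{}) {
	hookMu.Lock()
	f, ok := hooks[name]
	hookMu.Unlock()
	if ok {
		f(context)
	}
}

// saveFile is a stand-in for Archiver.SaveFile: it announces the path it is
// about to save, giving a test the chance to modify that file first.
func saveFile(path string) {
	RunHook("archiver.SaveFile", path)
	fmt.Println("saving", path)
}

func main() {
	Hook("archiver.SaveFile", func(context interface{}) {
		fmt.Println("hook fired for", context.(string))
	})
	defer RemoveHook("archiver.SaveFile")

	saveFile("testdata/0/0/6/18")
}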
@@ -26,7 +26,9 @@ const (
     maxConcurrency = 10
 )
 
-var archiverAbortOnAllErrors = func(str string, fi os.FileInfo, err error) error { return err }
+var archiverPrintWarnings = func(path string, fi os.FileInfo, err error) {
+    fmt.Fprintf(os.Stderr, "warning for %v: %v", path, err)
+}
 var archiverAllowAllFiles = func(string, os.FileInfo) bool { return true }
 
 // Archiver is used to backup a set of directories.
@@ -39,7 +41,7 @@ type Archiver struct {
 
     blobToken chan struct{}
 
-    Error        func(dir string, fi os.FileInfo, err error) error
+    Warn         func(dir string, fi os.FileInfo, err error)
     SelectFilter pipe.SelectFunc
     Excludes     []string
 }
@@ -61,7 +63,7 @@ func New(repo restic.Repository) *Archiver {
         arch.blobToken <- struct{}{}
     }
 
-    arch.Error = archiverAbortOnAllErrors
+    arch.Warn = archiverPrintWarnings
     arch.SelectFilter = archiverAllowAllFiles
 
     return arch
@@ -135,10 +137,7 @@ func (arch *Archiver) reloadFileIfChanged(node *restic.Node, file fs.File) (*restic.Node, error) {
         return node, nil
     }
 
-    err = arch.Error(node.Path, fi, errors.New("file has changed"))
-    if err != nil {
-        return nil, err
-    }
+    arch.Warn(node.Path, fi, errors.New("file has changed"))
 
     node, err = restic.NodeFromFileInfo(node.Path, fi)
     if err != nil {
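reloadFileIfChanged keeps its existing change detection and only swaps the reaction: instead of asking the Error callback whether to abort, it warns and rebuilds the node from the fresh FileInfo. A stripped-down sketch of that kind of check, using only the standard library and a size/mtime comparison (an assumption for illustration; restic's actual comparison may differ):

package main

import (
	"fmt"
	"os"
	"time"
)

// fileChanged reports whether the file behind f no longer matches the
// size and modification time recorded earlier in prev.
func fileChanged(prev os.FileInfo, f *os.File) (bool, os.FileInfo, error) {
	cur, err := f.Stat()
	if err != nil {
		return false, nil, err
	}
	changed := cur.Size() != prev.Size() || !cur.ModTime().Equal(prev.ModTime())
	return changed, cur, nil
}

func main() {
	// Record metadata, then modify the file before reading it back,
	// mimicking the window the archiver has to handle.
	tmp, err := os.CreateTemp("", "reload-demo-")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())

	prev, _ := tmp.Stat()
	time.Sleep(10 * time.Millisecond)
	if _, err := tmp.WriteString("modified"); err != nil {
		panic(err)
	}

	changed, cur, err := fileChanged(prev, tmp)
	if err != nil {
		panic(err)
	}
	if changed {
		// The archiver would warn here and rebuild its node from cur.
		fmt.Fprintf(os.Stderr, "warning: file has changed (now %d bytes)\n", cur.Size())
	}
}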
@@ -207,16 +206,18 @@ func updateNodeContent(node *restic.Node, results []saveResult) error {
 
 // SaveFile stores the content of the file on the backend as a Blob by calling
 // Save for each chunk.
-func (arch *Archiver) SaveFile(p *restic.Progress, node *restic.Node) error {
+func (arch *Archiver) SaveFile(p *restic.Progress, node *restic.Node) (*restic.Node, error) {
     file, err := fs.Open(node.Path)
     defer file.Close()
     if err != nil {
-        return errors.Wrap(err, "Open")
+        return nil, errors.Wrap(err, "Open")
     }
 
+    debug.RunHook("archiver.SaveFile", node.Path)
+
     node, err = arch.reloadFileIfChanged(node, file)
     if err != nil {
-        return err
+        return nil, err
     }
 
     chnker := chunker.New(file, arch.repo.Config().ChunkerPolynomial)
@@ -229,7 +230,7 @@ func (arch *Archiver) SaveFile(p *restic.Progress, node *restic.Node) error {
         }
 
         if err != nil {
-            return errors.Wrap(err, "chunker.Next")
+            return nil, errors.Wrap(err, "chunker.Next")
         }
 
         resCh := make(chan saveResult, 1)
@@ -239,11 +240,11 @@ func (arch *Archiver) SaveFile(p *restic.Progress, node *restic.Node) error {
 
     results, err := waitForResults(resultChannels)
     if err != nil {
-        return err
+        return nil, err
     }
-
     err = updateNodeContent(node, results)
-    return err
+
+    return node, err
 }
 
 func (arch *Archiver) fileWorker(wg *sync.WaitGroup, p *restic.Progress, done <-chan struct{}, entCh <-chan pipe.Entry) {
@@ -307,7 +308,7 @@ func (arch *Archiver) fileWorker(wg *sync.WaitGroup, p *restic.Progress, done <-chan struct{}, entCh <-chan pipe.Entry) {
         // otherwise read file normally
         if node.Type == "file" && len(node.Content) == 0 {
             debug.Log(" read and save %v, content: %v", e.Path(), node.Content)
-            err = arch.SaveFile(p, node)
+            node, err = arch.SaveFile(p, node)
             if err != nil {
                 // TODO: integrate error reporting
                 fmt.Fprintf(os.Stderr, "error for %v: %v\n", node.Path, err)
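Since SaveFile can now return a node that was rebuilt after the file changed on disk, the caller in fileWorker has to reassign the result, as the hunk above does; ignoring the returned node would put the stale metadata into the snapshot, which is the problem the PR title describes. A small generic illustration of why the reassignment matters (made-up node and saveFile, not restic's types):

package main

import "fmt"

type node struct {
	Path string
	Size int64
}

// saveFile pretends the file grew while being read and returns a rebuilt node.
func saveFile(n *node) (*node, error) {
	fresh := &node{Path: n.Path, Size: n.Size + 8} // metadata re-read from disk
	return fresh, nil
}

func main() {
	n := &node{Path: "testdata/0/0/6/18", Size: 4}

	// Wrong: ignoring the returned node keeps the stale metadata.
	if _, err := saveFile(n); err == nil {
		fmt.Println("stale size:", n.Size) // still 4
	}

	// Right: mirror the fileWorker change and reassign the returned node.
	n, err := saveFile(n)
	if err == nil {
		fmt.Println("fresh size:", n.Size) // 12
	}
}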