From d629333efedc3aa760a0289a0500910e5bfc71cf Mon Sep 17 00:00:00 2001
From: Fred
Date: Sat, 21 Mar 2020 19:05:26 +0000
Subject: [PATCH 1/4] Add function to notify of success after retrying

---
 internal/backend/backend_retry.go | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/internal/backend/backend_retry.go b/internal/backend/backend_retry.go
index fe0d34799..fea0714a9 100644
--- a/internal/backend/backend_retry.go
+++ b/internal/backend/backend_retry.go
@@ -32,6 +32,25 @@ func NewRetryBackend(be restic.Backend, maxTries int, report func(string, error,
 	}
 }
 
+// retryNotifyErrorWithSuccess is an extension of backoff.RetryNotify with notification of success after an error
+// success is NOT notified on the first run of operation (only after an error)
+func retryNotifyErrorWithSuccess(operation backoff.Operation, b backoff.BackOff, notify backoff.Notify, success func()) error {
+	if success == nil {
+		return backoff.RetryNotify(operation, b, notify)
+	}
+	errorDetected := false
+	operationWrapper := func() error {
+		err := operation()
+		if err != nil {
+			errorDetected = true
+		} else if errorDetected {
+			success()
+		}
+		return err
+	}
+	return backoff.RetryNotify(operationWrapper, b, notify)
+}
+
 func (be *RetryBackend) retry(ctx context.Context, msg string, f func() error) error {
 	// Don't do anything when called with an already cancelled context. There would be
 	// no retries in that case either, so be consistent and abort always.

From baf58fbaa8e13707d0fbedbddd3c1c2214ee47a6 Mon Sep 17 00:00:00 2001
From: Fred
Date: Sat, 21 Mar 2020 19:05:36 +0000
Subject: [PATCH 2/4] Add unit tests

---
 internal/backend/backend_retry_test.go | 55 ++++++++++++++++++++++++++
 1 file changed, 55 insertions(+)

diff --git a/internal/backend/backend_retry_test.go b/internal/backend/backend_retry_test.go
index cd58b9d77..3e19da669 100644
--- a/internal/backend/backend_retry_test.go
+++ b/internal/backend/backend_retry_test.go
@@ -6,7 +6,9 @@ import (
 	"io"
 	"io/ioutil"
 	"testing"
+	"time"
 
+	"github.com/cenkalti/backoff/v4"
 	"github.com/restic/restic/internal/backend/mock"
 	"github.com/restic/restic/internal/errors"
 	"github.com/restic/restic/internal/restic"
@@ -301,3 +303,56 @@ func TestBackendCanceledContext(t *testing.T) {
 
 	// don't test "Delete" as it is not used by normal code
 }
+
+func TestNotifyWithSuccessIsNotCalled(t *testing.T) {
+	operation := func() error {
+		return nil
+	}
+
+	notify := func(error, time.Duration) {
+		t.Fatal("Notify should not have been called")
+	}
+
+	success := func() {
+		t.Fatal("Success should not have been called")
+	}
+
+	err := retryNotifyErrorWithSuccess(operation, &backoff.ZeroBackOff{}, notify, success)
+	if err != nil {
+		t.Fatal("retry should not have returned an error")
+	}
+}
+
+func TestNotifyWithSuccessIsCalled(t *testing.T) {
+	operationCalled := 0
+	operation := func() error {
+		operationCalled++
+		if operationCalled <= 2 {
+			return errors.New("expected error in test")
+		}
+		return nil
+	}
+
+	notifyCalled := 0
+	notify := func(error, time.Duration) {
+		notifyCalled++
+	}
+
+	successCalled := 0
+	success := func() {
+		successCalled++
+	}
+
+	err := retryNotifyErrorWithSuccess(operation, &backoff.ZeroBackOff{}, notify, success)
+	if err != nil {
+		t.Fatal("retry should not have returned an error")
+	}
+
+	if notifyCalled != 2 {
+		t.Fatalf("Notify should have been called 2 times, but was called %d times instead", notifyCalled)
+	}
+
+	if successCalled != 1 {
+		t.Fatalf("Success should have been called only once, but was called %d times instead", successCalled)
+	}
+}
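
Note (illustration, not part of the patch series): the sketch below shows how the helper added in PATCH 1/4 behaves when run on its own. It copies retryNotifyErrorWithSuccess verbatim from the patch and drives it with backoff.ZeroBackOff, the same backoff used by the unit tests above; the package name, the flaky operation and the printed messages are invented for this example and are not restic code.

package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/cenkalti/backoff/v4"
)

// Same helper as in PATCH 1/4: success fires only when the operation
// recovers after at least one earlier error.
func retryNotifyErrorWithSuccess(operation backoff.Operation, b backoff.BackOff, notify backoff.Notify, success func()) error {
	if success == nil {
		return backoff.RetryNotify(operation, b, notify)
	}
	errorDetected := false
	operationWrapper := func() error {
		err := operation()
		if err != nil {
			errorDetected = true
		} else if errorDetected {
			success()
		}
		return err
	}
	return backoff.RetryNotify(operationWrapper, b, notify)
}

func main() {
	calls := 0
	operation := func() error {
		calls++
		if calls < 3 {
			return errors.New("transient failure") // fail twice, then succeed
		}
		return nil
	}
	notify := func(err error, d time.Duration) {
		fmt.Printf("retrying after %v: %v\n", d, err)
	}
	success := func() {
		fmt.Println("operation recovered after earlier errors")
	}

	// ZeroBackOff retries immediately, which keeps the example fast.
	err := retryNotifyErrorWithSuccess(operation, &backoff.ZeroBackOff{}, notify, success)
	fmt.Println("final error:", err)
}

Running it prints two retry messages followed by a single recovery message, which mirrors the expectations checked in TestNotifyWithSuccessIsCalled.
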
From be6baaec12c765074724ad3afcac26d4a49e4d58 Mon Sep 17 00:00:00 2001
From: Fred
Date: Sat, 21 Mar 2020 19:39:01 +0000
Subject: [PATCH 3/4] Add success callback to the backend

---
 cmd/restic/global.go                   |  8 ++++++--
 internal/backend/backend_retry.go      | 27 ++++++++++++++++++---------
 internal/backend/backend_retry_test.go | 18 +++++++++---------
 3 files changed, 33 insertions(+), 20 deletions(-)

diff --git a/cmd/restic/global.go b/cmd/restic/global.go
index 3fe124dac..75edfa805 100644
--- a/cmd/restic/global.go
+++ b/cmd/restic/global.go
@@ -439,9 +439,13 @@ func OpenRepository(opts GlobalOptions) (*repository.Repository, error) {
 		return nil, err
 	}
 
-	be = backend.NewRetryBackend(be, 10, func(msg string, err error, d time.Duration) {
+	report := func(msg string, err error, d time.Duration) {
 		Warnf("%v returned error, retrying after %v: %v\n", msg, d, err)
-	})
+	}
+	success := func(msg string, retries int) {
+		Warnf("%v operation successful after %d retries\n", msg, retries)
+	}
+	be = backend.NewRetryBackend(be, 10, report, success)
 
 	// wrap backend if a test specified a hook
 	if opts.backendTestHook != nil {
diff --git a/internal/backend/backend_retry.go b/internal/backend/backend_retry.go
index fea0714a9..1910bec59 100644
--- a/internal/backend/backend_retry.go
+++ b/internal/backend/backend_retry.go
@@ -17,6 +17,7 @@ type RetryBackend struct {
 	restic.Backend
 	MaxTries int
 	Report   func(string, error, time.Duration)
+	Success  func(string, int)
 }
 
 // statically ensure that RetryBackend implements restic.Backend.
@@ -24,27 +25,30 @@ var _ restic.Backend = &RetryBackend{}
 
 // NewRetryBackend wraps be with a backend that retries operations after a
 // backoff. report is called with a description and the error, if one occurred.
-func NewRetryBackend(be restic.Backend, maxTries int, report func(string, error, time.Duration)) *RetryBackend {
+// success is called with the number of retries before a successful operation
+// (it is not called if it succeeded on the first try)
+func NewRetryBackend(be restic.Backend, maxTries int, report func(string, error, time.Duration), success func(string, int)) *RetryBackend {
 	return &RetryBackend{
 		Backend:  be,
 		MaxTries: maxTries,
 		Report:   report,
+		Success:  success,
 	}
 }
 
-// retryNotifyErrorWithSuccess is an extension of backoff.RetryNotify with notification of success after an error
-// success is NOT notified on the first run of operation (only after an error)
-func retryNotifyErrorWithSuccess(operation backoff.Operation, b backoff.BackOff, notify backoff.Notify, success func()) error {
+// retryNotifyErrorWithSuccess is an extension of backoff.RetryNotify with notification of success after an error.
+// success is NOT notified on the first run of operation (only after an error).
+func retryNotifyErrorWithSuccess(operation backoff.Operation, b backoff.BackOff, notify backoff.Notify, success func(retries int)) error {
 	if success == nil {
 		return backoff.RetryNotify(operation, b, notify)
 	}
-	errorDetected := false
+	retries := 0
 	operationWrapper := func() error {
 		err := operation()
 		if err != nil {
-			errorDetected = true
-		} else if errorDetected {
-			success()
+			retries++
+		} else if retries > 0 {
+			success(retries)
 		}
 		return err
 	}
@@ -62,13 +66,18 @@ func (be *RetryBackend) retry(ctx context.Context, msg string, f func() error) e
 		return ctx.Err()
 	}
 
-	err := backoff.RetryNotify(f,
+	err := retryNotifyErrorWithSuccess(f,
 		backoff.WithContext(backoff.WithMaxRetries(backoff.NewExponentialBackOff(), uint64(be.MaxTries)), ctx),
 		func(err error, d time.Duration) {
 			if be.Report != nil {
 				be.Report(msg, err, d)
 			}
 		},
+		func(retries int) {
+			if be.Success != nil {
+				be.Success(msg, retries)
+			}
+		},
 	)
 
 	return err
diff --git a/internal/backend/backend_retry_test.go b/internal/backend/backend_retry_test.go
index 3e19da669..7890eab42 100644
--- a/internal/backend/backend_retry_test.go
+++ b/internal/backend/backend_retry_test.go
@@ -35,7 +35,7 @@ func TestBackendSaveRetry(t *testing.T) {
 		},
 	}
 
-	retryBackend := NewRetryBackend(be, 10, nil)
+	retryBackend := NewRetryBackend(be, 10, nil, nil)
 
 	data := test.Random(23, 5*1024*1024+11241)
 	err := retryBackend.Save(context.TODO(), restic.Handle{}, restic.NewByteReader(data, be.Hasher()))
@@ -70,7 +70,7 @@ func TestBackendSaveRetryAtomic(t *testing.T) {
 		HasAtomicReplaceFn: func() bool { return true },
 	}
 
-	retryBackend := NewRetryBackend(be, 10, nil)
+	retryBackend := NewRetryBackend(be, 10, nil, nil)
 
 	data := test.Random(23, 5*1024*1024+11241)
 	err := retryBackend.Save(context.TODO(), restic.Handle{}, restic.NewByteReader(data, be.Hasher()))
@@ -103,7 +103,7 @@ func TestBackendListRetry(t *testing.T) {
 		},
 	}
 
-	retryBackend := NewRetryBackend(be, 10, nil)
+	retryBackend := NewRetryBackend(be, 10, nil, nil)
 
 	var listed []string
 	err := retryBackend.List(context.TODO(), restic.PackFile, func(fi restic.FileInfo) error {
@@ -132,7 +132,7 @@ func TestBackendListRetryErrorFn(t *testing.T) {
 		},
 	}
 
-	retryBackend := NewRetryBackend(be, 10, nil)
+	retryBackend := NewRetryBackend(be, 10, nil, nil)
 
 	var ErrTest = errors.New("test error")
 
@@ -188,7 +188,7 @@ func TestBackendListRetryErrorBackend(t *testing.T) {
 	}
 
 	const maxRetries = 2
-	retryBackend := NewRetryBackend(be, maxRetries, nil)
+	retryBackend := NewRetryBackend(be, maxRetries, nil, nil)
 
 	var listed []string
 	err := retryBackend.List(context.TODO(), restic.PackFile, func(fi restic.FileInfo) error {
@@ -257,7 +257,7 @@ func TestBackendLoadRetry(t *testing.T) {
 		return failingReader{data: data, limit: limit}, nil
 	}
 
-	retryBackend := NewRetryBackend(be, 10, nil)
+	retryBackend := NewRetryBackend(be, 10, nil, nil)
 
 	var buf []byte
 	err := retryBackend.Load(context.TODO(), restic.Handle{}, 0, 0, func(rd io.Reader) (err error) {
@@ -276,7 +276,7 @@ func assertIsCanceled(t *testing.T, err error) {
 func TestBackendCanceledContext(t *testing.T) {
 	// unimplemented mock backend functions return an error by default
 	// check that we received the expected context canceled error instead
-	retryBackend := NewRetryBackend(mock.NewBackend(), 2, nil)
+	retryBackend := NewRetryBackend(mock.NewBackend(), 2, nil, nil)
 	h := restic.Handle{Type: restic.PackFile, Name: restic.NewRandomID().String()}
 
 	// create an already canceled context
@@ -313,7 +313,7 @@ func TestNotifyWithSuccessIsNotCalled(t *testing.T) {
 		t.Fatal("Notify should not have been called")
 	}
 
-	success := func() {
+	success := func(retries int) {
 		t.Fatal("Success should not have been called")
 	}
 
@@ -339,7 +339,7 @@ func TestNotifyWithSuccessIsCalled(t *testing.T) {
 	}
 
 	successCalled := 0
-	success := func() {
+	success := func(retries int) {
 		successCalled++
 	}
 
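
Note (illustration, not part of the patch series): after PATCH 3/4 the helper reports how many retries an operation needed, and cmd/restic/global.go passes a second Warnf callback to NewRetryBackend. The standalone sketch below reproduces that composition without restic's internal packages, assuming only github.com/cenkalti/backoff/v4; the package name, the msg value, the failing operation and the use of fmt.Printf in place of Warnf are all invented for the example, while the backoff composition mirrors RetryBackend.retry as changed in this patch.

package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/cenkalti/backoff/v4"
)

// Helper in its PATCH 3/4 form: success now receives the retry count.
func retryNotifyErrorWithSuccess(operation backoff.Operation, b backoff.BackOff, notify backoff.Notify, success func(retries int)) error {
	if success == nil {
		return backoff.RetryNotify(operation, b, notify)
	}
	retries := 0
	operationWrapper := func() error {
		err := operation()
		if err != nil {
			retries++
		} else if retries > 0 {
			success(retries)
		}
		return err
	}
	return backoff.RetryNotify(operationWrapper, b, notify)
}

func main() {
	const maxTries = 10
	msg := "Save()"

	// The two callbacks mirror the report/success closures added to
	// cmd/restic/global.go, with fmt.Printf standing in for Warnf.
	report := func(msg string, err error, d time.Duration) {
		fmt.Printf("%v returned error, retrying after %v: %v\n", msg, d, err)
	}
	success := func(msg string, retries int) {
		fmt.Printf("%v operation successful after %d retries\n", msg, retries)
	}

	attempts := 0
	f := func() error { // stands in for a wrapped backend call
		attempts++
		if attempts <= 2 {
			return errors.New("write: connection reset by peer")
		}
		return nil
	}

	// Same backoff composition as RetryBackend.retry: exponential backoff,
	// capped at maxTries retries, bound to a context.
	err := retryNotifyErrorWithSuccess(f,
		backoff.WithContext(backoff.WithMaxRetries(backoff.NewExponentialBackOff(), uint64(maxTries)), context.Background()),
		func(err error, d time.Duration) { report(msg, err, d) },
		func(retries int) { success(msg, retries) },
	)
	fmt.Println("final error:", err)
}

With two simulated failures this prints the "returned error, retrying" line twice and then "Save() operation successful after 2 retries", which is the pairing of messages the next patch documents in the changelog.
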
have been called") } - success := func() { + success := func(retries int) { t.Fatal("Success should not have been called") } @@ -339,7 +339,7 @@ func TestNotifyWithSuccessIsCalled(t *testing.T) { } successCalled := 0 - success := func() { + success := func(retries int) { successCalled++ } From 4042db516950f1b644787a41f60bf6327fad6f1e Mon Sep 17 00:00:00 2001 From: Fred Date: Sat, 21 Mar 2020 23:00:09 +0000 Subject: [PATCH 4/4] Add changelog --- changelog/unreleased/issue-1734 | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 changelog/unreleased/issue-1734 diff --git a/changelog/unreleased/issue-1734 b/changelog/unreleased/issue-1734 new file mode 100644 index 000000000..e1d65aa6b --- /dev/null +++ b/changelog/unreleased/issue-1734 @@ -0,0 +1,16 @@ +Enhancement: "Everything is ok" message after retrying + +When a recoverable error is encountered, restic displays a warning message +saying it's retrying, like this one: + +Save() returned error, retrying after 357.131936ms: CreateBlockBlobFromReader: Put https://example.blob.core.windows.net/restic/data/95/956b9ced99...aac: write tcp 192.168.11.18:51620->1.2.3.4:443: write: connection reset by peer + +This message can be a little confusing because it seems like there was an +error, but we're not sure if it was actually fixed by retrying. + +restic is now displaying a confirmation that the action was successful after retrying: + +Save() operation successful after 1 retries + +https://github.com/restic/restic/issues/1734 +https://github.com/restic/restic/pull/2661