From af7b97f70a96a2fc178cdf09e0d0502a28237079 Mon Sep 17 00:00:00 2001
From: Jarek Kowalski
Date: Mon, 4 Sep 2023 09:07:48 -0700
Subject: [PATCH] chore(ci): added exponential back-off to gettool (#3267)

---
 tools/gettool/autodownload/autodownload.go | 70 ++++++++++++++++++++--
 1 file changed, 66 insertions(+), 4 deletions(-)

diff --git a/tools/gettool/autodownload/autodownload.go b/tools/gettool/autodownload/autodownload.go
index 398636ccf36..e6fc5fff4fb 100644
--- a/tools/gettool/autodownload/autodownload.go
+++ b/tools/gettool/autodownload/autodownload.go
@@ -158,8 +158,66 @@ func unzip(dir string, r io.Reader, stripPathComponents int) error {
     return nil
 }
 
-// Download downloads the provided.
-func Download(url, dir string, checksum map[string]string, stripPathComponents int) (err error) {
+// Download downloads the provided URL and extracts it to the provided directory, retrying
+// exponentially until it succeeds.
+func Download(url, dir string, checksum map[string]string, stripPathComponents int) error {
+    const (
+        // sleep durations 5, 10, 20, 40, 80, 160, 320
+        // total: 635 seconds, ~10 minutes
+        maxRetries       = 8
+        initialSleepTime = 5 * time.Second
+    )
+
+    nextSleepTime := initialSleepTime
+
+    for i := 0; i < maxRetries; i++ {
+        err := downloadInternal(url, dir, checksum, stripPathComponents)
+        if err == nil {
+            // success
+            return nil
+        }
+
+        // 404 is non-retryable
+        if errors.Is(err, errNotFound) {
+            return errors.Wrap(err, "non-retryable")
+        }
+
+        // invalid checksum is non-retryable
+        var ec InvalidChecksumError
+        if errors.As(err, &ec) {
+            // invalid checksum, do not retry.
+            return errors.Wrap(err, "non-retryable")
+        }
+
+        // all other errors are retryable
+        if i != maxRetries-1 {
+            log.Printf("Attempt #%v failed, sleeping for %v: %v", i, nextSleepTime, err)
+            time.Sleep(nextSleepTime)
+
+            nextSleepTime *= 2
+
+            if err := os.RemoveAll(dir); err != nil {
+                log.Printf("unable to remove %v: %v", dir, err)
+            }
+        }
+    }
+
+    return errors.Errorf("unable to download %v", url)
+}
+
+// InvalidChecksumError is returned by Download when the checksum of the downloaded file does not match the expected checksum.
+type InvalidChecksumError struct {
+    actual   string
+    expected string
+}
+
+func (e InvalidChecksumError) Error() string {
+    return fmt.Sprintf("invalid checksum: %v, wanted %v", e.actual, e.expected)
+}
+
+var errNotFound = errors.New("not found")
+
+func downloadInternal(url, dir string, checksum map[string]string, stripPathComponents int) (err error) {
     resp, err := http.Get(url) //nolint:gosec,noctx
     if err != nil {
         return errors.Wrapf(err, "unable to get %q", url)
@@ -168,6 +226,10 @@ func Download(url, dir string, checksum map[string]string, stripPathComponents i
     defer resp.Body.Close() //nolint:errcheck
 
     if resp.StatusCode != http.StatusOK {
+        if resp.StatusCode == http.StatusNotFound {
+            return errNotFound
+        }
+
         return errors.Errorf("invalid server response for %q: %v", url, resp.Status)
     }
 
@@ -187,7 +249,7 @@ func Download(url, dir string, checksum map[string]string, stripPathComponents i
         return errors.Errorf("missing checksum - calculated as %v", actualChecksum)
 
     case checksum[url] != actualChecksum:
-        return errors.Errorf("invalid checksum: %v, wanted %v", actualChecksum, checksum[url])
+        return InvalidChecksumError{actualChecksum, checksum[url]}
 
     default:
         log.Printf("%v checksum ok", url)
@@ -210,7 +272,7 @@ func Download(url, dir string, checksum map[string]string, stripPathComponents i
     case strings.HasSuffix(url, ".tar.gz"):
         return errors.Wrap(untar(dir, r, stripPathComponents), "untar error")
     case strings.HasSuffix(url, ".zip"):
-        return errors.Wrap(unzip(dir, r, stripPathComponents), "untar error")
+        return errors.Wrap(unzip(dir, r, stripPathComponents), "unzip error")
     default:
         return errors.Errorf("unsupported archive format")
     }
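
For illustration, below is a minimal, standalone sketch of the exponential back-off pattern the patch adds to Download. The names retry, fn, and errPermanent are hypothetical and not part of the patch, and the sketch uses the standard library errors/fmt packages rather than the github.com/pkg/errors helpers used in autodownload.go; the real code retries downloadInternal and treats errNotFound and InvalidChecksumError as non-retryable via errors.Is and errors.As.

// Sketch only; names and sleep values here are illustrative, not the patch's code.
package main

import (
    "errors"
    "fmt"
    "log"
    "time"
)

// errPermanent is a hypothetical sentinel for failures that should not be retried.
var errPermanent = errors.New("permanent failure")

// retry calls fn up to maxRetries times, doubling the sleep between attempts
// and aborting immediately on non-retryable errors.
func retry(fn func() error, maxRetries int, initialSleep time.Duration) error {
    sleep := initialSleep

    for i := 0; i < maxRetries; i++ {
        err := fn()
        if err == nil {
            return nil
        }

        // non-retryable errors abort the loop immediately
        if errors.Is(err, errPermanent) {
            return fmt.Errorf("non-retryable: %w", err)
        }

        // do not sleep after the final attempt
        if i != maxRetries-1 {
            log.Printf("attempt #%v failed, sleeping for %v: %v", i, sleep, err)
            time.Sleep(sleep)
            sleep *= 2
        }
    }

    return fmt.Errorf("all %v attempts failed", maxRetries)
}

func main() {
    attempts := 0

    // fails twice with a transient error, then succeeds on the third attempt
    err := retry(func() error {
        attempts++
        if attempts < 3 {
            return errors.New("transient failure")
        }
        return nil
    }, 8, 5*time.Millisecond)

    fmt.Println(err, "after", attempts, "attempts")
}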