Rework HTTP downloader retry logic

Apply retries as a global, config-level option `downloadRetries` so that
it can be applied to any aptly command which downloads objects.

Unwrap `errors.Wrap`, which is used in the downloader.

Unwrap `*url.Error` which should be the actual error returned from the
HTTP client, catch more cases, be more specific around failures.
This commit is contained in:
Andrey Smirnov
2019-08-06 00:41:14 +03:00
committed by Andrey Smirnov
parent 2e7f624b34
commit f0a370db24
22 changed files with 123 additions and 63 deletions

View File

@@ -127,7 +127,7 @@ type Downloader interface {
// Download starts new download task
Download(ctx context.Context, url string, destination string) error
// DownloadWithChecksum starts new download task with checksum verification
DownloadWithChecksum(ctx context.Context, url string, destination string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error
DownloadWithChecksum(ctx context.Context, url string, destination string, expected *utils.ChecksumInfo, ignoreMismatch bool) error
// GetProgress returns Progress object
GetProgress() Progress
// GetLength returns size by heading object with url

View File

@@ -41,7 +41,6 @@ func aptlyMirrorUpdate(cmd *commander.Command, args []string) error {
}
ignoreMismatch := context.Flags().Lookup("ignore-checksums").Value.Get().(bool)
maxTries := context.Flags().Lookup("max-tries").Value.Get().(int)
verifier, err := getVerifier(context.Flags())
if err != nil {
@@ -54,7 +53,7 @@ func aptlyMirrorUpdate(cmd *commander.Command, args []string) error {
}
context.Progress().Printf("Downloading & parsing package files...\n")
err = repo.DownloadPackageIndexes(context.Progress(), context.Downloader(), verifier, context.CollectionFactory(), ignoreMismatch, maxTries)
err = repo.DownloadPackageIndexes(context.Progress(), context.Downloader(), verifier, context.CollectionFactory(), ignoreMismatch)
if err != nil {
return fmt.Errorf("unable to update: %s", err)
}
@@ -173,8 +172,7 @@ func aptlyMirrorUpdate(cmd *commander.Command, args []string) error {
repo.PackageURL(task.File.DownloadURL()).String(),
task.TempDownPath,
&task.File.Checksums,
ignoreMismatch,
maxTries)
ignoreMismatch)
if e != nil {
pushError(e)
continue

View File

@@ -213,7 +213,15 @@ func (context *AptlyContext) Downloader() aptly.Downloader {
if downloadLimit == 0 {
downloadLimit = context.config().DownloadLimit
}
context.downloader = http.NewDownloader(downloadLimit*1024, context._progress())
maxTries := context.config().DownloadRetries + 1
maxTriesFlag := context.flags.Lookup("max-tries")
if maxTriesFlag != nil {
maxTriesFlagValue := maxTriesFlag.Value.Get().(int)
if maxTriesFlagValue > maxTries {
maxTries = maxTriesFlagValue
}
}
context.downloader = http.NewDownloader(downloadLimit*1024, maxTries, context._progress())
}
return context.downloader

View File

@@ -423,7 +423,7 @@ ok:
// DownloadPackageIndexes downloads & parses package index files
func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.Downloader, verifier pgp.Verifier, collectionFactory *CollectionFactory,
ignoreMismatch bool, maxTries int) error {
ignoreMismatch bool) error {
if repo.packageList != nil {
panic("packageList != nil")
}
@@ -456,7 +456,7 @@ func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.
for _, info := range packagesPaths {
path, kind, component, architecture := info[0], info[1], info[2], info[3]
packagesReader, packagesFile, err := http.DownloadTryCompression(gocontext.TODO(), d, repo.IndexesRootURL(), path, repo.ReleaseFiles, ignoreMismatch, maxTries)
packagesReader, packagesFile, err := http.DownloadTryCompression(gocontext.TODO(), d, repo.IndexesRootURL(), path, repo.ReleaseFiles, ignoreMismatch)
isInstaller := kind == PackageTypeInstaller
if err != nil {

View File

@@ -276,7 +276,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -303,7 +303,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -324,7 +324,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -351,7 +351,7 @@ func (s *RemoteRepoSuite) TestDownloadWithInstaller(c *C) {
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/installer-i386/current/images/SHA256SUMS", exampleInstallerHashSumFile)
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/installer-i386/current/images/MANIFEST", exampleInstallerManifestFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -395,7 +395,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -439,7 +439,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -464,7 +464,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
@@ -488,7 +488,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
err := s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
@@ -516,7 +516,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
err = s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
@@ -538,7 +538,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
err = s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
@@ -569,7 +569,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
err := s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
@@ -615,7 +615,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
err = s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
@@ -641,7 +641,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
err = s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)

View File

@@ -39,7 +39,7 @@ var compressionMethods = []struct {
// DownloadTryCompression tries to download from URL .bz2, .gz and raw extension until
// it finds existing file.
func DownloadTryCompression(ctx context.Context, downloader aptly.Downloader, baseURL *url.URL, path string, expectedChecksums map[string]utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (io.Reader, *os.File, error) {
func DownloadTryCompression(ctx context.Context, downloader aptly.Downloader, baseURL *url.URL, path string, expectedChecksums map[string]utils.ChecksumInfo, ignoreMismatch bool) (io.Reader, *os.File, error) {
var err error
for _, method := range compressionMethods {
@@ -63,7 +63,7 @@ func DownloadTryCompression(ctx context.Context, downloader aptly.Downloader, ba
if foundChecksum {
expected := expectedChecksums[bestSuffix]
file, err = DownloadTempWithChecksum(ctx, downloader, tryURL.String(), &expected, ignoreMismatch, maxTries)
file, err = DownloadTempWithChecksum(ctx, downloader, tryURL.String(), &expected, ignoreMismatch)
} else {
if !ignoreMismatch {
continue

View File

@@ -44,7 +44,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
buf = make([]byte, 4)
d := NewFakeDownloader()
d.ExpectResponse("http://example.com/file.bz2", bzipData)
r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -56,7 +56,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d = NewFakeDownloader()
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.gz", gzipData)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -69,7 +69,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.xz", xzData)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -83,7 +83,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
d.ExpectResponse("http://example.com/file", rawData)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -94,7 +94,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d = NewFakeDownloader()
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.gz", "x")
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true)
c.Assert(err, ErrorMatches, "unexpected EOF")
c.Assert(d.Empty(), Equals, true)
}
@@ -112,7 +112,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionLongestSuffix(c *C) {
buf = make([]byte, 4)
d := NewFakeDownloader()
d.ExpectResponse("http://example.com/subdir/file.bz2", bzipData)
r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "subdir/file", expectedChecksums, false, 1)
r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "subdir/file", expectedChecksums, false)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -122,7 +122,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionLongestSuffix(c *C) {
func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
d := NewFakeDownloader()
_, _, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
_, _, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true)
c.Assert(err, ErrorMatches, "unexpected request.*")
d = NewFakeDownloader()
@@ -130,7 +130,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
d.ExpectError("http://example.com/file", errors.New("403"))
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true)
c.Assert(err, ErrorMatches, "403")
d = NewFakeDownloader()
@@ -144,6 +144,6 @@ func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
"file.xz": {Size: 7},
"file": {Size: 7},
}
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
_, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false)
c.Assert(err, ErrorMatches, "checksums don't match.*")
}

View File

@@ -6,6 +6,7 @@ import (
"io"
"net"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
@@ -28,12 +29,13 @@ var (
type downloaderImpl struct {
progress aptly.Progress
aggWriter io.Writer
maxTries int
client *http.Client
}
// NewDownloader creates new instance of Downloader which specified number
// of threads and download limit in bytes/sec
func NewDownloader(downLimit int64, progress aptly.Progress) aptly.Downloader {
func NewDownloader(downLimit int64, maxTries int, progress aptly.Progress) aptly.Downloader {
transport := http.Transport{}
transport.Proxy = http.DefaultTransport.(*http.Transport).Proxy
transport.ResponseHeaderTimeout = 30 * time.Second
@@ -45,6 +47,7 @@ func NewDownloader(downLimit int64, progress aptly.Progress) aptly.Downloader {
downloader := &downloaderImpl{
progress: progress,
maxTries: maxTries,
client: &http.Client{
Transport: &transport,
},
@@ -71,7 +74,19 @@ func (downloader *downloaderImpl) GetLength(ctx context.Context, url string) (in
return -1, err
}
resp, err := downloader.client.Do(req)
var resp *http.Response
maxTries := downloader.maxTries
for maxTries > 0 {
resp, err = downloader.client.Do(req)
if err != nil && retryableError(err) {
maxTries--
} else {
// stop retrying
break
}
}
if err != nil {
return -1, errors.Wrap(err, url)
}
@@ -89,10 +104,25 @@ func (downloader *downloaderImpl) GetLength(ctx context.Context, url string) (in
// Download starts new download task
func (downloader *downloaderImpl) Download(ctx context.Context, url string, destination string) error {
return downloader.DownloadWithChecksum(ctx, url, destination, nil, false, 1)
return downloader.DownloadWithChecksum(ctx, url, destination, nil, false)
}
func retryableError(err error) bool {
// unwrap errors.Wrap
err = errors.Cause(err)
// unwrap *url.Error
if wrapped, ok := err.(*url.Error); ok {
err = wrapped.Err
}
switch err {
case io.EOF:
return true
case io.ErrUnexpectedEOF:
return true
}
switch err.(type) {
case *net.OpError:
return true
@@ -101,6 +131,7 @@ func retryableError(err error) bool {
case net.Error:
return true
}
return false
}
@@ -123,7 +154,7 @@ func (downloader *downloaderImpl) newRequest(ctx context.Context, method, url st
// DownloadWithChecksum starts new download task with checksum verification
func (downloader *downloaderImpl) DownloadWithChecksum(ctx context.Context, url string, destination string,
expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
expected *utils.ChecksumInfo, ignoreMismatch bool) error {
if downloader.progress != nil {
downloader.progress.Printf("Downloading %s...\n", url)
@@ -131,6 +162,7 @@ func (downloader *downloaderImpl) DownloadWithChecksum(ctx context.Context, url
req, err := downloader.newRequest(ctx, "GET", url)
var temppath string
maxTries := downloader.maxTries
for maxTries > 0 {
temppath, err = downloader.download(req, url, destination, expected, ignoreMismatch)

View File

@@ -45,7 +45,7 @@ func (s *DownloaderSuiteBase) SetUpTest(c *C) {
s.progress = console.NewProgress()
s.progress.Start()
s.d = NewDownloader(0, s.progress)
s.d = NewDownloader(0, 1, s.progress)
s.ctx = context.Background()
}
@@ -78,32 +78,32 @@ func (s *DownloaderSuite) TestDownloadOK(c *C) {
}
func (s *DownloaderSuite) TestDownloadWithChecksum(c *C) {
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{}, false, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{}, false),
ErrorMatches, ".*size check mismatch 12 != 0")
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, false, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, false),
ErrorMatches, ".*md5 hash mismatch \"a1acb0fe91c7db45ec4d775192ec5738\" != \"abcdef\"")
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, true, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, true),
IsNil)
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false),
IsNil)
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false),
ErrorMatches, ".*sha1 hash mismatch \"921893bae6ad6fd818401875d6779254ef0ff0ec\" != \"abcdef\"")
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec"}, false, 1),
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec"}, false),
IsNil)
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "abcdef"}, false, 1),
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "abcdef"}, false),
ErrorMatches, ".*sha256 hash mismatch \"b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac\" != \"abcdef\"")
checksums := utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &checksums, false, 1),
c.Assert(s.d.DownloadWithChecksum(s.ctx, s.url+"/test", s.tempfile.Name(), &checksums, false),
IsNil)
// download backfills missing checksums
c.Check(checksums.SHA512, Equals, "bac18bf4e564856369acc2ed57300fecba3a2c1af5ae8304021e4252488678feb18118466382ee4e1210fe1f065080210e453a80cfb37ccb8752af3269df160e")

View File

@@ -89,7 +89,7 @@ func (f *FakeDownloader) getExpectedRequest(url string) (*expectedRequest, error
}
// DownloadWithChecksum performs fake download by matching against first expectation in the queue or any expectation, with checksum verification
func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, filename string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, filename string, expected *utils.ChecksumInfo, ignoreMismatch bool) error {
expectation, err := f.getExpectedRequest(url)
if err != nil {
return err
@@ -130,7 +130,7 @@ func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, f
// Download performs fake download by matching against first expectation in the queue
func (f *FakeDownloader) Download(ctx context.Context, url string, filename string) error {
return f.DownloadWithChecksum(ctx, url, filename, nil, false, 1)
return f.DownloadWithChecksum(ctx, url, filename, nil, false)
}
// GetProgress returns Progress object

View File

@@ -14,13 +14,13 @@ import (
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTemp(ctx context.Context, downloader aptly.Downloader, url string) (*os.File, error) {
return DownloadTempWithChecksum(ctx, downloader, url, nil, false, 1)
return DownloadTempWithChecksum(ctx, downloader, url, nil, false)
}
// DownloadTempWithChecksum is a DownloadTemp with checksum verification
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTempWithChecksum(ctx context.Context, downloader aptly.Downloader, url string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (*os.File, error) {
func DownloadTempWithChecksum(ctx context.Context, downloader aptly.Downloader, url string, expected *utils.ChecksumInfo, ignoreMismatch bool) (*os.File, error) {
tempdir, err := ioutil.TempDir(os.TempDir(), "aptly")
if err != nil {
return nil, err
@@ -34,7 +34,7 @@ func DownloadTempWithChecksum(ctx context.Context, downloader aptly.Downloader,
defer downloader.GetProgress().ShutdownBar()
}
err = downloader.DownloadWithChecksum(ctx, url, tempfile, expected, ignoreMismatch, maxTries)
err = downloader.DownloadWithChecksum(ctx, url, tempfile, expected, ignoreMismatch)
if err != nil {
return nil, err
}

View File

@@ -38,12 +38,12 @@ func (s *TempSuite) TestDownloadTemp(c *C) {
func (s *TempSuite) TestDownloadTempWithChecksum(c *C) {
f, err := DownloadTempWithChecksum(s.ctx, s.d, s.url+"/test", &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false, 1)
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false)
c.Assert(err, IsNil)
c.Assert(f.Close(), IsNil)
_, err = DownloadTempWithChecksum(s.ctx, s.d, s.url+"/test", &utils.ChecksumInfo{Size: 13}, false, 1)
_, err = DownloadTempWithChecksum(s.ctx, s.d, s.url+"/test", &utils.ChecksumInfo{Size: 13}, false)
c.Assert(err, ErrorMatches, ".*size check mismatch 12 != 13")
}

View File

@@ -1,10 +1,12 @@
all: prepare generate
RUBYBINPATH=$(shell ruby -r rubygems -e 'puts Gem.user_dir')/bin
prepare:
gem install specific_install
gem specific_install -l smira/ronn
all: $(RUBYBINPATH)/ronn generate
$(RUBYBINPATH)/ronn:
gem install --user-install specific_install
gem specific_install --user-install -l smira/ronn
generate:
go run ../_man/gen.go
PATH=$(RUBYBINPATH):$(PATH) go run ../_man/gen.go
.PHONY: prepare generate
.PHONY: generate

View File

@@ -1,7 +1,7 @@
.\" generated with Ronn/v0.7.3
.\" http://github.com/rtomayko/ronn/tree/0.7.3
.
.TH "APTLY" "1" "September 2018" "" ""
.TH "APTLY" "1" "August 2019" "" ""
.
.SH "NAME"
\fBaptly\fR \- Debian repository management tool
@@ -38,6 +38,7 @@ Configuration file is stored in JSON format (default values shown below):
"rootDir": "$HOME/\.aptly",
"downloadConcurrency": 4,
"downloadSpeedLimit": 0,
"downloadRetries": 0,
"architectures": [],
"dependencyFollowSuggests": false,
"dependencyFollowRecommends": false,
@@ -114,7 +115,11 @@ is a number of parallel download threads to use when downloading packages
.
.TP
\fBdownloadSpeedLimit\fR
limit in kbytes/sec on download speed while mirroring remote repositieis
limit in kbytes/sec on download speed while mirroring remote repositories
.
.TP
\fBdownloadRetries\fR
number of retries for download attempts
.
.TP
\fBarchitectures\fR
@@ -2050,5 +2055,11 @@ Strajan Sebastian (https://github\.com/strajansebastian)
.IP "\[ci]" 4
Artem Smirnov (https://github\.com/urpylka)
.
.IP "\[ci]" 4
William Manley (https://github\.com/wmanley)
.
.IP "\[ci]" 4
Shengjing Zhu (https://github\.com/zhsj)
.
.IP "" 0

View File

@@ -30,6 +30,7 @@ Configuration file is stored in JSON format (default values shown below):
"rootDir": "$HOME/.aptly",
"downloadConcurrency": 4,
"downloadSpeedLimit": 0,
"downloadRetries": 0,
"architectures": [],
"dependencyFollowSuggests": false,
"dependencyFollowRecommends": false,
@@ -99,7 +100,10 @@ Options:
is a number of parallel download threads to use when downloading packages
* `downloadSpeedLimit`:
limit in kbytes/sec on download speed while mirroring remote repositieis
limit in kbytes/sec on download speed while mirroring remote repositories
* `downloadRetries`:
number of retries for download attempts
* `architectures`:
is a list of architectures to process; if left empty defaults to all available architectures; could be

View File

@@ -98,6 +98,7 @@ class BaseTest(object):
"rootDir": "%s/.aptly" % os.environ["HOME"],
"downloadConcurrency": 4,
"downloadSpeedLimit": 0,
"downloadRetries": 5,
"architectures": [],
"dependencyFollowSuggests": False,
"dependencyFollowRecommends": False,

View File

@@ -2,6 +2,7 @@
"rootDir": "${HOME}/.aptly",
"downloadConcurrency": 4,
"downloadSpeedLimit": 0,
"downloadRetries": 5,
"architectures": [],
"dependencyFollowSuggests": false,
"dependencyFollowRecommends": false,

View File

@@ -2,6 +2,7 @@
"rootDir": "${HOME}/.aptly",
"downloadConcurrency": 4,
"downloadSpeedLimit": 0,
"downloadRetries": 0,
"architectures": [],
"dependencyFollowSuggests": false,
"dependencyFollowRecommends": false,

View File

@@ -6,6 +6,6 @@ gpgv: Good signature from "Package Maintainer (PagerDuty, Inc.) <packages@pagerd
Downloading & parsing package files...
Downloading http://packages.pagerduty.com/pdagent/deb/Packages.gz...
Building download queue...
Download queue: 15 items (1.87 MiB)
Download queue: 16 items (2.08 MiB)
Mirror `pagerduty` has been successfully updated.

View File

@@ -6,6 +6,6 @@ openpgp: Good signature from "Package Maintainer (PagerDuty, Inc.) <packages@pag
Downloading & parsing package files...
Downloading http://packages.pagerduty.com/pdagent/deb/Packages.gz...
Building download queue...
Download queue: 15 items (1.87 MiB)
Download queue: 16 items (2.08 MiB)
Mirror `pagerduty` has been successfully updated.

View File

@@ -11,6 +11,7 @@ type ConfigStructure struct { // nolint: maligned
RootDir string `json:"rootDir"`
DownloadConcurrency int `json:"downloadConcurrency"`
DownloadLimit int64 `json:"downloadSpeedLimit"`
DownloadRetries int `json:"downloadRetries"`
Architectures []string `json:"architectures"`
DepFollowSuggests bool `json:"dependencyFollowSuggests"`
DepFollowRecommends bool `json:"dependencyFollowRecommends"`

View File

@@ -57,6 +57,7 @@ func (s *ConfigSuite) TestSaveConfig(c *C) {
" \"rootDir\": \"/tmp/aptly\",\n"+
" \"downloadConcurrency\": 5,\n"+
" \"downloadSpeedLimit\": 0,\n"+
" \"downloadRetries\": 0,\n"+
" \"architectures\": null,\n"+
" \"dependencyFollowSuggests\": false,\n"+
" \"dependencyFollowRecommends\": false,\n"+