Use Go context to abort mirror updates gracefully

There are two fixes here:

1. Abort package downloads immediately when ^C is pressed.
2. Import files that were already downloaded into the package pool,
so that the next time the mirror is updated, aptly won't download
them again.
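For fix 1, the overall idea is to create a cancellable context at the top level, cancel it when SIGINT arrives, and pass that context down into the download code. A minimal sketch of that wiring follows; updateMirror, importDownloaded, and the file names are hypothetical placeholders, not aptly's actual API:

package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Cancel the context as soon as ^C (SIGINT) is received.
	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, os.Interrupt)
	go func() {
		<-sigCh
		cancel()
	}()

	if err := updateMirror(ctx); err != nil {
		fmt.Fprintln(os.Stderr, "mirror update aborted:", err)
		// Fix 2: salvage whatever was fetched before the abort so the
		// next run does not download it again (importDownloaded is a
		// stand-in for aptly's package pool import).
		importDownloaded()
	}
}

// updateMirror stands in for the real download loop: before starting each
// download it checks whether the context has already been cancelled.
func updateMirror(ctx context.Context) error {
	for _, name := range []string{"Packages", "Sources", "pool/a.deb"} {
		select {
		case <-ctx.Done():
			return ctx.Err() // ^C pressed: stop immediately
		default:
		}
		fmt.Println("downloading", name)
	}
	return nil
}

// importDownloaded is a hypothetical stand-in for moving already-downloaded
// files into the package pool.
func importDownloaded() {
	fmt.Println("importing completed downloads into the package pool")
}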
Andrey Smirnov
2017-11-30 00:49:37 +03:00
parent a037615962
commit 15618c8ea8
12 changed files with 131 additions and 80 deletions


@@ -1,6 +1,7 @@
package http
import (
"context"
"errors"
"io"
"net/url"
@@ -12,6 +13,7 @@ import (
type CompressionSuite struct {
baseURL *url.URL
+ ctx context.Context
}
var _ = Suite(&CompressionSuite{})
@@ -25,6 +27,7 @@ const (
func (s *CompressionSuite) SetUpTest(c *C) {
s.baseURL, _ = url.Parse("http://example.com/")
+ s.ctx = context.Background()
}
func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
@@ -41,7 +44,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
buf = make([]byte, 4)
d := NewFakeDownloader()
d.ExpectResponse("http://example.com/file.bz2", bzipData)
- r, file, err := DownloadTryCompression(d, s.baseURL, "file", expectedChecksums, false, 1)
+ r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -53,7 +56,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d = NewFakeDownloader()
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.gz", gzipData)
- r, file, err = DownloadTryCompression(d, s.baseURL, "file", expectedChecksums, false, 1)
+ r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -66,7 +69,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.xz", xzData)
- r, file, err = DownloadTryCompression(d, s.baseURL, "file", expectedChecksums, false, 1)
+ r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -80,7 +83,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
d.ExpectResponse("http://example.com/file", rawData)
- r, file, err = DownloadTryCompression(d, s.baseURL, "file", expectedChecksums, false, 1)
+ r, file, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -91,7 +94,7 @@ func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
d = NewFakeDownloader()
d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
d.ExpectResponse("http://example.com/file.gz", "x")
- _, _, err = DownloadTryCompression(d, s.baseURL, "file", nil, true, 1)
+ _, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
c.Assert(err, ErrorMatches, "unexpected EOF")
c.Assert(d.Empty(), Equals, true)
}
@@ -109,7 +112,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionLongestSuffix(c *C) {
buf = make([]byte, 4)
d := NewFakeDownloader()
d.ExpectResponse("http://example.com/subdir/file.bz2", bzipData)
- r, file, err := DownloadTryCompression(d, s.baseURL, "subdir/file", expectedChecksums, false, 1)
+ r, file, err := DownloadTryCompression(s.ctx, d, s.baseURL, "subdir/file", expectedChecksums, false, 1)
c.Assert(err, IsNil)
defer file.Close()
io.ReadFull(r, buf)
@@ -119,7 +122,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionLongestSuffix(c *C) {
func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
d := NewFakeDownloader()
- _, _, err := DownloadTryCompression(d, s.baseURL, "file", nil, true, 1)
+ _, _, err := DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
c.Assert(err, ErrorMatches, "unexpected request.*")
d = NewFakeDownloader()
@@ -127,7 +130,7 @@ func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
d.ExpectError("http://example.com/file", errors.New("403"))
- _, _, err = DownloadTryCompression(d, s.baseURL, "file", nil, true, 1)
+ _, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", nil, true, 1)
c.Assert(err, ErrorMatches, "403")
d = NewFakeDownloader()
@@ -141,6 +144,6 @@ func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
"file.xz": {Size: 7},
"file": {Size: 7},
}
- _, _, err = DownloadTryCompression(d, s.baseURL, "file", expectedChecksums, false, 1)
+ _, _, err = DownloadTryCompression(s.ctx, d, s.baseURL, "file", expectedChecksums, false, 1)
c.Assert(err, ErrorMatches, "checksums don't match.*")
}
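The change threaded through these tests is that DownloadTryCompression now takes a context.Context as its first argument; the suite simply supplies context.Background() from SetUpTest. One plausible way for the downloader underneath to honor that context, sketched here under assumptions rather than copied from aptly's code, is to attach the context to the outgoing HTTP request so that cancelling it aborts the transfer mid-flight:

package mirror // hypothetical package name for this sketch

import (
	"context"
	"io"
	"net/http"
)

// download is a hypothetical helper showing how a context-aware downloader
// can propagate cancellation into the HTTP layer.
func download(ctx context.Context, url string, w io.Writer) error {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return err
	}
	req = req.WithContext(ctx) // cancelling ctx aborts the in-flight request

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err // includes context.Canceled after ^C
	}
	defer resp.Body.Close()

	// The copy also returns an error once the context is cancelled,
	// because reading the response body fails at that point.
	_, err = io.Copy(w, resp.Body)
	return err
}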