Refactor HTTP downloader package
* Drop the multi-threaded downloader. Parallelism doesn't really belong here: some call sites need it and some don't, and it is only exercised when updating mirrors, so the downloader is not the right place to handle it.
* Pass expectedChecksums as a pointer, so it's easy to pass a `nil` value ("don't verify"), and so the downloader can fill checksums back in (not implemented right now).
* Break the downloader and its tests into more files.
* Use pkg/errors instead of fmt.
@@ -81,18 +81,9 @@ type Progress interface {
 // Downloader is parallel HTTP fetcher
 type Downloader interface {
 	// Download starts new download task
-	Download(url string, destination string, result chan<- error)
+	Download(url string, destination string) error
 	// DownloadWithChecksum starts new download task with checksum verification
-	DownloadWithChecksum(url string, destination string, result chan<- error, expected utils.ChecksumInfo, ignoreMismatch bool, maxTries int)
-	// Pause pauses task processing
-	Pause()
-	// Resume resumes task processing
-	Resume()
-	// Shutdown stops downloader after current tasks are finished,
-	// but doesn't process rest of queue
-	Shutdown()
-	// Abort stops downloader without waiting for shutdown
-	Abort()
+	DownloadWithChecksum(url string, destination string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error
 	// GetProgress returns Progress object
 	GetProgress() Progress
 }
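This interface change is the heart of the refactor: callers get a plain error return instead of feeding a result channel, and a nil checksum pointer replaces the old ChecksumInfo{Size: -1} sentinel. A minimal, hypothetical caller of the new API (the URL, paths and checksum values below are made up for illustration):

package main

import (
	"log"

	"github.com/smira/aptly/console"
	"github.com/smira/aptly/http"
	"github.com/smira/aptly/utils"
)

func main() {
	progress := console.NewProgress()
	progress.Start()
	defer progress.Shutdown()

	// One downloader, no bandwidth limit; parallelism is now the caller's job.
	d := http.NewDownloader(0, progress)

	// Plain download: the error is a return value, not a channel message.
	if err := d.Download("http://example.com/file", "/tmp/file"); err != nil {
		log.Println(err)
	}

	// nil means "no checksum verification"; a non-nil pointer enables size
	// and hash checks (a 4-byte body "test" has this MD5).
	expected := &utils.ChecksumInfo{Size: 4, MD5: "098f6bcd4621d373cade4e832627b4f6"}
	if err := d.DownloadWithChecksum("http://example.com/file", "/tmp/file", expected, false, 3); err != nil {
		log.Println(err)
	}
}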
http/compression.go (new file, 86 lines)
@@ -0,0 +1,86 @@
package http

import (
	"compress/bzip2"
	"compress/gzip"
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/smira/aptly/aptly"
	"github.com/smira/aptly/utils"
	xz "github.com/smira/go-xz"
)

// List of extensions + corresponding uncompression support
var compressionMethods = []struct {
	extenstion     string
	transformation func(io.Reader) (io.Reader, error)
}{
	{
		extenstion:     ".bz2",
		transformation: func(r io.Reader) (io.Reader, error) { return bzip2.NewReader(r), nil },
	},
	{
		extenstion:     ".gz",
		transformation: func(r io.Reader) (io.Reader, error) { return gzip.NewReader(r) },
	},
	{
		extenstion:     ".xz",
		transformation: func(r io.Reader) (io.Reader, error) { return xz.NewReader(r) },
	},
	{
		extenstion:     "",
		transformation: func(r io.Reader) (io.Reader, error) { return r, nil },
	},
}

// DownloadTryCompression tries to download from URL .bz2, .gz and raw extension until
// it finds existing file.
func DownloadTryCompression(downloader aptly.Downloader, url string, expectedChecksums map[string]utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (io.Reader, *os.File, error) {
	var err error

	for _, method := range compressionMethods {
		var file *os.File

		tryURL := url + method.extenstion
		foundChecksum := false

		for suffix, expected := range expectedChecksums {
			if strings.HasSuffix(tryURL, suffix) {
				file, err = DownloadTempWithChecksum(downloader, tryURL, &expected, ignoreMismatch, maxTries)
				foundChecksum = true
				break
			}
		}

		if !foundChecksum {
			if !ignoreMismatch {
				continue
			}

			file, err = DownloadTemp(downloader, tryURL)
		}

		if err != nil {
			if err1, ok := err.(*Error); ok && (err1.Code == 404 || err1.Code == 403) {
				continue
			}
			return nil, nil, err
		}

		var uncompressed io.Reader
		uncompressed, err = method.transformation(file)
		if err != nil {
			return nil, nil, err
		}

		return uncompressed, file, err
	}

	if err == nil {
		err = fmt.Errorf("no candidates for %s found", url)
	}
	return nil, nil, err
}
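The keys of expectedChecksums are URL suffixes: each candidate URL (base plus extension) is matched against them to find its expected checksum, and 404/403 responses simply move on to the next extension. A hypothetical call, with an invented mirror URL and sizes:

package main

import (
	"io/ioutil"
	"log"

	"github.com/smira/aptly/console"
	aptlyhttp "github.com/smira/aptly/http"
	"github.com/smira/aptly/utils"
)

func main() {
	progress := console.NewProgress()
	progress.Start()
	defer progress.Shutdown()
	d := aptlyhttp.NewDownloader(0, progress)

	// Keys are URL suffixes; the sizes here are made up for the sketch.
	expected := map[string]utils.ChecksumInfo{
		"Packages.bz2": {Size: 43210},
		"Packages.gz":  {Size: 54321},
		"Packages.xz":  {Size: 32100},
		"Packages":     {Size: 123456},
	}

	reader, file, err := aptlyhttp.DownloadTryCompression(d,
		"http://mirror.example.com/dists/stable/main/binary-amd64/Packages",
		expected, false, 3)
	if err != nil {
		log.Fatal(err)
	}
	// file is the temporary file backing reader; it is already unlinked,
	// so Close is the only cleanup needed.
	defer file.Close()

	data, err := ioutil.ReadAll(reader) // decompressed, whichever candidate won
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("fetched %d bytes", len(data))
}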
http/compression_test.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package http

import (
	"errors"
	"io"

	"github.com/smira/aptly/utils"

	. "gopkg.in/check.v1"
)

type CompressionSuite struct{}

var _ = Suite(&CompressionSuite{})

const (
	bzipData = "BZh91AY&SY\xcc\xc3q\xd4\x00\x00\x02A\x80\x00\x10\x02\x00\x0c\x00 \x00!\x9ah3M\x19\x97\x8b\xb9\"\x9c(Hfa\xb8\xea\x00"
	gzipData = "\x1f\x8b\x08\x00\xc8j\xb0R\x00\x03+I-.\xe1\x02\x00\xc65\xb9;\x05\x00\x00\x00"
	xzData   = "\xfd\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00\x21\x01\x16\x00\x00\x00\x74\x2f\xe5\xa3\x01\x00\x04\x74\x65\x73\x74\x0a\x00\x00\x00\x00\x9d\xed\x31\x1d\x0f\x9f\xd7\xe6\x00\x01\x1d\x05\xb8\x2d\x80\xaf\x1f\xb6\xf3\x7d\x01\x00\x00\x00\x00\x04\x59\x5a"
	rawData  = "test"
)

func (s *CompressionSuite) TestDownloadTryCompression(c *C) {
	var buf []byte

	expectedChecksums := map[string]utils.ChecksumInfo{
		"file.bz2": {Size: int64(len(bzipData))},
		"file.gz":  {Size: int64(len(gzipData))},
		"file.xz":  {Size: int64(len(xzData))},
		"file":     {Size: int64(len(rawData))},
	}

	// bzip2 only available
	buf = make([]byte, 4)
	d := NewFakeDownloader()
	d.ExpectResponse("http://example.com/file.bz2", bzipData)
	r, file, err := DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
	c.Assert(err, IsNil)
	defer file.Close()
	io.ReadFull(r, buf)
	c.Assert(string(buf), Equals, rawData)
	c.Assert(d.Empty(), Equals, true)

	// bzip2 not available, but gz is
	buf = make([]byte, 4)
	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectResponse("http://example.com/file.gz", gzipData)
	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
	c.Assert(err, IsNil)
	defer file.Close()
	io.ReadFull(r, buf)
	c.Assert(string(buf), Equals, rawData)
	c.Assert(d.Empty(), Equals, true)

	// bzip2 & gzip not available, but xz is
	buf = make([]byte, 4)
	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
	d.ExpectResponse("http://example.com/file.xz", xzData)
	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
	c.Assert(err, IsNil)
	defer file.Close()
	io.ReadFull(r, buf)
	c.Assert(string(buf), Equals, rawData)
	c.Assert(d.Empty(), Equals, true)

	// bzip2, gzip & xz not available, but raw is
	buf = make([]byte, 4)
	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
	d.ExpectResponse("http://example.com/file", rawData)
	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
	c.Assert(err, IsNil)
	defer file.Close()
	io.ReadFull(r, buf)
	c.Assert(string(buf), Equals, rawData)
	c.Assert(d.Empty(), Equals, true)

	// gzip available, but broken
	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectResponse("http://example.com/file.gz", "x")
	_, file, err = DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
	c.Assert(err, ErrorMatches, "unexpected EOF")
	c.Assert(d.Empty(), Equals, true)
}

func (s *CompressionSuite) TestDownloadTryCompressionErrors(c *C) {
	d := NewFakeDownloader()
	_, _, err := DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
	c.Assert(err, ErrorMatches, "unexpected request.*")

	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
	d.ExpectError("http://example.com/file", errors.New("403"))
	_, _, err = DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
	c.Assert(err, ErrorMatches, "403")

	d = NewFakeDownloader()
	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
	d.ExpectResponse("http://example.com/file", rawData)
	expectedChecksums := map[string]utils.ChecksumInfo{
		"file.bz2": {Size: 7},
		"file.gz":  {Size: 7},
		"file.xz":  {Size: 7},
		"file":     {Size: 7},
	}
	_, _, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
	c.Assert(err, ErrorMatches, "checksums don't match.*")
}
http/download.go (283 lines changed)
@@ -1,11 +1,8 @@
 package http
 
 import (
-	"compress/bzip2"
-	"compress/gzip"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"net/http"
 	"os"
 	"path/filepath"
@@ -13,23 +10,12 @@ import (
 	"time"
 
 	"github.com/mxk/go-flowrate/flowrate"
+	"github.com/pkg/errors"
 	"github.com/smira/aptly/aptly"
 	"github.com/smira/aptly/utils"
 	"github.com/smira/go-ftp-protocol/protocol"
-	"github.com/smira/go-xz"
 )
 
-// Error is download error connected to HTTP code
-type Error struct {
-	Code int
-	URL  string
-}
-
-// Error
-func (e *Error) Error() string {
-	return fmt.Sprintf("HTTP code %d while fetching %s", e.Code, e.URL)
-}
-
 // Check interface
 var (
 	_ aptly.Downloader = (*downloaderImpl)(nil)
@@ -37,30 +23,14 @@ var (
 
 // downloaderImpl is implementation of Downloader interface
 type downloaderImpl struct {
-	queue     chan *downloadTask
-	stop      chan struct{}
-	stopped   chan struct{}
-	pause     chan struct{}
-	unpause   chan struct{}
 	progress  aptly.Progress
 	aggWriter io.Writer
-	threads   int
 	client    *http.Client
 }
 
-// downloadTask represents single item in queue
-type downloadTask struct {
-	url            string
-	destination    string
-	result         chan<- error
-	expected       utils.ChecksumInfo
-	ignoreMismatch bool
-	triesLeft      int
-}
-
 // NewDownloader creates new instance of Downloader which specified number
 // of threads and download limit in bytes/sec
-func NewDownloader(threads int, downLimit int64, progress aptly.Progress) aptly.Downloader {
+func NewDownloader(downLimit int64, progress aptly.Progress) aptly.Downloader {
 	transport := http.Transport{}
 	transport.Proxy = http.DefaultTransport.(*http.Transport).Proxy
 	transport.ResponseHeaderTimeout = 30 * time.Second
@@ -71,12 +41,6 @@ func NewDownloader(threads int, downLimit int64, progress aptly.Progress) aptly.
 	transport.RegisterProtocol("ftp", &protocol.FTPRoundTripper{})
 
 	downloader := &downloaderImpl{
-		queue:    make(chan *downloadTask, 1000),
-		stop:     make(chan struct{}, threads),
-		stopped:  make(chan struct{}, threads),
-		pause:    make(chan struct{}),
-		unpause:  make(chan struct{}),
-		threads:  threads,
 		progress: progress,
 		client: &http.Client{
 			Transport: &transport,
@@ -89,70 +53,28 @@ func NewDownloader(threads int, downLimit int64, progress aptly.Progress) aptly.
 		downloader.aggWriter = progress
 	}
 
-	for i := 0; i < downloader.threads; i++ {
-		go downloader.process()
-	}
-
 	return downloader
 }
 
-// Shutdown stops downloader after current tasks are finished,
-// but doesn't process rest of queue
-func (downloader *downloaderImpl) Shutdown() {
-	for i := 0; i < downloader.threads; i++ {
-		downloader.stop <- struct{}{}
-	}
-
-	for i := 0; i < downloader.threads; i++ {
-		<-downloader.stopped
-	}
-}
-
-// Abort stops downloader but doesn't wait for downloader to stop
-func (downloader *downloaderImpl) Abort() {
-	for i := 0; i < downloader.threads; i++ {
-		downloader.stop <- struct{}{}
-	}
-}
-
-// Pause pauses task processing
-func (downloader *downloaderImpl) Pause() {
-	for i := 0; i < downloader.threads; i++ {
-		downloader.pause <- struct{}{}
-	}
-}
-
-// Resume resumes task processing
-func (downloader *downloaderImpl) Resume() {
-	for i := 0; i < downloader.threads; i++ {
-		downloader.unpause <- struct{}{}
-	}
-}
-
 // GetProgress returns Progress object
 func (downloader *downloaderImpl) GetProgress() aptly.Progress {
 	return downloader.progress
 }
 
 // Download starts new download task
-func (downloader *downloaderImpl) Download(url string, destination string, result chan<- error) {
-	downloader.DownloadWithChecksum(url, destination, result, utils.ChecksumInfo{Size: -1}, false, 1)
+func (downloader *downloaderImpl) Download(url string, destination string) error {
+	return downloader.DownloadWithChecksum(url, destination, nil, false, 1)
 }
 
 // DownloadWithChecksum starts new download task with checksum verification
-func (downloader *downloaderImpl) DownloadWithChecksum(url string, destination string, result chan<- error,
-	expected utils.ChecksumInfo, ignoreMismatch bool, maxTries int) {
-	downloader.queue <- &downloadTask{url: url, destination: destination, result: result, expected: expected, ignoreMismatch: ignoreMismatch, triesLeft: maxTries}
-}
+func (downloader *downloaderImpl) DownloadWithChecksum(url string, destination string,
+	expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
 
-// handleTask processes single download task
-func (downloader *downloaderImpl) handleTask(task *downloadTask) {
-	downloader.progress.Printf("Downloading %s...\n", task.url)
+	downloader.progress.Printf("Downloading %s...\n", url)
 
-	req, err := http.NewRequest("GET", task.url, nil)
+	req, err := http.NewRequest("GET", url, nil)
 	if err != nil {
-		task.result <- fmt.Errorf("%s: %s", task.url, err)
-		return
+		return errors.Wrap(err, url)
 	}
 	req.Close = true
 
@@ -163,12 +85,11 @@ func (downloader *downloaderImpl) handleTask(task *downloadTask) {
 	}
 
 	var temppath string
-	for task.triesLeft > 0 {
-
-		temppath, err = downloader.downloadTask(req, task)
+	for maxTries > 0 {
+		temppath, err = downloader.download(req, url, destination, expected, ignoreMismatch)
 
 		if err != nil {
-			task.triesLeft--
+			maxTries--
 		} else {
 			// successful download
 			break
@@ -177,50 +98,48 @@
 
 	// still an error after retrying, giving up
 	if err != nil {
-		task.result <- err
-		return
+		return err
 	}
 
-	err = os.Rename(temppath, task.destination)
+	err = os.Rename(temppath, destination)
 	if err != nil {
 		os.Remove(temppath)
-		task.result <- fmt.Errorf("%s: %s", task.url, err)
-		return
+		return errors.Wrap(err, url)
 	}
 
-	task.result <- nil
+	return nil
 }
 
-func (downloader *downloaderImpl) downloadTask(req *http.Request, task *downloadTask) (string, error) {
+func (downloader *downloaderImpl) download(req *http.Request, url, destination string, expected *utils.ChecksumInfo, ignoreMismatch bool) (string, error) {
 	resp, err := downloader.client.Do(req)
 	if err != nil {
-		return "", fmt.Errorf("%s: %s", task.url, err)
+		return "", errors.Wrap(err, url)
 	}
 	if resp.Body != nil {
 		defer resp.Body.Close()
 	}
 
 	if resp.StatusCode < 200 || resp.StatusCode > 299 {
-		return "", &Error{Code: resp.StatusCode, URL: task.url}
+		return "", &Error{Code: resp.StatusCode, URL: url}
 	}
 
-	err = os.MkdirAll(filepath.Dir(task.destination), 0777)
+	err = os.MkdirAll(filepath.Dir(destination), 0777)
 	if err != nil {
-		return "", fmt.Errorf("%s: %s", task.url, err)
+		return "", errors.Wrap(err, url)
 	}
 
-	temppath := task.destination + ".down"
+	temppath := destination + ".down"
 
 	outfile, err := os.Create(temppath)
 	if err != nil {
-		return "", fmt.Errorf("%s: %s", task.url, err)
+		return "", errors.Wrap(err, url)
 	}
 	defer outfile.Close()
 
 	checksummer := utils.NewChecksumWriter()
 	writers := []io.Writer{outfile, downloader.aggWriter}
 
-	if task.expected.Size != -1 {
+	if expected != nil {
 		writers = append(writers, checksummer)
 	}
 
@@ -229,26 +148,26 @@ func (downloader *downloaderImpl) downloadTask(req *http.Request, task *download
 	_, err = io.Copy(w, resp.Body)
 	if err != nil {
 		os.Remove(temppath)
-		return "", fmt.Errorf("%s: %s", task.url, err)
+		return "", errors.Wrap(err, url)
 	}
 
-	if task.expected.Size != -1 {
+	if expected != nil {
 		actual := checksummer.Sum()
 
-		if actual.Size != task.expected.Size {
-			err = fmt.Errorf("%s: size check mismatch %d != %d", task.url, actual.Size, task.expected.Size)
-		} else if task.expected.MD5 != "" && actual.MD5 != task.expected.MD5 {
-			err = fmt.Errorf("%s: md5 hash mismatch %#v != %#v", task.url, actual.MD5, task.expected.MD5)
-		} else if task.expected.SHA1 != "" && actual.SHA1 != task.expected.SHA1 {
-			err = fmt.Errorf("%s: sha1 hash mismatch %#v != %#v", task.url, actual.SHA1, task.expected.SHA1)
-		} else if task.expected.SHA256 != "" && actual.SHA256 != task.expected.SHA256 {
-			err = fmt.Errorf("%s: sha256 hash mismatch %#v != %#v", task.url, actual.SHA256, task.expected.SHA256)
-		} else if task.expected.SHA512 != "" && actual.SHA512 != task.expected.SHA512 {
-			err = fmt.Errorf("%s: sha512 hash mismatch %#v != %#v", task.url, actual.SHA512, task.expected.SHA512)
+		if actual.Size != expected.Size {
+			err = fmt.Errorf("%s: size check mismatch %d != %d", url, actual.Size, expected.Size)
+		} else if expected.MD5 != "" && actual.MD5 != expected.MD5 {
+			err = fmt.Errorf("%s: md5 hash mismatch %#v != %#v", url, actual.MD5, expected.MD5)
+		} else if expected.SHA1 != "" && actual.SHA1 != expected.SHA1 {
+			err = fmt.Errorf("%s: sha1 hash mismatch %#v != %#v", url, actual.SHA1, expected.SHA1)
+		} else if expected.SHA256 != "" && actual.SHA256 != expected.SHA256 {
+			err = fmt.Errorf("%s: sha256 hash mismatch %#v != %#v", url, actual.SHA256, expected.SHA256)
+		} else if expected.SHA512 != "" && actual.SHA512 != expected.SHA512 {
+			err = fmt.Errorf("%s: sha512 hash mismatch %#v != %#v", url, actual.SHA512, expected.SHA512)
 		}
 
 		if err != nil {
-			if task.ignoreMismatch {
+			if ignoreMismatch {
 				downloader.progress.Printf("WARNING: %s\n", err.Error())
 			} else {
 				os.Remove(temppath)
@@ -259,131 +178,3 @@ func (downloader *downloaderImpl) downloadTask(req *http.Request, task *download
 
 	return temppath, nil
 }
-
-// process implements download thread in goroutine
-func (downloader *downloaderImpl) process() {
-	for {
-		select {
-		case <-downloader.stop:
-			downloader.stopped <- struct{}{}
-			return
-		case <-downloader.pause:
-			<-downloader.unpause
-		case task := <-downloader.queue:
-			downloader.handleTask(task)
-		}
-	}
-}
-
-// DownloadTemp starts new download to temporary file and returns File
-//
-// Temporary file would be already removed, so no need to cleanup
-func DownloadTemp(downloader aptly.Downloader, url string) (*os.File, error) {
-	return DownloadTempWithChecksum(downloader, url, utils.ChecksumInfo{Size: -1}, false, 1)
-}
-
-// DownloadTempWithChecksum is a DownloadTemp with checksum verification
-//
-// Temporary file would be already removed, so no need to cleanup
-func DownloadTempWithChecksum(downloader aptly.Downloader, url string, expected utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (*os.File, error) {
-	tempdir, err := ioutil.TempDir(os.TempDir(), "aptly")
-	if err != nil {
-		return nil, err
-	}
-	defer os.RemoveAll(tempdir)
-
-	tempfile := filepath.Join(tempdir, "buffer")
-
-	if expected.Size != -1 && downloader.GetProgress() != nil {
-		downloader.GetProgress().InitBar(expected.Size, true)
-		defer downloader.GetProgress().ShutdownBar()
-	}
-
-	ch := make(chan error, 1)
-	downloader.DownloadWithChecksum(url, tempfile, ch, expected, ignoreMismatch, maxTries)
-
-	err = <-ch
-
-	if err != nil {
-		return nil, err
-	}
-
-	file, err := os.Open(tempfile)
-	if err != nil {
-		return nil, err
-	}
-
-	return file, nil
-}
-
-// List of extensions + corresponding uncompression support
-var compressionMethods = []struct {
-	extenstion     string
-	transformation func(io.Reader) (io.Reader, error)
-}{
-	{
-		extenstion:     ".bz2",
-		transformation: func(r io.Reader) (io.Reader, error) { return bzip2.NewReader(r), nil },
-	},
-	{
-		extenstion:     ".gz",
-		transformation: func(r io.Reader) (io.Reader, error) { return gzip.NewReader(r) },
-	},
-	{
-		extenstion:     ".xz",
-		transformation: func(r io.Reader) (io.Reader, error) { return xz.NewReader(r) },
-	},
-	{
-		extenstion:     "",
-		transformation: func(r io.Reader) (io.Reader, error) { return r, nil },
-	},
-}
-
-// DownloadTryCompression tries to download from URL .bz2, .gz and raw extension until
-// it finds existing file.
-func DownloadTryCompression(downloader aptly.Downloader, url string, expectedChecksums map[string]utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (io.Reader, *os.File, error) {
-	var err error
-
-	for _, method := range compressionMethods {
-		var file *os.File
-
-		tryURL := url + method.extenstion
-		foundChecksum := false
-
-		for suffix, expected := range expectedChecksums {
-			if strings.HasSuffix(tryURL, suffix) {
-				file, err = DownloadTempWithChecksum(downloader, tryURL, expected, ignoreMismatch, maxTries)
-				foundChecksum = true
-				break
-			}
-		}
-
-		if !foundChecksum {
-			if !ignoreMismatch {
-				continue
-			}
-
-			file, err = DownloadTemp(downloader, tryURL)
-		}
-
-		if err != nil {
-			if err1, ok := err.(*Error); ok && (err1.Code == 404 || err1.Code == 403) {
-				continue
-			}
-			return nil, nil, err
-		}
-
-		var uncompressed io.Reader
-		uncompressed, err = method.transformation(file)
-		if err != nil {
-			return nil, nil, err
-		}
-
-		return uncompressed, file, err
-	}
-
-	if err == nil {
-		err = fmt.Errorf("no candidates for %s found", url)
-	}
-	return nil, nil, err
-}
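With process(), Pause/Resume and the task queue gone, concurrency moves to the call sites (the mirror-update code). A sketch of what caller-side fan-out can look like under the new synchronous API; fetchTask and fetchAll are illustrative names, not part of this commit:

package example

import (
	"sync"

	"github.com/smira/aptly/aptly"
)

// fetchTask is a hypothetical (url, destination) pair the caller wants fetched.
type fetchTask struct {
	url, destination string
}

// fetchAll downloads every task with at most `threads` downloads in flight,
// roughly the behaviour the removed worker pool used to provide.
func fetchAll(d aptly.Downloader, tasks []fetchTask, threads int) []error {
	var wg sync.WaitGroup
	sem := make(chan struct{}, threads) // semaphore bounding concurrency
	errs := make([]error, len(tasks))

	for i, t := range tasks {
		wg.Add(1)
		go func(i int, t fetchTask) {
			defer wg.Done()
			sem <- struct{}{}        // acquire a slot
			defer func() { <-sem }() // release it
			errs[i] = d.Download(t.url, t.destination)
		}(i, t)
	}

	wg.Wait()
	return errs
}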
@@ -1,15 +1,11 @@
 package http
 
 import (
-	"errors"
 	"fmt"
-	"io"
 	"io/ioutil"
 	"net"
 	"net/http"
 	"os"
-	"runtime"
-	"time"
 
 	"github.com/smira/aptly/aptly"
 	"github.com/smira/aptly/console"
@@ -18,17 +14,16 @@ import (
 	. "gopkg.in/check.v1"
 )
 
-type DownloaderSuite struct {
+type DownloaderSuiteBase struct {
 	tempfile *os.File
 	l        net.Listener
 	url      string
-	ch       chan bool
+	ch       chan struct{}
 	progress aptly.Progress
+	d        aptly.Downloader
 }
 
-var _ = Suite(&DownloaderSuite{})
-
-func (s *DownloaderSuite) SetUpTest(c *C) {
+func (s *DownloaderSuiteBase) SetUpTest(c *C) {
 	s.tempfile, _ = ioutil.TempFile(os.TempDir(), "aptly-test")
 	s.l, _ = net.ListenTCP("tcp4", &net.TCPAddr{IP: net.IPv4(127, 0, 0, 1)})
 	s.url = fmt.Sprintf("http://localhost:%d", s.l.Addr().(*net.TCPAddr).Port)
@@ -38,18 +33,20 @@ func (s *DownloaderSuite) SetUpTest(c *C) {
 		fmt.Fprintf(w, "Hello, %s", r.URL.Path)
 	})
 
-	s.ch = make(chan bool)
+	s.ch = make(chan struct{})
 
 	go func() {
 		http.Serve(s.l, mux)
-		s.ch <- true
+		close(s.ch)
 	}()
 
 	s.progress = console.NewProgress()
 	s.progress.Start()
+
+	s.d = NewDownloader(0, s.progress)
 }
 
-func (s *DownloaderSuite) TearDownTest(c *C) {
+func (s *DownloaderSuiteBase) TearDownTest(c *C) {
 	s.progress.Shutdown()
 
 	s.l.Close()
@@ -59,249 +56,64 @@ func (s *DownloaderSuite) TearDownTest(c *C) {
 	s.tempfile.Close()
 }
 
-func (s *DownloaderSuite) TestStartupShutdown(c *C) {
-	goroutines := runtime.NumGoroutine()
-
-	d := NewDownloader(10, 100, s.progress)
-	d.Shutdown()
-
-	// wait for goroutines to shutdown
-	time.Sleep(100 * time.Millisecond)
-
-	if runtime.NumGoroutine()-goroutines > 1 {
-		c.Errorf("Number of goroutines %d, expected %d", runtime.NumGoroutine(), goroutines)
-	}
+type DownloaderSuite struct {
+	DownloaderSuiteBase
 }
 
-func (s *DownloaderSuite) TestPauseResume(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
+var _ = Suite(&DownloaderSuite{})
 
-	d.Pause()
-	d.Resume()
+func (s *DownloaderSuite) SetUpTest(c *C) {
+	s.DownloaderSuiteBase.SetUpTest(c)
 }
 
+func (s *DownloaderSuite) TearDownTest(c *C) {
+	s.DownloaderSuiteBase.TearDownTest(c)
+}
+
 func (s *DownloaderSuite) TestDownloadOK(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-	ch := make(chan error)
-
-	d.Download(s.url+"/test", s.tempfile.Name(), ch)
-	res := <-ch
-	c.Assert(res, IsNil)
+	c.Assert(s.d.Download(s.url+"/test", s.tempfile.Name()), IsNil)
 }
 
 func (s *DownloaderSuite) TestDownloadWithChecksum(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-	ch := make(chan error)
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{}, false, 1),
+		ErrorMatches, ".*size check mismatch 12 != 0")
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{}, false, 1)
-	res := <-ch
-	c.Assert(res, ErrorMatches, ".*size check mismatch 12 != 0")
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, false, 1),
+		ErrorMatches, ".*md5 hash mismatch \"a1acb0fe91c7db45ec4d775192ec5738\" != \"abcdef\"")
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, false, 1)
-	res = <-ch
-	c.Assert(res, ErrorMatches, ".*md5 hash mismatch \"a1acb0fe91c7db45ec4d775192ec5738\" != \"abcdef\"")
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, true, 1),
+		IsNil)
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, true, 1)
-	res = <-ch
-	c.Assert(res, IsNil)
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false, 1),
+		IsNil)
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false, 1)
-	res = <-ch
-	c.Assert(res, IsNil)
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false, 1),
+		ErrorMatches, ".*sha1 hash mismatch \"921893bae6ad6fd818401875d6779254ef0ff0ec\" != \"abcdef\"")
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false, 1)
-	res = <-ch
-	c.Assert(res, ErrorMatches, ".*sha1 hash mismatch \"921893bae6ad6fd818401875d6779254ef0ff0ec\" != \"abcdef\"")
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
+		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec"}, false, 1),
+		IsNil)
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
-		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec"}, false, 1)
-	res = <-ch
-	c.Assert(res, IsNil)
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
+		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "abcdef"}, false, 1),
+		ErrorMatches, ".*sha256 hash mismatch \"b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac\" != \"abcdef\"")
 
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
-		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "abcdef"}, false, 1)
-	res = <-ch
-	c.Assert(res, ErrorMatches, ".*sha256 hash mismatch \"b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac\" != \"abcdef\"")
-
-	d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
-		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false, 1)
-	res = <-ch
-	c.Assert(res, IsNil)
+	c.Assert(s.d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
+		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false, 1),
+		IsNil)
 }
 
 func (s *DownloaderSuite) TestDownload404(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-	ch := make(chan error)
-
-	d.Download(s.url+"/doesntexist", s.tempfile.Name(), ch)
-	res := <-ch
-	c.Assert(res, ErrorMatches, "HTTP code 404.*")
+	c.Assert(s.d.Download(s.url+"/doesntexist", s.tempfile.Name()),
+		ErrorMatches, "HTTP code 404.*")
 }
 
 func (s *DownloaderSuite) TestDownloadConnectError(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-	ch := make(chan error)
-
-	d.Download("http://nosuch.localhost/", s.tempfile.Name(), ch)
-	res := <-ch
-	c.Assert(res, ErrorMatches, ".*no such host")
+	c.Assert(s.d.Download("http://nosuch.localhost/", s.tempfile.Name()),
+		ErrorMatches, ".*no such host")
 }
 
 func (s *DownloaderSuite) TestDownloadFileError(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-	ch := make(chan error)
-
-	d.Download(s.url+"/test", "/", ch)
-	res := <-ch
-	c.Assert(res, ErrorMatches, ".*permission denied")
-}
-
-func (s *DownloaderSuite) TestDownloadTemp(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-
-	f, err := DownloadTemp(d, s.url+"/test")
-	c.Assert(err, IsNil)
-	defer f.Close()
-
-	buf := make([]byte, 1)
-
-	f.Read(buf)
-	c.Assert(buf, DeepEquals, []byte("H"))
-
-	_, err = os.Stat(f.Name())
-	c.Assert(os.IsNotExist(err), Equals, true)
-}
-
-func (s *DownloaderSuite) TestDownloadTempWithChecksum(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-
-	f, err := DownloadTempWithChecksum(d, s.url+"/test", utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
-		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false, 1)
-	defer f.Close()
-	c.Assert(err, IsNil)
-
-	_, err = DownloadTempWithChecksum(d, s.url+"/test", utils.ChecksumInfo{Size: 13}, false, 1)
-	c.Assert(err, ErrorMatches, ".*size check mismatch 12 != 13")
-}
-
-func (s *DownloaderSuite) TestDownloadTempError(c *C) {
-	d := NewDownloader(2, 0, s.progress)
-	defer d.Shutdown()
-
-	f, err := DownloadTemp(d, s.url+"/doesntexist")
-	c.Assert(err, NotNil)
-	c.Assert(f, IsNil)
-	c.Assert(err, ErrorMatches, "HTTP code 404.*")
-}
-
-const (
-	bzipData = "BZh91AY&SY\xcc\xc3q\xd4\x00\x00\x02A\x80\x00\x10\x02\x00\x0c\x00 \x00!\x9ah3M\x19\x97\x8b\xb9\"\x9c(Hfa\xb8\xea\x00"
-	gzipData = "\x1f\x8b\x08\x00\xc8j\xb0R\x00\x03+I-.\xe1\x02\x00\xc65\xb9;\x05\x00\x00\x00"
-	xzData   = "\xfd\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00\x21\x01\x16\x00\x00\x00\x74\x2f\xe5\xa3\x01\x00\x04\x74\x65\x73\x74\x0a\x00\x00\x00\x00\x9d\xed\x31\x1d\x0f\x9f\xd7\xe6\x00\x01\x1d\x05\xb8\x2d\x80\xaf\x1f\xb6\xf3\x7d\x01\x00\x00\x00\x00\x04\x59\x5a"
-	rawData  = "test"
-)
-
-func (s *DownloaderSuite) TestDownloadTryCompression(c *C) {
-	var buf []byte
-
-	expectedChecksums := map[string]utils.ChecksumInfo{
-		"file.bz2": {Size: int64(len(bzipData))},
-		"file.gz":  {Size: int64(len(gzipData))},
-		"file.xz":  {Size: int64(len(xzData))},
-		"file":     {Size: int64(len(rawData))},
-	}
-
-	// bzip2 only available
-	buf = make([]byte, 4)
-	d := NewFakeDownloader()
-	d.ExpectResponse("http://example.com/file.bz2", bzipData)
-	r, file, err := DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
-	c.Assert(err, IsNil)
-	defer file.Close()
-	io.ReadFull(r, buf)
-	c.Assert(string(buf), Equals, rawData)
-	c.Assert(d.Empty(), Equals, true)
-
-	// bzip2 not available, but gz is
-	buf = make([]byte, 4)
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectResponse("http://example.com/file.gz", gzipData)
-	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
-	c.Assert(err, IsNil)
-	defer file.Close()
-	io.ReadFull(r, buf)
-	c.Assert(string(buf), Equals, rawData)
-	c.Assert(d.Empty(), Equals, true)
-
-	// bzip2 & gzip not available, but xz is
-	buf = make([]byte, 4)
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
-	d.ExpectResponse("http://example.com/file.xz", xzData)
-	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
-	c.Assert(err, IsNil)
-	defer file.Close()
-	io.ReadFull(r, buf)
-	c.Assert(string(buf), Equals, rawData)
-	c.Assert(d.Empty(), Equals, true)
-
-	// bzip2, gzip & xz not available, but raw is
-	buf = make([]byte, 4)
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
-	d.ExpectResponse("http://example.com/file", rawData)
-	r, file, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
-	c.Assert(err, IsNil)
-	defer file.Close()
-	io.ReadFull(r, buf)
-	c.Assert(string(buf), Equals, rawData)
-	c.Assert(d.Empty(), Equals, true)
-
-	// gzip available, but broken
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectResponse("http://example.com/file.gz", "x")
-	_, file, err = DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
-	c.Assert(err, ErrorMatches, "unexpected EOF")
-	c.Assert(d.Empty(), Equals, true)
-}
-
-func (s *DownloaderSuite) TestDownloadTryCompressionErrors(c *C) {
-	d := NewFakeDownloader()
-	_, _, err := DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
-	c.Assert(err, ErrorMatches, "unexpected request.*")
-
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
-	d.ExpectError("http://example.com/file", errors.New("403"))
-	_, _, err = DownloadTryCompression(d, "http://example.com/file", nil, true, 1)
-	c.Assert(err, ErrorMatches, "403")
-
-	d = NewFakeDownloader()
-	d.ExpectError("http://example.com/file.bz2", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.gz", &Error{Code: 404})
-	d.ExpectError("http://example.com/file.xz", &Error{Code: 404})
-	d.ExpectResponse("http://example.com/file", rawData)
-	expectedChecksums := map[string]utils.ChecksumInfo{
-		"file.bz2": {Size: 7},
-		"file.gz":  {Size: 7},
-		"file.xz":  {Size: 7},
-		"file":     {Size: 7},
-	}
-	_, _, err = DownloadTryCompression(d, "http://example.com/file", expectedChecksums, false, 1)
-	c.Assert(err, ErrorMatches, "checksums don't match.*")
+	c.Assert(s.d.Download(s.url+"/test", "/"),
+		ErrorMatches, ".*permission denied")
 }
http/fake.go (45 lines changed)
@@ -60,7 +60,7 @@ func (f *FakeDownloader) Empty() bool {
 }
 
 // DownloadWithChecksum performs fake download by matching against first expectation in the queue or any expectation, with cheksum verification
-func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, result chan<- error, expected utils.ChecksumInfo, ignoreMismatch bool, maxTries int) {
+func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
 	var expectation expectedRequest
 	if len(f.expected) > 0 && f.expected[0].URL == url {
 		expectation, f.expected = f.expected[0], f.expected[1:]
@@ -68,25 +68,21 @@ func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, resul
 		expectation = f.anyExpected[url]
 		delete(f.anyExpected, url)
 	} else {
-		result <- fmt.Errorf("unexpected request for %s", url)
-		return
+		return fmt.Errorf("unexpected request for %s", url)
 	}
 
 	if expectation.Err != nil {
-		result <- expectation.Err
-		return
+		return expectation.Err
 	}
 
 	err := os.MkdirAll(filepath.Dir(filename), 0755)
 	if err != nil {
-		result <- err
-		return
+		return err
 	}
 
 	outfile, err := os.Create(filename)
 	if err != nil {
-		result <- err
-		return
+		return err
 	}
 	defer outfile.Close()
 
@@ -95,45 +91,26 @@ func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, resul
 
 	_, err = w.Write([]byte(expectation.Response))
 	if err != nil {
-		result <- err
-		return
+		return err
 	}
 
-	if expected.Size != -1 {
+	if expected != nil {
 		if expected.Size != cks.Sum().Size || expected.MD5 != "" && expected.MD5 != cks.Sum().MD5 ||
 			expected.SHA1 != "" && expected.SHA1 != cks.Sum().SHA1 || expected.SHA256 != "" && expected.SHA256 != cks.Sum().SHA256 {
 			if ignoreMismatch {
 				fmt.Printf("WARNING: checksums don't match: %#v != %#v for %s\n", expected, cks.Sum(), url)
 			} else {
-				result <- fmt.Errorf("checksums don't match: %#v != %#v for %s", expected, cks.Sum(), url)
-				return
+				return fmt.Errorf("checksums don't match: %#v != %#v for %s", expected, cks.Sum(), url)
 			}
 		}
 	}
 
-	result <- nil
-	return
+	return nil
 }
 
 // Download performs fake download by matching against first expectation in the queue
-func (f *FakeDownloader) Download(url string, filename string, result chan<- error) {
-	f.DownloadWithChecksum(url, filename, result, utils.ChecksumInfo{Size: -1}, false, 1)
-}
-
-// Shutdown does nothing
-func (f *FakeDownloader) Shutdown() {
-}
-
-// Abort does nothing
-func (f *FakeDownloader) Abort() {
-}
-
-// Pause does nothing
-func (f *FakeDownloader) Pause() {
-}
-
-// Resume does nothing
-func (f *FakeDownloader) Resume() {
+func (f *FakeDownloader) Download(url string, filename string) error {
+	return f.DownloadWithChecksum(url, filename, nil, false, 1)
 }
 
 // GetProgress returns Progress object
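FakeDownloader mirrors the same API change, which makes test code considerably shorter: there is no result channel to drain. A hypothetical gocheck test using the fake's existing ExpectResponse/ExpectError/Empty methods (this test is not part of the commit; the URLs are made up):

package http

import . "gopkg.in/check.v1"

// Hypothetical test showing the fake's flow under the synchronous API:
// queue expectations up front, then assert directly on returned errors.
func (s *CompressionSuite) TestFakeDownloaderSketch(c *C) {
	d := NewFakeDownloader()
	d.ExpectResponse("http://example.com/a", "payload")
	d.ExpectError("http://example.com/b", &Error{Code: 404, URL: "http://example.com/b"})

	c.Assert(d.Download("http://example.com/a", "/tmp/a"), IsNil)
	c.Assert(d.Download("http://example.com/b", "/tmp/b"), ErrorMatches, "HTTP code 404.*")
	c.Assert(d.Empty(), Equals, true) // every expectation was consumed
}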
http/http.go (15 lines changed)
@@ -1,2 +1,17 @@
 // Package http provides all HTTP (and FTP)-related operations
 package http
+
+import (
+	"fmt"
+)
+
+// Error is download error connected to HTTP code
+type Error struct {
+	Code int
+	URL  string
+}
+
+// Error
+func (e *Error) Error() string {
+	return fmt.Sprintf("HTTP code %d while fetching %s", e.Code, e.URL)
+}
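With Error now living in http.go, every part of the package (and its callers) can branch on HTTP status codes with a plain type assertion, exactly as DownloadTryCompression does for 404/403 above. A hypothetical helper (not part of this commit) spelling the pattern out:

package http

// isMissing reports whether err means "try the next candidate URL": the
// server answered, but the file simply isn't there (404) or is forbidden (403).
func isMissing(err error) bool {
	if httpError, ok := err.(*Error); ok {
		return httpError.Code == 404 || httpError.Code == 403
	}
	return false // network failures, checksum mismatches, etc. stay fatal
}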
http/temp.go (new file, 47 lines)
@@ -0,0 +1,47 @@
package http

import (
	"io/ioutil"
	"os"
	"path/filepath"

	"github.com/smira/aptly/aptly"
	"github.com/smira/aptly/utils"
)

// DownloadTemp starts new download to temporary file and returns File
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTemp(downloader aptly.Downloader, url string) (*os.File, error) {
	return DownloadTempWithChecksum(downloader, url, nil, false, 1)
}

// DownloadTempWithChecksum is a DownloadTemp with checksum verification
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTempWithChecksum(downloader aptly.Downloader, url string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) (*os.File, error) {
	tempdir, err := ioutil.TempDir(os.TempDir(), "aptly")
	if err != nil {
		return nil, err
	}
	defer os.RemoveAll(tempdir)

	tempfile := filepath.Join(tempdir, "buffer")

	if expected != nil && downloader.GetProgress() != nil {
		downloader.GetProgress().InitBar(expected.Size, true)
		defer downloader.GetProgress().ShutdownBar()
	}

	err = downloader.DownloadWithChecksum(url, tempfile, expected, ignoreMismatch, maxTries)
	if err != nil {
		return nil, err
	}

	file, err := os.Open(tempfile)
	if err != nil {
		return nil, err
	}

	return file, nil
}
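DownloadTempWithChecksum depends on a POSIX detail that is easy to miss: the deferred os.RemoveAll runs while the downloaded file is still open, so the returned *os.File stays readable even though its directory entry is gone, and the disk space is reclaimed automatically on Close. The same trick in isolation (a standalone sketch, independent of aptly and not Windows-safe):

package main

import (
	"fmt"
	"io/ioutil"
	"os"
)

func main() {
	f, _ := ioutil.TempFile("", "demo")
	f.WriteString("still readable")

	os.Remove(f.Name()) // unlink: the name disappears, the inode survives

	f.Seek(0, 0)
	data, _ := ioutil.ReadAll(f) // the open descriptor still sees the bytes
	fmt.Println(string(data))    // prints "still readable"

	f.Close() // only now is the disk space actually released
}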
http/temp_test.go (new file, 54 lines)
@@ -0,0 +1,54 @@
package http

import (
	"os"

	"github.com/smira/aptly/utils"

	. "gopkg.in/check.v1"
)

type TempSuite struct {
	DownloaderSuiteBase
}

var _ = Suite(&TempSuite{})

func (s *TempSuite) SetUpTest(c *C) {
	s.DownloaderSuiteBase.SetUpTest(c)
}

func (s *TempSuite) TearDownTest(c *C) {
	s.DownloaderSuiteBase.TearDownTest(c)
}

func (s *TempSuite) TestDownloadTemp(c *C) {
	f, err := DownloadTemp(s.d, s.url+"/test")
	c.Assert(err, IsNil)
	defer f.Close()

	buf := make([]byte, 1)

	f.Read(buf)
	c.Assert(buf, DeepEquals, []byte("H"))

	_, err = os.Stat(f.Name())
	c.Assert(os.IsNotExist(err), Equals, true)
}

func (s *TempSuite) TestDownloadTempWithChecksum(c *C) {
	f, err := DownloadTempWithChecksum(s.d, s.url+"/test", &utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
		SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false, 1)
	defer f.Close()
	c.Assert(err, IsNil)

	_, err = DownloadTempWithChecksum(s.d, s.url+"/test", &utils.ChecksumInfo{Size: 13}, false, 1)
	c.Assert(err, ErrorMatches, ".*size check mismatch 12 != 13")
}

func (s *TempSuite) TestDownloadTempError(c *C) {
	f, err := DownloadTemp(s.d, s.url+"/doesntexist")
	c.Assert(err, NotNil)
	c.Assert(f, IsNil)
	c.Assert(err, ErrorMatches, "HTTP code 404.*")
}