Refactoring: new packages console and http; Progress is now an interface.

Andrey Smirnov
2014-02-19 13:08:55 +04:00
parent bd119dbfed
commit 2d589bd23d
18 changed files with 193 additions and 138 deletions

View File

@@ -4,6 +4,7 @@ package aptly
import (
"github.com/smira/aptly/utils"
"io"
"os"
)
@@ -13,7 +14,7 @@ import (
type PackagePool interface {
Path(filename string, hashMD5 string) (string, error)
RelativePath(filename string, hashMD5 string) (string, error)
FilepathList(progress *utils.Progress) ([]string, error)
FilepathList(progress Progress) ([]string, error)
Remove(path string) (size int64, err error)
}
@@ -26,3 +27,30 @@ type PublishedStorage interface {
LinkFromPool(prefix string, component string, poolDirectory string, sourcePool PackagePool, sourcePath string) (string, error)
ChecksumsForFile(path string) (utils.ChecksumInfo, error)
}
// Progress is a progress-displaying entity; it allows progress bars & simple prints
type Progress interface {
// Writer interface to support progress bar ticking
io.Writer
// Start makes progress start its work
Start()
// Shutdown shuts down progress display
Shutdown()
// InitBar starts progressbar for count bytes or count items
InitBar(count int64, isBytes bool)
// ShutdownBar stops progress bar and hides it
ShutdownBar()
// AddBar increments progress for progress bar
AddBar(count int)
// Printf does printf in a safe manner: without overwriting the progress bar
Printf(msg string, a ...interface{})
}
// Downloader is parallel HTTP fetcher
type Downloader interface {
Download(url string, destination string, result chan<- error)
DownloadWithChecksum(url string, destination string, result chan<- error, expected utils.ChecksumInfo, ignoreMismatch bool)
Pause()
Resume()
Shutdown()
}
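
Progress is now consumed only through this interface, so any type with these methods can be plugged in. A minimal no-op implementation (a hypothetical sketch, not part of this commit, handy for keeping tests quiet) would satisfy it:

// NullProgress is a hypothetical silent implementation of Progress.
type NullProgress struct{}

func (n *NullProgress) Write(p []byte) (int, error)         { return len(p), nil }
func (n *NullProgress) Start()                              {}
func (n *NullProgress) Shutdown()                           {}
func (n *NullProgress) InitBar(count int64, isBytes bool)   {}
func (n *NullProgress) ShutdownBar()                        {}
func (n *NullProgress) AddBar(count int)                    {}
func (n *NullProgress) Printf(msg string, a ...interface{}) {}

// Compile-time check, same pattern the console package uses below.
var _ Progress = (*NullProgress)(nil)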

View File

@@ -20,7 +20,7 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
// collect information about referenced packages...
existingPackageRefs := debian.NewPackageRefList()
context.downloader.GetProgress().Printf("Loading mirrors and snapshots...\n")
context.progress.Printf("Loading mirrors and snapshots...\n")
repoCollection := debian.NewRemoteRepoCollection(context.database)
err = repoCollection.ForEach(func(repo *debian.RemoteRepo) error {
err := repoCollection.LoadComplete(repo)
@@ -48,14 +48,14 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
}
// ... and compare it to the list of all packages
context.downloader.GetProgress().Printf("Loading list of all packages...\n")
context.progress.Printf("Loading list of all packages...\n")
packageCollection := debian.NewPackageCollection(context.database)
allPackageRefs := packageCollection.AllPackageRefs()
toDelete := allPackageRefs.Substract(existingPackageRefs)
// delete packages that are no longer referenced
context.downloader.GetProgress().Printf("Deleting unreferenced packages (%d)...\n", toDelete.Len())
context.progress.Printf("Deleting unreferenced packages (%d)...\n", toDelete.Len())
context.database.StartBatch()
err = toDelete.ForEach(func(ref []byte) error {
@@ -71,9 +71,9 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
}
// now, build a list of files that should be present in Repository (package pool)
context.downloader.GetProgress().Printf("Building list of files referenced by packages...\n")
context.progress.Printf("Building list of files referenced by packages...\n")
referencedFiles := make([]string, 0, existingPackageRefs.Len())
context.downloader.GetProgress().InitBar(int64(existingPackageRefs.Len()), false)
context.progress.InitBar(int64(existingPackageRefs.Len()), false)
err = existingPackageRefs.ForEach(func(key []byte) error {
pkg, err := packageCollection.ByKey(key)
@@ -85,7 +85,7 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
return err
}
referencedFiles = append(referencedFiles, paths...)
context.downloader.GetProgress().AddBar(1)
context.progress.AddBar(1)
return nil
})
@@ -94,11 +94,11 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
}
sort.Strings(referencedFiles)
context.downloader.GetProgress().ShutdownBar()
context.progress.ShutdownBar()
// build a list of files in the package pool
context.downloader.GetProgress().Printf("Building list of files in package pool...\n")
existingFiles, err := context.packagePool.FilepathList(context.downloader.GetProgress())
context.progress.Printf("Building list of files in package pool...\n")
existingFiles, err := context.packagePool.FilepathList(context.progress)
if err != nil {
return fmt.Errorf("unable to collect file paths: %s", err)
}
@@ -107,10 +107,10 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
filesToDelete := utils.StrSlicesSubstract(existingFiles, referencedFiles)
// delete files that are no longer referenced
context.downloader.GetProgress().Printf("Deleting unreferenced files (%d)...\n", len(filesToDelete))
context.progress.Printf("Deleting unreferenced files (%d)...\n", len(filesToDelete))
if len(filesToDelete) > 0 {
context.downloader.GetProgress().InitBar(int64(len(filesToDelete)), false)
context.progress.InitBar(int64(len(filesToDelete)), false)
totalSize := int64(0)
for _, file := range filesToDelete {
size, err := context.packagePool.Remove(file)
@@ -118,12 +118,12 @@ func aptlyDbCleanup(cmd *commander.Command, args []string) error {
return err
}
context.downloader.GetProgress().AddBar(1)
context.progress.AddBar(1)
totalSize += size
}
context.downloader.GetProgress().ShutdownBar()
context.progress.ShutdownBar()
context.downloader.GetProgress().Printf("Disk space freed: %.2f GiB...\n", float64(totalSize)/1024.0/1024.0/1024.0)
context.progress.Printf("Disk space freed: %.2f GiB...\n", float64(totalSize)/1024.0/1024.0/1024.0)
}
return err
}
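
The cleanup command now hands its own context.progress into FilepathList instead of borrowing the downloader's. A rough sketch of how a pool implementation could drive that injected progress while scanning the pool (illustrative only, assuming a rootPath field and the io/ioutil, os and path/filepath imports; this is not the files package code from this commit):

func (pool *PackagePool) FilepathList(progress aptly.Progress) ([]string, error) {
    dirs, err := ioutil.ReadDir(pool.rootPath)
    if err != nil {
        return nil, err
    }

    progress.InitBar(int64(len(dirs)), false) // one tick per top-level pool directory
    defer progress.ShutdownBar()

    result := []string{}
    for _, dir := range dirs {
        err = filepath.Walk(filepath.Join(pool.rootPath, dir.Name()),
            func(path string, info os.FileInfo, err error) error {
                if err != nil {
                    return err
                }
                if !info.IsDir() {
                    result = append(result, path)
                }
                return nil
            })
        if err != nil {
            return nil, err
        }
        progress.AddBar(1)
    }
    return result, nil
}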

View File

@@ -201,7 +201,7 @@ func aptlyMirrorUpdate(cmd *commander.Command, args []string) error {
packageCollection := debian.NewPackageCollection(context.database)
err = repo.Download(context.downloader, packageCollection, context.packagePool, ignoreMismatch)
err = repo.Download(context.progress, context.downloader, packageCollection, context.packagePool, ignoreMismatch)
if err != nil {
return fmt.Errorf("unable to update: %s", err)
}

View File

@@ -1,8 +1,9 @@
package utils
package console
import (
"fmt"
"github.com/cheggaaa/pb"
"github.com/smira/aptly/aptly"
)
const (
@@ -26,6 +27,11 @@ type Progress struct {
barShown bool
}
// Check interface
var (
_ aptly.Progress = (*Progress)(nil)
)
// NewProgress creates new progress instance
func NewProgress() *Progress {
return &Progress{
@@ -47,7 +53,7 @@ func (p *Progress) Shutdown() {
<-p.stopped
}
// InitBar creates progressbar for count bytes
// InitBar starts progressbar for count bytes or count items
func (p *Progress) InitBar(count int64, isBytes bool) {
if p.bar != nil {
panic("bar already initialized")

View File

@@ -1,6 +1,6 @@
// +build !freebsd
package utils
package console
import (
"code.google.com/p/go.crypto/ssh/terminal"

View File

@@ -1,6 +1,6 @@
// +build freebsd
package utils
package console
// RunningOnTerminal checks whether stdout is terminal
//

debian/debian.go (vendored, new file, 2 changed lines)
View File

@@ -0,0 +1,2 @@
// Package debian implements Debian-specific repository handling
package debian

debian/debian_test.go (vendored, new file, 11 changed lines)
View File

@@ -0,0 +1,11 @@
package debian
import (
. "launchpad.net/gocheck"
"testing"
)
// Launch gocheck tests
func Test(t *testing.T) {
TestingT(t)
}

debian/remote.go (vendored, 34 changed lines)
View File

@@ -1,4 +1,3 @@
// Package debian implements Debian-specific repository handling
package debian
import (
@@ -7,6 +6,7 @@ import (
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/database"
"github.com/smira/aptly/http"
"github.com/smira/aptly/utils"
"github.com/ugorji/go/codec"
"log"
@@ -162,7 +162,7 @@ func (repo *RemoteRepo) PackageURL(filename string) *url.URL {
}
// Fetch updates information about repository
func (repo *RemoteRepo) Fetch(d utils.Downloader, verifier utils.Verifier) error {
func (repo *RemoteRepo) Fetch(d aptly.Downloader, verifier utils.Verifier) error {
var (
release *os.File
err error
@@ -170,13 +170,13 @@ func (repo *RemoteRepo) Fetch(d utils.Downloader, verifier utils.Verifier) error
if verifier == nil {
// 0. Just download release file to temporary URL
release, err = utils.DownloadTemp(d, repo.ReleaseURL("Release").String())
release, err = http.DownloadTemp(d, repo.ReleaseURL("Release").String())
if err != nil {
return err
}
} else {
// 1. try InRelease file
inrelease, err := utils.DownloadTemp(d, repo.ReleaseURL("InRelease").String())
inrelease, err := http.DownloadTemp(d, repo.ReleaseURL("InRelease").String())
if err != nil {
goto splitsignature
}
@@ -191,12 +191,12 @@ func (repo *RemoteRepo) Fetch(d utils.Downloader, verifier utils.Verifier) error
splitsignature:
// 2. try Release + Release.gpg
release, err = utils.DownloadTemp(d, repo.ReleaseURL("Release").String())
release, err = http.DownloadTemp(d, repo.ReleaseURL("Release").String())
if err != nil {
return err
}
releasesig, err := utils.DownloadTemp(d, repo.ReleaseURL("Release.gpg").String())
releasesig, err := http.DownloadTemp(d, repo.ReleaseURL("Release.gpg").String())
if err != nil {
return err
}
@@ -298,10 +298,10 @@ ok:
}
// Download downloads all repo files
func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageCollection, packagePool aptly.PackagePool, ignoreMismatch bool) error {
func (repo *RemoteRepo) Download(progress aptly.Progress, d aptly.Downloader, packageCollection *PackageCollection, packagePool aptly.PackagePool, ignoreMismatch bool) error {
list := NewPackageList()
d.GetProgress().Printf("Downloading & parsing package files...\n")
progress.Printf("Downloading & parsing package files...\n")
// Download and parse all Packages & Source files
packagesURLs := [][]string{}
@@ -324,7 +324,7 @@ func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageC
for _, info := range packagesURLs {
url, kind := info[0], info[1]
packagesReader, packagesFile, err := utils.DownloadTryCompression(d, url, repo.ReleaseFiles, ignoreMismatch)
packagesReader, packagesFile, err := http.DownloadTryCompression(d, url, repo.ReleaseFiles, ignoreMismatch)
if err != nil {
return err
}
@@ -355,16 +355,16 @@ func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageC
}
}
d.GetProgress().Printf("Saving packages to database...\n")
progress.Printf("Saving packages to database...\n")
d.GetProgress().InitBar(int64(list.Len()), false)
progress.InitBar(int64(list.Len()), false)
packageCollection.db.StartBatch()
count := 0
// Save package meta information to DB
err := list.ForEach(func(p *Package) error {
d.GetProgress().AddBar(1)
progress.AddBar(1)
count++
if count > 1000 {
count = 0
@@ -385,9 +385,9 @@ func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageC
return fmt.Errorf("unable to save packages to db: %s", err)
}
d.GetProgress().ShutdownBar()
progress.ShutdownBar()
d.GetProgress().Printf("Building download queue...\n")
progress.Printf("Building download queue...\n")
// Build download queue
queued := make(map[string]PackageDownloadTask, list.Len())
@@ -420,9 +420,9 @@ func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageC
// free up package list, we don't need it after this point
list = nil
d.GetProgress().Printf("Download queue: %d items, %.2f GiB size\n", count, float64(downloadSize)/(1024.0*1024.0*1024.0))
progress.Printf("Download queue: %d items, %.2f GiB size\n", count, float64(downloadSize)/(1024.0*1024.0*1024.0))
d.GetProgress().InitBar(downloadSize, true)
progress.InitBar(downloadSize, true)
// Download all package files
ch := make(chan error, len(queued))
@@ -445,7 +445,7 @@ func (repo *RemoteRepo) Download(d utils.Downloader, packageCollection *PackageC
count--
}
d.GetProgress().ShutdownBar()
progress.ShutdownBar()
if len(errors) > 0 {
return fmt.Errorf("download errors:\n %s\n", strings.Join(errors, "\n "))
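
The download phase keeps the same fire-and-collect shape, only the progress now comes from the explicit parameter: every queued file is handed to the downloader with one shared result channel, and errors are gathered as tasks finish. A condensed sketch (task field names here are illustrative, not the real PackageDownloadTask fields):

ch := make(chan error, len(queued))
for _, task := range queued {
    d.DownloadWithChecksum(task.url, task.destination, ch, task.expected, ignoreMismatch)
}

errors := []string{}
for i := 0; i < len(queued); i++ {
    if err := <-ch; err != nil {
        errors = append(errors, err.Error())
    }
}
// The byte-based bar started with progress.InitBar(downloadSize, true) is advanced
// by the downloader itself, which now writes every fetched chunk into progress.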

debian/remote_test.go (vendored, 32 changed lines)
View File

@@ -3,21 +3,17 @@ package debian
import (
"errors"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/console"
"github.com/smira/aptly/database"
"github.com/smira/aptly/files"
"github.com/smira/aptly/http"
"github.com/smira/aptly/utils"
"io"
"io/ioutil"
. "launchpad.net/gocheck"
"os"
"testing"
)
// Launch gocheck tests
func Test(t *testing.T) {
TestingT(t)
}
type NullVerifier struct {
}
@@ -69,7 +65,8 @@ type RemoteRepoSuite struct {
PackageListMixinSuite
repo *RemoteRepo
flat *RemoteRepo
downloader *utils.FakeDownloader
downloader *http.FakeDownloader
progress aptly.Progress
db database.Storage
packageCollection *PackageCollection
packagePool aptly.PackagePool
@@ -80,14 +77,17 @@ var _ = Suite(&RemoteRepoSuite{})
func (s *RemoteRepoSuite) SetUpTest(c *C) {
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
s.flat, _ = NewRemoteRepo("exp42", "http://repos.express42.com/virool/precise/", "./", []string{}, []string{}, false)
s.downloader = utils.NewFakeDownloader().ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
s.downloader = http.NewFakeDownloader().ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
s.progress = console.NewProgress()
s.db, _ = database.OpenDB(c.MkDir())
s.packageCollection = NewPackageCollection(s.db)
s.packagePool = files.NewPackagePool(c.MkDir())
s.SetUpPackages()
s.progress.Start()
}
func (s *RemoteRepoSuite) TearDownTest(c *C) {
s.progress.Shutdown()
s.db.Close()
}
@@ -176,7 +176,7 @@ func (s *RemoteRepoSuite) TestFetch(c *C) {
}
func (s *RemoteRepoSuite) TestFetchNullVerifier1(c *C) {
downloader := utils.NewFakeDownloader()
downloader := http.NewFakeDownloader()
downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/InRelease", errors.New("404"))
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release.gpg", "GPG")
@@ -189,7 +189,7 @@ func (s *RemoteRepoSuite) TestFetchNullVerifier1(c *C) {
}
func (s *RemoteRepoSuite) TestFetchNullVerifier2(c *C) {
downloader := utils.NewFakeDownloader()
downloader := http.NewFakeDownloader()
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/InRelease", exampleReleaseFile)
err := s.repo.Fetch(downloader, &NullVerifier{})
@@ -242,7 +242,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb", "xyz")
err = s.repo.Download(s.downloader, s.packageCollection, s.packagePool, false)
err = s.repo.Download(s.progress, s.downloader, s.packageCollection, s.packagePool, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
c.Assert(s.repo.packageRefs, NotNil)
@@ -275,7 +275,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/access-modifier-checker/access-modifier-checker_1.0.orig.tar.gz", "abcd")
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/access-modifier-checker/access-modifier-checker_1.0-4.debian.tar.gz", "abcde")
err = s.repo.Download(s.downloader, s.packageCollection, s.packagePool, false)
err = s.repo.Download(s.progress, s.downloader, s.packageCollection, s.packagePool, false)
c.Assert(err, IsNil)
c.Assert(s.downloader.Empty(), Equals, true)
c.Assert(s.repo.packageRefs, NotNil)
@@ -300,7 +300,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
}
func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
downloader := utils.NewFakeDownloader()
downloader := http.NewFakeDownloader()
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Release", exampleReleaseFile)
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.bz2", errors.New("HTTP 404"))
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.gz", errors.New("HTTP 404"))
@@ -310,7 +310,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
err := s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.Download(downloader, s.packageCollection, s.packagePool, false)
err = s.flat.Download(s.progress, downloader, s.packageCollection, s.packagePool, false)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
c.Assert(s.flat.packageRefs, NotNil)
@@ -328,7 +328,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
s.flat.DownloadSources = true
downloader := utils.NewFakeDownloader()
downloader := http.NewFakeDownloader()
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Release", exampleReleaseFile)
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.bz2", errors.New("HTTP 404"))
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.gz", errors.New("HTTP 404"))
@@ -344,7 +344,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
err := s.flat.Fetch(downloader, nil)
c.Assert(err, IsNil)
err = s.flat.Download(downloader, s.packageCollection, s.packagePool, false)
err = s.flat.Download(s.progress, downloader, s.packageCollection, s.packagePool, false)
c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true)
c.Assert(s.flat.packageRefs, NotNil)

View File

@@ -1,9 +1,11 @@
package utils
package http
import (
"compress/bzip2"
"compress/gzip"
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/utils"
"io"
"io/ioutil"
"net/http"
@@ -12,19 +14,9 @@ import (
"strings"
)
// Downloader is parallel HTTP fetcher
type Downloader interface {
Download(url string, destination string, result chan<- error)
DownloadWithChecksum(url string, destination string, result chan<- error, expected ChecksumInfo, ignoreMismatch bool)
Pause()
Resume()
Shutdown()
GetProgress() *Progress
}
// Check interface
var (
_ Downloader = &downloaderImpl{}
_ aptly.Downloader = (*downloaderImpl)(nil)
)
// downloaderImpl is implementation of Downloader interface
@@ -34,7 +26,7 @@ type downloaderImpl struct {
stopped chan bool
pause chan bool
unpause chan bool
progress *Progress
progress aptly.Progress
threads int
}
@@ -43,13 +35,13 @@ type downloadTask struct {
url string
destination string
result chan<- error
expected ChecksumInfo
expected utils.ChecksumInfo
ignoreMismatch bool
}
// NewDownloader creates new instance of Downloader which specified number
// of threads
func NewDownloader(threads int) Downloader {
func NewDownloader(threads int, progress aptly.Progress) aptly.Downloader {
downloader := &downloaderImpl{
queue: make(chan *downloadTask, 1000),
stop: make(chan bool),
@@ -57,11 +49,9 @@ func NewDownloader(threads int) Downloader {
pause: make(chan bool),
unpause: make(chan bool),
threads: threads,
progress: NewProgress(),
progress: progress,
}
downloader.progress.Start()
for i := 0; i < downloader.threads; i++ {
go downloader.process()
}
@@ -79,8 +69,6 @@ func (downloader *downloaderImpl) Shutdown() {
for i := 0; i < downloader.threads; i++ {
<-downloader.stopped
}
downloader.progress.Shutdown()
}
// Pause pauses task processing
@@ -97,19 +85,14 @@ func (downloader *downloaderImpl) Resume() {
}
}
// Resume resumes task processing
func (downloader *downloaderImpl) GetProgress() *Progress {
return downloader.progress
}
// Download starts new download task
func (downloader *downloaderImpl) Download(url string, destination string, result chan<- error) {
downloader.DownloadWithChecksum(url, destination, result, ChecksumInfo{Size: -1}, false)
downloader.DownloadWithChecksum(url, destination, result, utils.ChecksumInfo{Size: -1}, false)
}
// DownloadWithChecksum starts new download task with checksum verification
func (downloader *downloaderImpl) DownloadWithChecksum(url string, destination string, result chan<- error,
expected ChecksumInfo, ignoreMismatch bool) {
expected utils.ChecksumInfo, ignoreMismatch bool) {
downloader.queue <- &downloadTask{url: url, destination: destination, result: result, expected: expected, ignoreMismatch: ignoreMismatch}
}
@@ -144,17 +127,13 @@ func (downloader *downloaderImpl) handleTask(task *downloadTask) {
}
defer outfile.Close()
checksummer := NewChecksumWriter()
writers := []io.Writer{outfile}
checksummer := utils.NewChecksumWriter()
writers := []io.Writer{outfile, downloader.progress}
if task.expected.Size != -1 {
writers = append(writers, checksummer)
}
if downloader.progress != nil {
writers = append(writers, downloader.progress)
}
w := io.MultiWriter(writers...)
_, err = io.Copy(w, resp.Body)
@@ -216,14 +195,14 @@ func (downloader *downloaderImpl) process() {
// DownloadTemp starts new download to temporary file and returns File
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTemp(downloader Downloader, url string) (*os.File, error) {
return DownloadTempWithChecksum(downloader, url, ChecksumInfo{Size: -1}, false)
func DownloadTemp(downloader aptly.Downloader, url string) (*os.File, error) {
return DownloadTempWithChecksum(downloader, url, utils.ChecksumInfo{Size: -1}, false)
}
// DownloadTempWithChecksum is a DownloadTemp with checksum verification
//
// Temporary file would be already removed, so no need to cleanup
func DownloadTempWithChecksum(downloader Downloader, url string, expected ChecksumInfo, ignoreMismatch bool) (*os.File, error) {
func DownloadTempWithChecksum(downloader aptly.Downloader, url string, expected utils.ChecksumInfo, ignoreMismatch bool) (*os.File, error) {
tempdir, err := ioutil.TempDir(os.TempDir(), "aptly")
if err != nil {
return nil, err
@@ -269,7 +248,7 @@ var compressionMethods = []struct {
// DownloadTryCompression tries to download from URL .bz2, .gz and raw extension until
// it finds existing file.
func DownloadTryCompression(downloader Downloader, url string, expectedChecksums map[string]ChecksumInfo, ignoreMismatch bool) (io.Reader, *os.File, error) {
func DownloadTryCompression(downloader aptly.Downloader, url string, expectedChecksums map[string]utils.ChecksumInfo, ignoreMismatch bool) (io.Reader, *os.File, error) {
var err error
for _, method := range compressionMethods {
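
With GetProgress() gone, the progress display is injected once at construction time and shared by everything that needs it. A minimal usage sketch of the new API (the mirror URL is illustrative):

progress := console.NewProgress()
progress.Start()
defer progress.Shutdown()

d := http.NewDownloader(4, progress) // 4 concurrent download goroutines
defer d.Shutdown()

// DownloadTemp fetches into an already-unlinked temporary file.
f, err := http.DownloadTemp(d, "http://mirror.example.com/dists/stable/Release")
if err != nil {
    log.Fatal(err)
}
defer f.Close()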

View File

@@ -1,8 +1,11 @@
package utils
package http
import (
"errors"
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/console"
"github.com/smira/aptly/utils"
"io"
"io/ioutil"
. "launchpad.net/gocheck"
@@ -10,20 +13,15 @@ import (
"net/http"
"os"
"runtime"
"testing"
"time"
)
// Launch gocheck tests
func Test(t *testing.T) {
TestingT(t)
}
type DownloaderSuite struct {
tempfile *os.File
l net.Listener
url string
ch chan bool
progress aptly.Progress
}
var _ = Suite(&DownloaderSuite{})
@@ -44,9 +42,14 @@ func (s *DownloaderSuite) SetUpTest(c *C) {
http.Serve(s.l, mux)
s.ch <- true
}()
s.progress = console.NewProgress()
s.progress.Start()
}
func (s *DownloaderSuite) TearDownTest(c *C) {
s.progress.Shutdown()
s.l.Close()
<-s.ch
@@ -57,7 +60,7 @@ func (s *DownloaderSuite) TearDownTest(c *C) {
func (s *DownloaderSuite) TestStartupShutdown(c *C) {
goroutines := runtime.NumGoroutine()
d := NewDownloader(10)
d := NewDownloader(10, s.progress)
d.Shutdown()
// wait for goroutines to shutdown
@@ -69,7 +72,7 @@ func (s *DownloaderSuite) TestStartupShutdown(c *C) {
}
func (s *DownloaderSuite) TestPauseResume(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
d.Pause()
@@ -77,7 +80,7 @@ func (s *DownloaderSuite) TestPauseResume(c *C) {
}
func (s *DownloaderSuite) TestDownloadOK(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
ch := make(chan error)
@@ -87,48 +90,48 @@ func (s *DownloaderSuite) TestDownloadOK(c *C) {
}
func (s *DownloaderSuite) TestDownloadWithChecksum(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
ch := make(chan error)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{}, false)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{}, false)
res := <-ch
c.Assert(res, ErrorMatches, ".*size check mismatch 12 != 0")
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "abcdef"}, false)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, false)
res = <-ch
c.Assert(res, ErrorMatches, ".*md5 hash mismatch \"a1acb0fe91c7db45ec4d775192ec5738\" != \"abcdef\"")
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "abcdef"}, true)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "abcdef"}, true)
res = <-ch
c.Assert(res, IsNil)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738"}, false)
res = <-ch
c.Assert(res, IsNil)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738", SHA1: "abcdef"}, false)
res = <-ch
c.Assert(res, ErrorMatches, ".*sha1 hash mismatch \"921893bae6ad6fd818401875d6779254ef0ff0ec\" != \"abcdef\"")
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec"}, false)
res = <-ch
c.Assert(res, IsNil)
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "abcdef"}, false)
res = <-ch
c.Assert(res, ErrorMatches, ".*sha256 hash mismatch \"b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac\" != \"abcdef\"")
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
d.DownloadWithChecksum(s.url+"/test", s.tempfile.Name(), ch, utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false)
res = <-ch
c.Assert(res, IsNil)
}
func (s *DownloaderSuite) TestDownload404(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
ch := make(chan error)
@@ -138,7 +141,7 @@ func (s *DownloaderSuite) TestDownload404(c *C) {
}
func (s *DownloaderSuite) TestDownloadConnectError(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
ch := make(chan error)
@@ -148,7 +151,7 @@ func (s *DownloaderSuite) TestDownloadConnectError(c *C) {
}
func (s *DownloaderSuite) TestDownloadFileError(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
ch := make(chan error)
@@ -158,7 +161,7 @@ func (s *DownloaderSuite) TestDownloadFileError(c *C) {
}
func (s *DownloaderSuite) TestDownloadTemp(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
f, err := DownloadTemp(d, s.url+"/test")
@@ -175,20 +178,20 @@ func (s *DownloaderSuite) TestDownloadTemp(c *C) {
}
func (s *DownloaderSuite) TestDownloadTempWithChecksum(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
f, err := DownloadTempWithChecksum(d, s.url+"/test", ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
f, err := DownloadTempWithChecksum(d, s.url+"/test", utils.ChecksumInfo{Size: 12, MD5: "a1acb0fe91c7db45ec4d775192ec5738",
SHA1: "921893bae6ad6fd818401875d6779254ef0ff0ec", SHA256: "b3c92ee1246176ed35f6e8463cd49074f29442f5bbffc3f8591cde1dcc849dac"}, false)
defer f.Close()
c.Assert(err, IsNil)
_, err = DownloadTempWithChecksum(d, s.url+"/test", ChecksumInfo{Size: 13}, false)
_, err = DownloadTempWithChecksum(d, s.url+"/test", utils.ChecksumInfo{Size: 13}, false)
c.Assert(err, ErrorMatches, ".*size check mismatch 12 != 13")
}
func (s *DownloaderSuite) TestDownloadTempError(c *C) {
d := NewDownloader(2)
d := NewDownloader(2, s.progress)
defer d.Shutdown()
f, err := DownloadTemp(d, s.url+"/doesntexist")
@@ -206,10 +209,10 @@ const (
func (s *DownloaderSuite) TestDownloadTryCompression(c *C) {
var buf []byte
expectedChecksums := map[string]ChecksumInfo{
"file.bz2": ChecksumInfo{Size: int64(len(bzipData))},
"file.gz": ChecksumInfo{Size: int64(len(gzipData))},
"file": ChecksumInfo{Size: int64(len(rawData))},
expectedChecksums := map[string]utils.ChecksumInfo{
"file.bz2": utils.ChecksumInfo{Size: int64(len(bzipData))},
"file.gz": utils.ChecksumInfo{Size: int64(len(gzipData))},
"file": utils.ChecksumInfo{Size: int64(len(rawData))},
}
// bzip2 only available
@@ -278,6 +281,6 @@ func (s *DownloaderSuite) TestDownloadTryCompressionErrors(c *C) {
d.ExpectError("http://example.com/file.bz2", errors.New("404"))
d.ExpectError("http://example.com/file.gz", errors.New("404"))
d.ExpectResponse("http://example.com/file", rawData)
_, _, err = DownloadTryCompression(d, "http://example.com/file", map[string]ChecksumInfo{"file": ChecksumInfo{Size: 7}}, false)
_, _, err = DownloadTryCompression(d, "http://example.com/file", map[string]utils.ChecksumInfo{"file": utils.ChecksumInfo{Size: 7}}, false)
c.Assert(err, ErrorMatches, "checksums don't match.*")
}

View File

@@ -1,7 +1,9 @@
package utils
package http
import (
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/utils"
"io"
"os"
"path/filepath"
@@ -18,12 +20,11 @@ type expectedRequest struct {
type FakeDownloader struct {
expected []expectedRequest
anyExpected map[string]expectedRequest
progress *Progress
}
// Check interface
var (
_ Downloader = &FakeDownloader{}
_ aptly.Downloader = (*FakeDownloader)(nil)
)
// NewFakeDownloader creates new expected downloader
@@ -31,8 +32,6 @@ func NewFakeDownloader() *FakeDownloader {
result := &FakeDownloader{}
result.expected = make([]expectedRequest, 0)
result.anyExpected = make(map[string]expectedRequest)
result.progress = NewProgress()
result.progress.Start()
return result
}
@@ -60,7 +59,7 @@ func (f *FakeDownloader) Empty() bool {
}
// DownloadWithChecksum performs fake download by matching against first expectation in the queue or any expectation, with checksum verification
func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, result chan<- error, expected ChecksumInfo, ignoreMismatch bool) {
func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, result chan<- error, expected utils.ChecksumInfo, ignoreMismatch bool) {
var expectation expectedRequest
if len(f.expected) > 0 && f.expected[0].URL == url {
expectation, f.expected = f.expected[0], f.expected[1:]
@@ -90,7 +89,7 @@ func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, resul
}
defer outfile.Close()
cks := NewChecksumWriter()
cks := utils.NewChecksumWriter()
w := io.MultiWriter(outfile, cks)
_, err = w.Write([]byte(expectation.Response))
@@ -117,12 +116,11 @@ func (f *FakeDownloader) DownloadWithChecksum(url string, filename string, resul
// Download performs fake download by matching against first expectation in the queue
func (f *FakeDownloader) Download(url string, filename string, result chan<- error) {
f.DownloadWithChecksum(url, filename, result, ChecksumInfo{Size: -1}, false)
f.DownloadWithChecksum(url, filename, result, utils.ChecksumInfo{Size: -1}, false)
}
// Shutdown does nothing
func (f *FakeDownloader) Shutdown() {
f.progress.Shutdown()
}
// Pause does nothing
@@ -132,8 +130,3 @@ func (f *FakeDownloader) Pause() {
// Resume does nothing
func (f *FakeDownloader) Resume() {
}
// GetProgress does nothing
func (f *FakeDownloader) GetProgress() *Progress {
return f.progress
}
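
With the embedded progress removed, FakeDownloader is just an expectation recorder that satisfies aptly.Downloader. A condensed sketch of how the test suites above use it (repo, exampleReleaseFile and c come from the suite):

downloader := http.NewFakeDownloader()
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)

err := repo.Fetch(downloader, nil) // nil verifier: only the plain Release file is fetched

c.Assert(err, IsNil)
c.Assert(downloader.Empty(), Equals, true) // the expectation was consumed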

http/http.go (new file, 2 changed lines)
View File

@@ -0,0 +1,2 @@
// Package http provides all HTTP-related operations
package http

http/http_test.go (new file, 11 changed lines)
View File

@@ -0,0 +1,11 @@
package http
import (
. "launchpad.net/gocheck"
"testing"
)
// Launch gocheck tests
func Test(t *testing.T) {
TestingT(t)
}

main.go (11 changed lines)
View File

@@ -5,9 +5,11 @@ import (
"github.com/gonuts/commander"
"github.com/gonuts/flag"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/console"
"github.com/smira/aptly/database"
"github.com/smira/aptly/debian"
"github.com/smira/aptly/files"
"github.com/smira/aptly/http"
"github.com/smira/aptly/utils"
"os"
"path/filepath"
@@ -48,7 +50,8 @@ take snapshots and publish them back as Debian repositories.`,
}
var context struct {
downloader utils.Downloader
progress aptly.Progress
downloader aptly.Downloader
database database.Storage
packagePool aptly.PackagePool
publishedStorage aptly.PublishedStorage
@@ -116,7 +119,11 @@ func main() {
context.architecturesList = strings.Split(optionArchitectures, ",")
}
context.downloader = utils.NewDownloader(utils.Config.DownloadConcurrency)
context.progress = console.NewProgress()
context.progress.Start()
defer context.progress.Shutdown()
context.downloader = http.NewDownloader(utils.Config.DownloadConcurrency, context.progress)
defer context.downloader.Shutdown()
context.database, err = database.OpenDB(filepath.Join(utils.Config.RootDir, "db"))
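
The ordering here is deliberate: the progress display is created and started first, handed to the downloader, and, because deferred calls run last-in-first-out, the downloader (which writes into progress) is shut down before the display itself. Condensed, with the ordering spelled out:

context.progress = console.NewProgress()
context.progress.Start()
defer context.progress.Shutdown() // runs second, after the downloader is gone

context.downloader = http.NewDownloader(utils.Config.DownloadConcurrency, context.progress)
defer context.downloader.Shutdown() // runs first, while progress is still alive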

utils/utils.go (new file, 2 changed lines)
View File

@@ -0,0 +1,2 @@
// Package utils collects various services: simple operations, compression, etc.
package utils

utils/utils_test.go (new file, 11 changed lines)
View File

@@ -0,0 +1,11 @@
package utils
import (
. "launchpad.net/gocheck"
"testing"
)
// Launch gocheck tests
func Test(t *testing.T) {
TestingT(t)
}