mirror of
https://github.com/aptly-dev/aptly.git
synced 2026-01-11 03:11:50 +00:00
Rename debian -> deb. #21
This commit is contained in:
65
debian/collections.go
vendored
65
debian/collections.go
vendored
@@ -1,65 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"github.com/smira/aptly/database"
|
||||
)
|
||||
|
||||
// CollectionFactory is a single place to generate all desired collections
|
||||
type CollectionFactory struct {
|
||||
db database.Storage
|
||||
packages *PackageCollection
|
||||
remoteRepos *RemoteRepoCollection
|
||||
snapshots *SnapshotCollection
|
||||
localRepos *LocalRepoCollection
|
||||
publishedRepos *PublishedRepoCollection
|
||||
}
|
||||
|
||||
// NewCollectionFactory creates new factory
|
||||
func NewCollectionFactory(db database.Storage) *CollectionFactory {
|
||||
return &CollectionFactory{db: db}
|
||||
}
|
||||
|
||||
// PackageCollection returns (or creates) new PackageCollection
|
||||
func (factory *CollectionFactory) PackageCollection() *PackageCollection {
|
||||
if factory.packages == nil {
|
||||
factory.packages = NewPackageCollection(factory.db)
|
||||
}
|
||||
|
||||
return factory.packages
|
||||
}
|
||||
|
||||
// RemoteRepoCollection returns (or creates) new RemoteRepoCollection
|
||||
func (factory *CollectionFactory) RemoteRepoCollection() *RemoteRepoCollection {
|
||||
if factory.remoteRepos == nil {
|
||||
factory.remoteRepos = NewRemoteRepoCollection(factory.db)
|
||||
}
|
||||
|
||||
return factory.remoteRepos
|
||||
}
|
||||
|
||||
// SnapshotCollection returns (or creates) new SnapshotCollection
|
||||
func (factory *CollectionFactory) SnapshotCollection() *SnapshotCollection {
|
||||
if factory.snapshots == nil {
|
||||
factory.snapshots = NewSnapshotCollection(factory.db)
|
||||
}
|
||||
|
||||
return factory.snapshots
|
||||
}
|
||||
|
||||
// LocalRepoCollection returns (or creates) new LocalRepoCollection
|
||||
func (factory *CollectionFactory) LocalRepoCollection() *LocalRepoCollection {
|
||||
if factory.localRepos == nil {
|
||||
factory.localRepos = NewLocalRepoCollection(factory.db)
|
||||
}
|
||||
|
||||
return factory.localRepos
|
||||
}
|
||||
|
||||
// PublishedRepoCollection returns (or creates) new PublishedRepoCollection
|
||||
func (factory *CollectionFactory) PublishedRepoCollection() *PublishedRepoCollection {
|
||||
if factory.publishedRepos == nil {
|
||||
factory.publishedRepos = NewPublishedRepoCollection(factory.db)
|
||||
}
|
||||
|
||||
return factory.publishedRepos
|
||||
}
|
||||
99
debian/deb.go
vendored
99
debian/deb.go
vendored
@@ -1,99 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bufio"
|
||||
"compress/gzip"
|
||||
"fmt"
|
||||
"github.com/mkrautz/goar"
|
||||
"github.com/smira/aptly/utils"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// GetControlFileFromDeb reads control file from deb package
|
||||
func GetControlFileFromDeb(packageFile string) (Stanza, error) {
|
||||
file, err := os.Open(packageFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
library := ar.NewReader(file)
|
||||
for {
|
||||
header, err := library.Next()
|
||||
if err == io.EOF {
|
||||
return nil, fmt.Errorf("unable to find control.tar.gz part")
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to read .deb archive: %s", err)
|
||||
}
|
||||
|
||||
if header.Name == "control.tar.gz" {
|
||||
ungzip, err := gzip.NewReader(library)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to ungzip: %s", err)
|
||||
}
|
||||
defer ungzip.Close()
|
||||
|
||||
untar := tar.NewReader(ungzip)
|
||||
for {
|
||||
tarHeader, err := untar.Next()
|
||||
if err == io.EOF {
|
||||
return nil, fmt.Errorf("unable to find control file")
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to read .tar archive: %s", err)
|
||||
}
|
||||
|
||||
if tarHeader.Name == "./control" {
|
||||
reader := NewControlFileReader(untar)
|
||||
stanza, err := reader.ReadStanza()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return stanza, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GetControlFileFromDsc reads control file from dsc package
|
||||
func GetControlFileFromDsc(dscFile string, verifier utils.Verifier) (Stanza, error) {
|
||||
file, err := os.Open(dscFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
line, err := bufio.NewReader(file).ReadString('\n')
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
file.Seek(0, 0)
|
||||
|
||||
var text *os.File
|
||||
|
||||
if strings.Index(line, "BEGIN PGP SIGN") != -1 {
|
||||
text, err = verifier.ExtractClearsigned(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer text.Close()
|
||||
} else {
|
||||
text = file
|
||||
}
|
||||
|
||||
reader := NewControlFileReader(text)
|
||||
stanza, err := reader.ReadStanza()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return stanza, nil
|
||||
|
||||
}
|
||||
56
debian/deb_test.go
vendored
56
debian/deb_test.go
vendored
@@ -1,56 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"github.com/smira/aptly/utils"
|
||||
. "launchpad.net/gocheck"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
type DebSuite struct {
|
||||
debFile, dscFile, dscFileNoSign string
|
||||
}
|
||||
|
||||
var _ = Suite(&DebSuite{})
|
||||
|
||||
func (s *DebSuite) SetUpSuite(c *C) {
|
||||
_, _File, _, _ := runtime.Caller(0)
|
||||
s.debFile = filepath.Join(filepath.Dir(_File), "../system/files/libboost-program-options-dev_1.49.0.1_i386.deb")
|
||||
s.dscFile = filepath.Join(filepath.Dir(_File), "../system/files/pyspi_0.6.1-1.3.dsc")
|
||||
s.dscFileNoSign = filepath.Join(filepath.Dir(_File), "../system/files/pyspi-0.6.1-1.3.stripped.dsc")
|
||||
}
|
||||
|
||||
func (s *DebSuite) TestGetControlFileFromDeb(c *C) {
|
||||
_, err := GetControlFileFromDeb("/no/such/file")
|
||||
c.Check(err, ErrorMatches, ".*no such file or directory")
|
||||
|
||||
_, _File, _, _ := runtime.Caller(0)
|
||||
_, err = GetControlFileFromDeb(_File)
|
||||
c.Check(err, ErrorMatches, "unable to read .deb archive: ar: missing global header")
|
||||
|
||||
st, err := GetControlFileFromDeb(s.debFile)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(st["Version"], Equals, "1.49.0.1")
|
||||
c.Check(st["Package"], Equals, "libboost-program-options-dev")
|
||||
}
|
||||
|
||||
func (s *DebSuite) TestGetControlFileFromDsc(c *C) {
|
||||
verifier := &utils.GpgVerifier{}
|
||||
|
||||
_, err := GetControlFileFromDsc("/no/such/file", verifier)
|
||||
c.Check(err, ErrorMatches, ".*no such file or directory")
|
||||
|
||||
_, _File, _, _ := runtime.Caller(0)
|
||||
_, err = GetControlFileFromDsc(_File, verifier)
|
||||
c.Check(err, ErrorMatches, "malformed stanza syntax")
|
||||
|
||||
st, err := GetControlFileFromDsc(s.dscFile, verifier)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(st["Version"], Equals, "0.6.1-1.3")
|
||||
c.Check(st["Source"], Equals, "pyspi")
|
||||
|
||||
st, err = GetControlFileFromDsc(s.dscFileNoSign, verifier)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(st["Version"], Equals, "0.6.1-1.4")
|
||||
c.Check(st["Source"], Equals, "pyspi")
|
||||
}
|
||||
2
debian/debian.go
vendored
2
debian/debian.go
vendored
@@ -1,2 +0,0 @@
|
||||
// Package debian implements Debian-specific repository handling
|
||||
package debian
|
||||
11
debian/debian_test.go
vendored
11
debian/debian_test.go
vendored
@@ -1,11 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
. "launchpad.net/gocheck"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Launch gocheck tests
|
||||
func Test(t *testing.T) {
|
||||
TestingT(t)
|
||||
}
|
||||
141
debian/format.go
vendored
141
debian/format.go
vendored
@@ -1,141 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Stanza or paragraph of Debian control file
|
||||
type Stanza map[string]string
|
||||
|
||||
// Canonical order of fields in stanza
|
||||
var canocialOrder = []string{"Origin", "Label", "Suite", "Package", "Version", "Installed-Size", "Priority", "Section", "Maintainer",
|
||||
"Architecture", "Codename", "Date", "Architectures", "Components", "Description", "MD5sum", "MD5Sum", "SHA1", "SHA256"}
|
||||
|
||||
// Copy returns copy of Stanza
|
||||
func (s Stanza) Copy() (result Stanza) {
|
||||
result = make(Stanza, len(s))
|
||||
for k, v := range s {
|
||||
result[k] = v
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Write single field from Stanza to writer
|
||||
func writeField(w *bufio.Writer, field, value string) (err error) {
|
||||
_, multiline := multilineFields[field]
|
||||
|
||||
if !multiline {
|
||||
_, err = w.WriteString(field + ": " + value + "\n")
|
||||
} else {
|
||||
if !strings.HasSuffix(value, "\n") {
|
||||
value = value + "\n"
|
||||
}
|
||||
_, err = w.WriteString(field + ":" + value)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// WriteTo saves stanza back to stream, modifying itself on the fly
|
||||
func (s Stanza) WriteTo(w *bufio.Writer) error {
|
||||
for _, field := range canocialOrder {
|
||||
value, ok := s[field]
|
||||
if ok {
|
||||
delete(s, field)
|
||||
err := writeField(w, field, value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for field, value := range s {
|
||||
err := writeField(w, field, value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Parsing errors
var (
	// ErrMalformedStanza is returned for a line that is neither a
	// continuation line nor a "Field: value" pair.
	ErrMalformedStanza = errors.New("malformed stanza syntax")
)

// multilineFields is the set of control-file fields whose values span
// multiple lines and must be preserved verbatim (not whitespace-trimmed).
// Initialized with a composite literal instead of init() mutation.
var multilineFields = map[string]bool{
	"Description":      true,
	"Files":            true,
	"Changes":          true,
	"Checksums-Sha1":   true,
	"Checksums-Sha256": true,
	"Package-List":     true,
	"SHA256":           true,
	"SHA1":             true,
	"MD5Sum":           true,
}
|
||||
|
||||
// ControlFileReader implements reading of control files stanza by stanza
|
||||
type ControlFileReader struct {
|
||||
scanner *bufio.Scanner
|
||||
}
|
||||
|
||||
// NewControlFileReader creates ControlFileReader, it wraps with buffering
|
||||
func NewControlFileReader(r io.Reader) *ControlFileReader {
|
||||
return &ControlFileReader{scanner: bufio.NewScanner(bufio.NewReaderSize(r, 32768))}
|
||||
}
|
||||
|
||||
// ReadStanza reeads one stanza from control file
|
||||
func (c *ControlFileReader) ReadStanza() (Stanza, error) {
|
||||
stanza := make(Stanza, 32)
|
||||
lastField := ""
|
||||
lastFieldMultiline := false
|
||||
|
||||
for c.scanner.Scan() {
|
||||
line := c.scanner.Text()
|
||||
|
||||
// Current stanza ends with empty line
|
||||
if line == "" {
|
||||
if len(stanza) > 0 {
|
||||
return stanza, nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if line[0] == ' ' || line[0] == '\t' {
|
||||
if lastFieldMultiline {
|
||||
stanza[lastField] += line + "\n"
|
||||
} else {
|
||||
stanza[lastField] += strings.TrimSpace(line)
|
||||
}
|
||||
} else {
|
||||
parts := strings.SplitN(line, ":", 2)
|
||||
if len(parts) != 2 {
|
||||
return nil, ErrMalformedStanza
|
||||
}
|
||||
lastField = parts[0]
|
||||
_, lastFieldMultiline = multilineFields[lastField]
|
||||
if lastFieldMultiline {
|
||||
stanza[lastField] = parts[1]
|
||||
if parts[1] != "" {
|
||||
stanza[lastField] += "\n"
|
||||
}
|
||||
} else {
|
||||
stanza[lastField] = strings.TrimSpace(parts[1])
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := c.scanner.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(stanza) > 0 {
|
||||
return stanza, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
136
debian/format_test.go
vendored
136
debian/format_test.go
vendored
@@ -1,136 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
. "launchpad.net/gocheck"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type ControlFileSuite struct {
|
||||
reader *bytes.Buffer
|
||||
}
|
||||
|
||||
var _ = Suite(&ControlFileSuite{})
|
||||
|
||||
const controlFile = `Package: bti
|
||||
Binary: bti
|
||||
Version: 032-1
|
||||
Maintainer: gregor herrmann <gregoa@debian.org>
|
||||
Uploaders: tony mancill <tmancill@debian.org>
|
||||
Build-Depends: debhelper (>= 8),
|
||||
bash-completion (>= 1:1.1-3),
|
||||
libcurl4-nss-dev, libreadline-dev, libxml2-dev, libpcre3-dev, liboauth-dev, xsltproc, docbook-xsl, docbook-xml, dh-autoreconf
|
||||
Architecture: any
|
||||
Standards-Version: 3.9.2
|
||||
Format: 3.0 (quilt)
|
||||
Files:
|
||||
3d5f65778bf3f89be03c313b0024b62c 1980 bti_032-1.dsc
|
||||
1e0d0b693fdeebec268004ba41701baf 59773 bti_032.orig.tar.gz
|
||||
ac1229a6d685023aeb8fcb0806324aa8 5065 bti_032-1.debian.tar.gz
|
||||
Vcs-Browser: http://svn.toastfreeware.priv.at/wsvn/ToastfreewareDebian/bti/trunk/
|
||||
Vcs-Svn: http://svn.toastfreeware.priv.at/debian/bti/trunk/
|
||||
Checksums-Sha1:
|
||||
3da2c5a42138c884a7d9524b9706dc56c0d6d46e 1980 bti_032-1.dsc
|
||||
22061e3f56074703be415d65abc9ca27ef775c6a 59773 bti_032.orig.tar.gz
|
||||
66ae7f56a3c1f0ebe0638d0ec0599a819d72baea 5065 bti_032-1.debian.tar.gz
|
||||
Checksums-Sha256:
|
||||
ed6015b79693f270d0a826c695b40e4d8eb4307942cac81a98f1fda479f74215 1980 bti_032-1.dsc
|
||||
feeabec98a89040a53283d798f7d55eb4311a854f17312a177dc45919883746a 59773 bti_032.orig.tar.gz
|
||||
f025da42efaf57db5e71a14cb8be27eb802ad23e7ab02b7ce2252454a86ac1d9 5065 bti_032-1.debian.tar.gz
|
||||
Homepage: http://gregkh.github.com/bti/
|
||||
Package-List:
|
||||
bti deb net extra
|
||||
Directory: pool/main/b/bti
|
||||
Description: This is cool
|
||||
Multiline description
|
||||
Section: net
|
||||
|
||||
|
||||
Package: i3-wm
|
||||
Version: 4.2-1
|
||||
Installed-Size: 1573
|
||||
Maintainer: Michael Stapelberg <stapelberg@debian.org>
|
||||
Architecture: amd64
|
||||
Provides: x-window-manager
|
||||
Depends: libc6 (>= 2.8), libev4 (>= 1:4.04), libpcre3 (>= 8.10), libstartup-notification0 (>= 0.10), libx11-6, libxcb-icccm4 (>= 0.3.8), libxcb-keysyms1 (>= 0.3.8), libxcb-randr0 (>= 1.3), libxcb-util0 (>= 0.3.8), libxcb-xinerama0, libxcb1, libxcursor1 (>> 1.1.2), libyajl2 (>= 2.0.4), perl, x11-utils
|
||||
Recommends: xfonts-base
|
||||
Suggests: rxvt-unicode | x-terminal-emulator
|
||||
Description-en: improved dynamic tiling window manager
|
||||
Key features of i3 are good documentation, reasonable defaults (changeable in
|
||||
a simple configuration file) and good multi-monitor support. The user
|
||||
interface is designed for power users and emphasizes keyboard usage. i3 uses
|
||||
XCB for asynchronous communication with X11 and aims to be fast and
|
||||
light-weight.
|
||||
.
|
||||
Please be aware i3 is primarily targeted at advanced users and developers.
|
||||
Homepage: http://i3wm.org/
|
||||
Description-md5: 2be7e62f455351435b1e055745d3e81c
|
||||
Tag: implemented-in::c, interface::x11, role::program, uitoolkit::TODO,
|
||||
works-with::unicode, x11::window-manager
|
||||
Section: x11
|
||||
Priority: extra
|
||||
Filename: pool/main/i/i3-wm/i3-wm_4.2-1_amd64.deb
|
||||
Size: 798186
|
||||
MD5sum: 3c7dbecd76d5c271401860967563fa8c
|
||||
SHA1: 2e94f3faa5d4d617061f94076b2537d15fbff73f
|
||||
SHA256: 2894bc999b3982c4e57f100fa31e21b52e14c5f3bc7ad5345f46842fcdab0db7`
|
||||
|
||||
func (s *ControlFileSuite) SetUpTest(c *C) {
|
||||
s.reader = bytes.NewBufferString(controlFile)
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestReadStanza(c *C) {
|
||||
r := NewControlFileReader(s.reader)
|
||||
|
||||
stanza1, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
stanza2, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
stanza3, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(stanza3, IsNil)
|
||||
|
||||
c.Check(stanza1["Format"], Equals, "3.0 (quilt)")
|
||||
c.Check(stanza1["Build-Depends"], Equals, "debhelper (>= 8),bash-completion (>= 1:1.1-3),libcurl4-nss-dev, libreadline-dev, libxml2-dev, libpcre3-dev, liboauth-dev, xsltproc, docbook-xsl, docbook-xml, dh-autoreconf")
|
||||
c.Check(stanza1["Files"], Equals, " 3d5f65778bf3f89be03c313b0024b62c 1980 bti_032-1.dsc\n"+
|
||||
" 1e0d0b693fdeebec268004ba41701baf 59773 bti_032.orig.tar.gz\n"+" ac1229a6d685023aeb8fcb0806324aa8 5065 bti_032-1.debian.tar.gz\n")
|
||||
c.Check(len(stanza2), Equals, 20)
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestReadWriteStanza(c *C) {
|
||||
r := NewControlFileReader(s.reader)
|
||||
stanza, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
w := bufio.NewWriter(buf)
|
||||
err = stanza.Copy().WriteTo(w)
|
||||
c.Assert(err, IsNil)
|
||||
err = w.Flush()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
str := buf.String()
|
||||
|
||||
r = NewControlFileReader(buf)
|
||||
stanza2, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Assert(stanza2, DeepEquals, stanza)
|
||||
c.Assert(strings.HasPrefix(str, "Package: "), Equals, true)
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) BenchmarkReadStanza(c *C) {
|
||||
for i := 0; i < c.N; i++ {
|
||||
reader := bytes.NewBufferString(controlFile)
|
||||
r := NewControlFileReader(reader)
|
||||
for {
|
||||
s, e := r.ReadStanza()
|
||||
if s == nil && e == nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
437
debian/list.go
vendored
437
debian/list.go
vendored
@@ -1,437 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/utils"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Dependency options
|
||||
const (
|
||||
// DepFollowSource pulls source packages when required
|
||||
DepFollowSource = 1 << iota
|
||||
// DepFollowSuggests pulls from suggests
|
||||
DepFollowSuggests
|
||||
// DepFollowRecommends pulls from recommends
|
||||
DepFollowRecommends
|
||||
// DepFollowAllVariants follows all variants if depends on "a | b"
|
||||
DepFollowAllVariants
|
||||
// DepFollowBuild pulls build dependencies
|
||||
DepFollowBuild
|
||||
)
|
||||
|
||||
// PackageList is list of unique (by key) packages
|
||||
//
|
||||
// It could be seen as repo snapshot, repo contents, result of filtering,
|
||||
// merge, etc.
|
||||
//
|
||||
// If indexed, PackageList starts supporting searching
|
||||
type PackageList struct {
|
||||
// Straight list of packages as map
|
||||
packages map[string]*Package
|
||||
// Has index been prepared?
|
||||
indexed bool
|
||||
// Indexed list of packages, sorted by name internally
|
||||
packagesIndex []*Package
|
||||
// Map of packages for each virtual package (provides)
|
||||
providesIndex map[string][]*Package
|
||||
}
|
||||
|
||||
// Verify interface
|
||||
var (
|
||||
_ sort.Interface = &PackageList{}
|
||||
)
|
||||
|
||||
// NewPackageList creates empty package list
|
||||
func NewPackageList() *PackageList {
|
||||
return &PackageList{packages: make(map[string]*Package, 1000)}
|
||||
}
|
||||
|
||||
// NewPackageListFromRefList loads packages list from PackageRefList
|
||||
func NewPackageListFromRefList(reflist *PackageRefList, collection *PackageCollection, progress aptly.Progress) (*PackageList, error) {
|
||||
// empty reflist
|
||||
if reflist == nil {
|
||||
return NewPackageList(), nil
|
||||
}
|
||||
|
||||
result := &PackageList{packages: make(map[string]*Package, reflist.Len())}
|
||||
|
||||
if progress != nil {
|
||||
progress.InitBar(int64(reflist.Len()), false)
|
||||
}
|
||||
|
||||
err := reflist.ForEach(func(key []byte) error {
|
||||
p, err2 := collection.ByKey(key)
|
||||
if err2 != nil {
|
||||
return fmt.Errorf("unable to load package with key %s: %s", key, err2)
|
||||
}
|
||||
if progress != nil {
|
||||
progress.AddBar(1)
|
||||
}
|
||||
return result.Add(p)
|
||||
})
|
||||
|
||||
if progress != nil {
|
||||
progress.ShutdownBar()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Add appends package to package list, additionally checking for uniqueness
|
||||
func (l *PackageList) Add(p *Package) error {
|
||||
key := string(p.Key(""))
|
||||
existing, ok := l.packages[key]
|
||||
if ok {
|
||||
if !existing.Equals(p) {
|
||||
return fmt.Errorf("conflict in package %s: %#v != %#v", p, existing, p)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
l.packages[key] = p
|
||||
|
||||
if l.indexed {
|
||||
for _, provides := range p.Provides {
|
||||
l.providesIndex[provides] = append(l.providesIndex[provides], p)
|
||||
}
|
||||
|
||||
i := sort.Search(len(l.packagesIndex), func(j int) bool { return l.packagesIndex[j].Name >= p.Name })
|
||||
|
||||
// insert p into l.packagesIndex in position i
|
||||
l.packagesIndex = append(l.packagesIndex, nil)
|
||||
copy(l.packagesIndex[i+1:], l.packagesIndex[i:])
|
||||
l.packagesIndex[i] = p
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ForEach calls handler for each package in list
|
||||
func (l *PackageList) ForEach(handler func(*Package) error) error {
|
||||
var err error
|
||||
for _, p := range l.packages {
|
||||
err = handler(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Len returns number of packages in the list
|
||||
func (l *PackageList) Len() int {
|
||||
return len(l.packages)
|
||||
}
|
||||
|
||||
// Append adds content from one package list to another
|
||||
func (l *PackageList) Append(pl *PackageList) error {
|
||||
if l.indexed {
|
||||
panic("Append not supported when indexed")
|
||||
}
|
||||
for k, p := range pl.packages {
|
||||
existing, ok := l.packages[k]
|
||||
if ok {
|
||||
if !existing.Equals(p) {
|
||||
return fmt.Errorf("conflict in package %s: %#v != %#v", p, existing, p)
|
||||
}
|
||||
} else {
|
||||
l.packages[k] = p
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Remove removes package from the list, and updates index when required
|
||||
func (l *PackageList) Remove(p *Package) {
|
||||
delete(l.packages, string(p.Key("")))
|
||||
if l.indexed {
|
||||
for _, provides := range p.Provides {
|
||||
for i, pkg := range l.providesIndex[provides] {
|
||||
if pkg.Equals(p) {
|
||||
// remove l.ProvidesIndex[provides][i] w/o preserving order
|
||||
l.providesIndex[provides][len(l.providesIndex[provides])-1], l.providesIndex[provides][i], l.providesIndex[provides] =
|
||||
nil, l.providesIndex[provides][len(l.providesIndex[provides])-1], l.providesIndex[provides][:len(l.providesIndex[provides])-1]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
i := sort.Search(len(l.packagesIndex), func(j int) bool { return l.packagesIndex[j].Name >= p.Name })
|
||||
for i < len(l.packagesIndex) && l.packagesIndex[i].Name == p.Name {
|
||||
if l.packagesIndex[i].Equals(p) {
|
||||
// remove l.packagesIndex[i] preserving order
|
||||
copy(l.packagesIndex[i:], l.packagesIndex[i+1:])
|
||||
l.packagesIndex[len(l.packagesIndex)-1] = nil
|
||||
l.packagesIndex = l.packagesIndex[:len(l.packagesIndex)-1]
|
||||
break
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Architectures returns list of architectures present in packages and flag if source packages are present.
|
||||
//
|
||||
// If includeSource is true, meta-architecture "source" would be present in the list
|
||||
func (l *PackageList) Architectures(includeSource bool) (result []string) {
|
||||
result = make([]string, 0, 10)
|
||||
for _, pkg := range l.packages {
|
||||
if pkg.Architecture != "all" && (pkg.Architecture != "source" || includeSource) && !utils.StrSliceHasItem(result, pkg.Architecture) {
|
||||
result = append(result, pkg.Architecture)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// depSliceDeduplicate removes dups in slice of Dependencies
|
||||
func depSliceDeduplicate(s []Dependency) []Dependency {
|
||||
l := len(s)
|
||||
if l < 2 {
|
||||
return s
|
||||
}
|
||||
if l == 2 {
|
||||
if s[0] == s[1] {
|
||||
return s[0:1]
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
found := make(map[string]bool, l)
|
||||
j := 0
|
||||
for i, x := range s {
|
||||
h := x.Hash()
|
||||
if !found[h] {
|
||||
found[h] = true
|
||||
s[j] = s[i]
|
||||
j++
|
||||
}
|
||||
}
|
||||
|
||||
return s[:j]
|
||||
}
|
||||
|
||||
// VerifyDependencies looks for missing dependencies in package list.
|
||||
//
|
||||
// Analysis would be peformed for each architecture, in specified sources
|
||||
func (l *PackageList) VerifyDependencies(options int, architectures []string, sources *PackageList, progress aptly.Progress) ([]Dependency, error) {
|
||||
missing := make([]Dependency, 0, 128)
|
||||
|
||||
if progress != nil {
|
||||
progress.InitBar(int64(l.Len())*int64(len(architectures)), false)
|
||||
}
|
||||
|
||||
for _, arch := range architectures {
|
||||
cache := make(map[string]bool, 2048)
|
||||
|
||||
for _, p := range l.packages {
|
||||
if progress != nil {
|
||||
progress.AddBar(1)
|
||||
}
|
||||
|
||||
if !p.MatchesArchitecture(arch) {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, dep := range p.GetDependencies(options) {
|
||||
variants, err := ParseDependencyVariants(dep)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to process package %s: %s", p, err)
|
||||
}
|
||||
|
||||
variants = depSliceDeduplicate(variants)
|
||||
|
||||
variantsMissing := make([]Dependency, 0, len(variants))
|
||||
missingCount := 0
|
||||
|
||||
for _, dep := range variants {
|
||||
if dep.Architecture == "" {
|
||||
dep.Architecture = arch
|
||||
}
|
||||
|
||||
hash := dep.Hash()
|
||||
r, ok := cache[hash]
|
||||
if ok {
|
||||
if !r {
|
||||
missingCount++
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if sources.Search(dep) == nil {
|
||||
variantsMissing = append(variantsMissing, dep)
|
||||
missingCount++
|
||||
} else {
|
||||
cache[hash] = true
|
||||
}
|
||||
}
|
||||
|
||||
if options&DepFollowAllVariants == DepFollowAllVariants {
|
||||
missing = append(missing, variantsMissing...)
|
||||
for _, dep := range variantsMissing {
|
||||
cache[dep.Hash()] = false
|
||||
}
|
||||
} else {
|
||||
if missingCount == len(variants) {
|
||||
missing = append(missing, variantsMissing...)
|
||||
for _, dep := range variantsMissing {
|
||||
cache[dep.Hash()] = false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if progress != nil {
|
||||
progress.ShutdownBar()
|
||||
}
|
||||
|
||||
return missing, nil
|
||||
}
|
||||
|
||||
// Swap swaps two packages in index
|
||||
func (l *PackageList) Swap(i, j int) {
|
||||
l.packagesIndex[i], l.packagesIndex[j] = l.packagesIndex[j], l.packagesIndex[i]
|
||||
}
|
||||
|
||||
// Compare compares two names in lexographical order
|
||||
func (l *PackageList) Less(i, j int) bool {
|
||||
return l.packagesIndex[i].Name < l.packagesIndex[j].Name
|
||||
}
|
||||
|
||||
// PrepareIndex prepares list for indexing
|
||||
func (l *PackageList) PrepareIndex() {
|
||||
l.packagesIndex = make([]*Package, l.Len())
|
||||
l.providesIndex = make(map[string][]*Package, 128)
|
||||
|
||||
i := 0
|
||||
for _, p := range l.packages {
|
||||
l.packagesIndex[i] = p
|
||||
i++
|
||||
|
||||
for _, provides := range p.Provides {
|
||||
l.providesIndex[provides] = append(l.providesIndex[provides], p)
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(l)
|
||||
|
||||
l.indexed = true
|
||||
}
|
||||
|
||||
// Search searches package index for specified package
|
||||
func (l *PackageList) Search(dep Dependency) *Package {
|
||||
if !l.indexed {
|
||||
panic("list not indexed, can't search")
|
||||
}
|
||||
|
||||
if dep.Relation == VersionDontCare {
|
||||
for _, p := range l.providesIndex[dep.Pkg] {
|
||||
if p.MatchesArchitecture(dep.Architecture) {
|
||||
return p
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
i := sort.Search(len(l.packagesIndex), func(j int) bool { return l.packagesIndex[j].Name >= dep.Pkg })
|
||||
|
||||
for i < len(l.packagesIndex) && l.packagesIndex[i].Name == dep.Pkg {
|
||||
p := l.packagesIndex[i]
|
||||
if p.MatchesDependency(dep) {
|
||||
return p
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Filter filters package index by specified queries (ORed together), possibly pulling dependencies
|
||||
func (l *PackageList) Filter(queries []string, withDependencies bool, source *PackageList, dependencyOptions int, architecturesList []string) (*PackageList, error) {
|
||||
if !l.indexed {
|
||||
panic("list not indexed, can't filter")
|
||||
}
|
||||
|
||||
result := NewPackageList()
|
||||
|
||||
for _, query := range queries {
|
||||
isDepQuery := strings.IndexAny(query, " (){}=<>") != -1
|
||||
|
||||
if !isDepQuery {
|
||||
// try to interpret query as package string representation
|
||||
|
||||
// convert Package.String() to Package.Key()
|
||||
i := strings.Index(query, "_")
|
||||
if i != -1 {
|
||||
pkg, query := query[:i], query[i+1:]
|
||||
j := strings.LastIndex(query, "_")
|
||||
if j != -1 {
|
||||
version, arch := query[:j], query[j+1:]
|
||||
p := l.packages["P"+arch+" "+pkg+" "+version]
|
||||
if p != nil {
|
||||
result.Add(p)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// try as dependency
|
||||
dep, err := ParseDependency(query)
|
||||
if err != nil {
|
||||
if isDepQuery {
|
||||
return nil, err
|
||||
}
|
||||
// parsing failed, but probably that wasn't a dep query
|
||||
continue
|
||||
}
|
||||
|
||||
i := sort.Search(len(l.packagesIndex), func(j int) bool { return l.packagesIndex[j].Name >= dep.Pkg })
|
||||
|
||||
for i < len(l.packagesIndex) && l.packagesIndex[i].Name == dep.Pkg {
|
||||
p := l.packagesIndex[i]
|
||||
if p.MatchesDependency(dep) {
|
||||
result.Add(p)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
if withDependencies {
|
||||
added := result.Len()
|
||||
|
||||
dependencySource := NewPackageList()
|
||||
dependencySource.Append(source)
|
||||
dependencySource.Append(result)
|
||||
dependencySource.PrepareIndex()
|
||||
|
||||
// while some new dependencies were discovered
|
||||
for added > 0 {
|
||||
added = 0
|
||||
|
||||
// find missing dependencies
|
||||
missing, err := result.VerifyDependencies(dependencyOptions, architecturesList, dependencySource, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// try to satisfy dependencies
|
||||
for _, dep := range missing {
|
||||
p := l.Search(dep)
|
||||
if p != nil {
|
||||
result.Add(p)
|
||||
dependencySource.Add(p)
|
||||
added++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
303
debian/list_test.go
vendored
303
debian/list_test.go
vendored
@@ -1,303 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
. "launchpad.net/gocheck"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type PackageListSuite struct {
|
||||
// Simple list with "real" packages from stanzas
|
||||
list *PackageList
|
||||
p1, p2, p3, p4, p5, p6 *Package
|
||||
|
||||
// Mocked packages in list
|
||||
packages []*Package
|
||||
sourcePackages []*Package
|
||||
il *PackageList
|
||||
}
|
||||
|
||||
var _ = Suite(&PackageListSuite{})
|
||||
|
||||
// SetUpTest builds the fixtures used by every test in this suite:
// s.list plus p1..p6 come from parsed control-file stanzas, while s.il is an
// indexed list of hand-constructed packages (s.packages / s.sourcePackages).
func (s *PackageListSuite) SetUpTest(c *C) {
	s.list = NewPackageList()

	// p1/p2 are identical; p3..p6 vary one stanza field each
	s.p1 = NewPackageFromControlFile(packageStanza.Copy())
	s.p2 = NewPackageFromControlFile(packageStanza.Copy())
	stanza := packageStanza.Copy()
	stanza["Package"] = "mars-invaders"
	s.p3 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Size"] = "42"
	s.p4 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Package"] = "lonely-strangers"
	s.p5 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Version"] = "99.1"
	s.p6 = NewPackageFromControlFile(stanza)

	s.il = NewPackageList()
	s.packages = []*Package{
		&Package{Name: "lib", Version: "1.0", Architecture: "i386", Source: "lib (0.9)", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}, Depends: []string{"mail-agent"}}},
		&Package{Name: "dpkg", Version: "1.7", Architecture: "i386", Provides: []string{"package-installer"}, deps: &PackageDependencies{}},
		&Package{Name: "data", Version: "1.1~bp1", Architecture: "all", Source: "app", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}}},
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "i386", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}, Depends: []string{"lib (>> 0.9)", "data (>= 1.0)"}}},
		&Package{Name: "mailer", Version: "3.5.8", Architecture: "i386", Source: "postfix (1.3)", Provides: []string{"mail-agent"}, deps: &PackageDependencies{}},
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "amd64", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}, Depends: []string{"lib (>> 0.9)", "data (>= 1.0)"}}},
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "arm", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}, Depends: []string{"lib (>> 0.9) | libx (>= 1.5)", "data (>= 1.0) | mail-agent"}}},
		// "dpkg >= 1.6)" is deliberately malformed — TestVerifyDependencies
		// expects processing of app_1.0_s390 to fail on it
		&Package{Name: "app", Version: "1.0", Architecture: "s390", deps: &PackageDependencies{PreDepends: []string{"dpkg >= 1.6)"}, Depends: []string{"lib (>> 0.9)", "data (>= 1.0)"}}},
		&Package{Name: "aa", Version: "2.0-1", Architecture: "i386", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}}},
		&Package{Name: "dpkg", Version: "1.6.1-3", Architecture: "amd64", Provides: []string{"package-installer"}, deps: &PackageDependencies{}},
		&Package{Name: "libx", Version: "1.5", Architecture: "arm", deps: &PackageDependencies{PreDepends: []string{"dpkg (>= 1.6)"}}},
		&Package{Name: "dpkg", Version: "1.6.1-3", Architecture: "arm", Provides: []string{"package-installer"}, deps: &PackageDependencies{}},
		&Package{Name: "dpkg", Version: "1.6.1-3", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
		&Package{Name: "dpkg", Version: "1.7", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
	}
	for _, p := range s.packages {
		s.il.Add(p)
	}
	s.il.PrepareIndex()

	// source packages added separately by TestVerifyDependencies when needed
	s.sourcePackages = []*Package{
		&Package{Name: "postfix", Version: "1.3", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
		&Package{Name: "aa", Version: "2.0-1", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
		&Package{Name: "lib", Version: "0.9", Architecture: "source", SourceArchitecture: "any", IsSource: true, deps: &PackageDependencies{}},
	}

}
|
||||
|
||||
// TestAddLen verifies Add deduplicates equal packages, counts distinct ones,
// and reports a conflict for a package differing only in its Size field.
func (s *PackageListSuite) TestAddLen(c *C) {
	c.Check(s.list.Len(), Equals, 0)
	c.Check(s.list.Add(s.p1), IsNil)
	c.Check(s.list.Len(), Equals, 1)
	c.Check(s.list.Add(s.p2), IsNil)
	c.Check(s.list.Len(), Equals, 1)
	c.Check(s.list.Add(s.p3), IsNil)
	c.Check(s.list.Len(), Equals, 2)
	c.Check(s.list.Add(s.p4), ErrorMatches, "conflict in package.*")
}

// TestRemove verifies Remove drops a package from an unindexed list.
func (s *PackageListSuite) TestRemove(c *C) {
	c.Check(s.list.Add(s.p1), IsNil)
	c.Check(s.list.Add(s.p3), IsNil)
	c.Check(s.list.Len(), Equals, 2)

	s.list.Remove(s.p1)
	c.Check(s.list.Len(), Equals, 1)
}
|
||||
|
||||
// TestAddWhenIndexed verifies Add keeps packagesIndex sorted by name and
// updates providesIndex when inserting into an already-indexed list.
func (s *PackageListSuite) TestAddWhenIndexed(c *C) {
	c.Check(s.list.Len(), Equals, 0)
	s.list.PrepareIndex()

	c.Check(s.list.Add(&Package{Name: "a1st", Version: "1.0", Architecture: "i386", Provides: []string{"fa", "fb"}}), IsNil)
	c.Check(s.list.packagesIndex[0].Name, Equals, "a1st")
	c.Check(s.list.providesIndex["fa"][0].Name, Equals, "a1st")
	c.Check(s.list.providesIndex["fb"][0].Name, Equals, "a1st")

	c.Check(s.list.Add(&Package{Name: "c3rd", Version: "1.0", Architecture: "i386", Provides: []string{"fa"}}), IsNil)
	c.Check(s.list.packagesIndex[0].Name, Equals, "a1st")
	c.Check(s.list.packagesIndex[1].Name, Equals, "c3rd")
	c.Check(s.list.providesIndex["fa"][0].Name, Equals, "a1st")
	c.Check(s.list.providesIndex["fa"][1].Name, Equals, "c3rd")
	c.Check(s.list.providesIndex["fb"][0].Name, Equals, "a1st")

	// "b2nd" must be inserted between the existing two entries
	c.Check(s.list.Add(&Package{Name: "b2nd", Version: "1.0", Architecture: "i386"}), IsNil)
	c.Check(s.list.packagesIndex[0].Name, Equals, "a1st")
	c.Check(s.list.packagesIndex[1].Name, Equals, "b2nd")
	c.Check(s.list.packagesIndex[2].Name, Equals, "c3rd")
	c.Check(s.list.providesIndex["fa"][0].Name, Equals, "a1st")
	c.Check(s.list.providesIndex["fa"][1].Name, Equals, "c3rd")
	c.Check(s.list.providesIndex["fb"][0].Name, Equals, "a1st")
}
|
||||
|
||||
// TestRemoveWhenIndexed verifies Remove keeps both packagesIndex and
// providesIndex consistent on an indexed list.
func (s *PackageListSuite) TestRemoveWhenIndexed(c *C) {
	s.il.Remove(s.packages[0])
	names := make([]string, s.il.Len())
	for i, p := range s.il.packagesIndex {
		names[i] = p.Name
	}
	c.Check(names, DeepEquals, []string{"aa", "app", "app", "app", "app", "data", "dpkg", "dpkg", "dpkg", "dpkg", "dpkg", "libx", "mailer"})

	// removing "mailer" also empties its providesIndex entry
	s.il.Remove(s.packages[4])
	names = make([]string, s.il.Len())
	for i, p := range s.il.packagesIndex {
		names[i] = p.Name
	}
	c.Check(names, DeepEquals, []string{"aa", "app", "app", "app", "app", "data", "dpkg", "dpkg", "dpkg", "dpkg", "dpkg", "libx"})
	c.Check(s.il.providesIndex["mail-agent"], DeepEquals, []*Package{})

	s.il.Remove(s.packages[9])
	names = make([]string, s.il.Len())
	for i, p := range s.il.packagesIndex {
		names[i] = p.Name
	}
	c.Check(names, DeepEquals, []string{"aa", "app", "app", "app", "app", "data", "dpkg", "dpkg", "dpkg", "dpkg", "libx"})
	c.Check(s.il.providesIndex["package-installer"], HasLen, 2)

	s.il.Remove(s.packages[1])
	names = make([]string, s.il.Len())
	for i, p := range s.il.packagesIndex {
		names[i] = p.Name
	}
	c.Check(names, DeepEquals, []string{"aa", "app", "app", "app", "app", "data", "dpkg", "dpkg", "dpkg", "libx"})
	c.Check(s.il.providesIndex["package-installer"], DeepEquals, []*Package{s.packages[11]})
}
|
||||
|
||||
// TestForeach verifies ForEach visits every package and propagates the
// handler's error.
func (s *PackageListSuite) TestForeach(c *C) {
	s.list.Add(s.p1)
	s.list.Add(s.p3)

	Len := 0
	err := s.list.ForEach(func(*Package) error {
		Len++
		return nil
	})

	c.Check(Len, Equals, 2)
	c.Check(err, IsNil)

	e := errors.New("a")

	err = s.list.ForEach(func(*Package) error {
		return e
	})

	c.Check(err, Equals, e)

}

// TestIndex verifies PrepareIndex built the provides map and the sorted
// packages index ("aa" — s.packages[8] — sorts first).
func (s *PackageListSuite) TestIndex(c *C) {
	c.Check(len(s.il.providesIndex), Equals, 2)
	c.Check(len(s.il.providesIndex["mail-agent"]), Equals, 1)
	c.Check(len(s.il.providesIndex["package-installer"]), Equals, 3)
	c.Check(s.il.packagesIndex[0], Equals, s.packages[8])
}
|
||||
|
||||
// TestAppend verifies Append merges lists, reports conflicts, and panics on
// an already-indexed destination.
func (s *PackageListSuite) TestAppend(c *C) {
	s.list.Add(s.p1)
	s.list.Add(s.p3)

	err := s.list.Append(s.il)
	c.Check(err, IsNil)
	c.Check(s.list.Len(), Equals, 16)

	list := NewPackageList()
	list.Add(s.p4)

	err = s.list.Append(list)
	c.Check(err, ErrorMatches, "conflict.*")

	s.list.PrepareIndex()
	c.Check(func() { s.list.Append(s.il) }, Panics, "Append not supported when indexed")
}
|
||||
|
||||
// TestSearch verifies dependency lookup by name, by provided virtual package,
// and with every version relation; it panics on an unindexed list.
func (s *PackageListSuite) TestSearch(c *C) {
	c.Check(func() { s.list.Search(Dependency{Architecture: "i386", Pkg: "app"}) }, Panics, "list not indexed, can't search")

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "mail-agent"}), Equals, s.packages[4])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "puppy"}), IsNil)

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionEqual, Version: "1.1~bp1"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionEqual, Version: "1.1~bp2"}), IsNil)

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionLess, Version: "1.1"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionLess, Version: "1.1~~"}), IsNil)

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionLessOrEqual, Version: "1.1"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionLessOrEqual, Version: "1.1~bp1"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionLessOrEqual, Version: "1.1~~"}), IsNil)

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionGreater, Version: "1.0"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionGreater, Version: "1.2"}), IsNil)

	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionGreaterOrEqual, Version: "1.0"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionGreaterOrEqual, Version: "1.1~bp1"}), Equals, s.packages[3])
	c.Check(s.il.Search(Dependency{Architecture: "i386", Pkg: "app", Relation: VersionGreaterOrEqual, Version: "1.2"}), IsNil)
}
|
||||
|
||||
// TestFilter verifies Filter with exact package refs, dependency queries,
// architecture-scoped queries, and withDependencies expansion.
func (s *PackageListSuite) TestFilter(c *C) {
	c.Check(func() { s.list.Filter([]string{"abcd_0.3_i386"}, false, nil, 0, nil) }, Panics, "list not indexed, can't filter")

	_, err := s.il.Filter([]string{"app >3)"}, false, nil, 0, nil)
	c.Check(err, ErrorMatches, "unable to parse dependency.*")

	// plString renders a list as a sorted, space-joined string for comparison
	plString := func(l *PackageList) string {
		list := make([]string, 0, l.Len())
		for _, p := range l.packages {
			list = append(list, p.String())
		}

		sort.Strings(list)

		return strings.Join(list, " ")
	}

	result, err := s.il.Filter([]string{"app_1.1~bp1_i386"}, false, nil, 0, nil)
	c.Check(err, IsNil)
	c.Check(plString(result), Equals, "app_1.1~bp1_i386")

	result, err = s.il.Filter([]string{"app_1.1~bp1_i386", "dpkg_1.7_source", "dpkg_1.8_amd64"}, false, nil, 0, nil)
	c.Check(err, IsNil)
	c.Check(plString(result), Equals, "app_1.1~bp1_i386 dpkg_1.7_source")

	result, err = s.il.Filter([]string{"app", "dpkg (>>1.6.1-3)", "app (>=1.0)", "xyz", "aa (>>3.0)"}, false, nil, 0, nil)
	c.Check(err, IsNil)
	c.Check(plString(result), Equals, "app_1.0_s390 app_1.1~bp1_amd64 app_1.1~bp1_arm app_1.1~bp1_i386 dpkg_1.7_i386 dpkg_1.7_source")

	result, err = s.il.Filter([]string{"app {i386}"}, true, NewPackageList(), 0, []string{"i386"})
	c.Check(err, IsNil)
	c.Check(plString(result), Equals, "app_1.1~bp1_i386 data_1.1~bp1_all dpkg_1.7_i386 lib_1.0_i386 mailer_3.5.8_i386")

	result, err = s.il.Filter([]string{"app (>=0.9)", "lib", "data"}, true, NewPackageList(), 0, []string{"i386", "amd64"})
	c.Check(err, IsNil)
	c.Check(plString(result), Equals, "app_1.0_s390 app_1.1~bp1_amd64 app_1.1~bp1_arm app_1.1~bp1_i386 data_1.1~bp1_all dpkg_1.6.1-3_amd64 dpkg_1.7_i386 lib_1.0_i386 mailer_3.5.8_i386")
}
|
||||
|
||||
// TestVerifyDependencies verifies missing-dependency detection across
// architecture sets and with DepFollowAllVariants / DepFollowSource options;
// the malformed dep on app_1.0_s390 must surface as an error.
func (s *PackageListSuite) TestVerifyDependencies(c *C) {
	missing, err := s.il.VerifyDependencies(0, []string{"i386"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{})

	missing, err = s.il.VerifyDependencies(0, []string{"i386", "amd64"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{Dependency{Pkg: "lib", Relation: VersionGreater, Version: "0.9", Architecture: "amd64"}})

	missing, err = s.il.VerifyDependencies(0, []string{"arm"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{})

	missing, err = s.il.VerifyDependencies(DepFollowAllVariants, []string{"arm"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{Dependency{Pkg: "lib", Relation: VersionGreater, Version: "0.9", Architecture: "arm"},
		Dependency{Pkg: "mail-agent", Relation: VersionDontCare, Version: "", Architecture: "arm"}})

	for _, p := range s.sourcePackages {
		s.il.Add(p)
	}

	missing, err = s.il.VerifyDependencies(DepFollowSource, []string{"i386", "amd64"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{Dependency{Pkg: "lib", Relation: VersionGreater, Version: "0.9", Architecture: "amd64"}})

	missing, err = s.il.VerifyDependencies(DepFollowSource, []string{"arm"}, s.il, nil)
	c.Check(err, IsNil)
	c.Check(missing, DeepEquals, []Dependency{Dependency{Pkg: "libx", Relation: VersionEqual, Version: "1.5", Architecture: "source"}})

	_, err = s.il.VerifyDependencies(0, []string{"i386", "amd64", "s390"}, s.il, nil)
	c.Check(err, ErrorMatches, "unable to process package app_1.0_s390:.*")
}
|
||||
|
||||
// TestArchitectures verifies Architectures with and without "source" included.
func (s *PackageListSuite) TestArchitectures(c *C) {
	archs := s.il.Architectures(true)
	sort.Strings(archs)
	c.Check(archs, DeepEquals, []string{"amd64", "arm", "i386", "s390", "source"})

	archs = s.il.Architectures(false)
	sort.Strings(archs)
	c.Check(archs, DeepEquals, []string{"amd64", "arm", "i386", "s390"})
}
|
||||
223
debian/local.go
vendored
223
debian/local.go
vendored
@@ -1,223 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"code.google.com/p/go-uuid/uuid"
|
||||
"fmt"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/ugorji/go/codec"
|
||||
"log"
|
||||
)
|
||||
|
||||
// LocalRepo is a collection of packages created locally
type LocalRepo struct {
	// Permanent internal ID
	UUID string
	// User-assigned name
	Name string
	// Comment
	Comment string
	// DefaultDistribution to use when publishing (optional)
	DefaultDistribution string `codec:",omitempty"`
	// DefaultComponent to use when publishing (optional)
	DefaultComponent string `codec:",omitempty"`
	// "Snapshot" of current list of packages; unexported, so not part of the
	// msgpack encoding — persisted separately under RefKey
	packageRefs *PackageRefList
}
|
||||
|
||||
// NewLocalRepo creates new instance of Debian local repository
|
||||
func NewLocalRepo(name string, comment string) *LocalRepo {
|
||||
return &LocalRepo{
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
Comment: comment,
|
||||
}
|
||||
}
|
||||
|
||||
// String interface
|
||||
func (repo *LocalRepo) String() string {
|
||||
if repo.Comment != "" {
|
||||
return fmt.Sprintf("[%s]: %s", repo.Name, repo.Comment)
|
||||
}
|
||||
return fmt.Sprintf("[%s]", repo.Name)
|
||||
}
|
||||
|
||||
// NumPackages return number of packages in local repo
|
||||
func (repo *LocalRepo) NumPackages() int {
|
||||
if repo.packageRefs == nil {
|
||||
return 0
|
||||
}
|
||||
return repo.packageRefs.Len()
|
||||
}
|
||||
|
||||
// RefList returns package list for repo (nil until assigned or loaded via
// LocalRepoCollection.LoadComplete)
func (repo *LocalRepo) RefList() *PackageRefList {
	return repo.packageRefs
}

// UpdateRefList changes package list for local repo
func (repo *LocalRepo) UpdateRefList(reflist *PackageRefList) {
	repo.packageRefs = reflist
}
|
||||
|
||||
// Encode does msgpack encoding of LocalRepo
|
||||
func (repo *LocalRepo) Encode() []byte {
|
||||
var buf bytes.Buffer
|
||||
|
||||
encoder := codec.NewEncoder(&buf, &codec.MsgpackHandle{})
|
||||
encoder.Encode(repo)
|
||||
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// Decode decodes msgpack representation into LocalRepo
|
||||
func (repo *LocalRepo) Decode(input []byte) error {
|
||||
decoder := codec.NewDecoderBytes(input, &codec.MsgpackHandle{})
|
||||
return decoder.Decode(repo)
|
||||
}
|
||||
|
||||
// Key is a unique id in DB; all local repos share the "L" prefix
func (repo *LocalRepo) Key() []byte {
	return []byte("L" + repo.UUID)
}

// RefKey is a unique id for package reference list; "E" prefix followed by
// the same UUID as Key
func (repo *LocalRepo) RefKey() []byte {
	return []byte("E" + repo.UUID)
}
|
||||
|
||||
// LocalRepoCollection does listing, updating/adding/deleting of LocalRepos
type LocalRepoCollection struct {
	db   database.Storage
	list []*LocalRepo // in-memory cache of all repos, loaded at construction
}
|
||||
|
||||
// NewLocalRepoCollection loads LocalRepos from DB and makes up collection
|
||||
func NewLocalRepoCollection(db database.Storage) *LocalRepoCollection {
|
||||
result := &LocalRepoCollection{
|
||||
db: db,
|
||||
}
|
||||
|
||||
blobs := db.FetchByPrefix([]byte("L"))
|
||||
result.list = make([]*LocalRepo, 0, len(blobs))
|
||||
|
||||
for _, blob := range blobs {
|
||||
r := &LocalRepo{}
|
||||
if err := r.Decode(blob); err != nil {
|
||||
log.Printf("Error decoding mirror: %s\n", err)
|
||||
} else {
|
||||
result.list = append(result.list, r)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Add appends new repo to collection and saves it
|
||||
func (collection *LocalRepoCollection) Add(repo *LocalRepo) error {
|
||||
for _, r := range collection.list {
|
||||
if r.Name == repo.Name {
|
||||
return fmt.Errorf("local repo with name %s already exists", repo.Name)
|
||||
}
|
||||
}
|
||||
|
||||
err := collection.Update(repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collection.list = append(collection.list, repo)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Update stores updated information about repo in DB
|
||||
func (collection *LocalRepoCollection) Update(repo *LocalRepo) error {
|
||||
err := collection.db.Put(repo.Key(), repo.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if repo.packageRefs != nil {
|
||||
err = collection.db.Put(repo.RefKey(), repo.packageRefs.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadComplete loads additional information for local repo
|
||||
func (collection *LocalRepoCollection) LoadComplete(repo *LocalRepo) error {
|
||||
encoded, err := collection.db.Get(repo.RefKey())
|
||||
if err == database.ErrNotFound {
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
repo.packageRefs = &PackageRefList{}
|
||||
return repo.packageRefs.Decode(encoded)
|
||||
}
|
||||
|
||||
// ByName looks up repository by name
|
||||
func (collection *LocalRepoCollection) ByName(name string) (*LocalRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.Name == name {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("local repo with name %s not found", name)
|
||||
}
|
||||
|
||||
// ByUUID looks up repository by uuid
|
||||
func (collection *LocalRepoCollection) ByUUID(uuid string) (*LocalRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.UUID == uuid {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("local repo with uuid %s not found", uuid)
|
||||
}
|
||||
|
||||
// ForEach runs method for each repository
|
||||
func (collection *LocalRepoCollection) ForEach(handler func(*LocalRepo) error) error {
|
||||
var err error
|
||||
for _, r := range collection.list {
|
||||
err = handler(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Len returns number of local repos in the collection
// (comment fixed: previously said "remote repos")
func (collection *LocalRepoCollection) Len() int {
	return len(collection.list)
}
|
||||
|
||||
// Drop removes local repo from collection and deletes both its metadata and
// reflist entries from the DB. Panics if the repo is not in the collection.
// (comment fixed: previously said "remote repo")
func (collection *LocalRepoCollection) Drop(repo *LocalRepo) error {
	repoPosition := -1

	// identity (pointer) comparison — callers must pass a repo obtained from
	// this collection
	for i, r := range collection.list {
		if r == repo {
			repoPosition = i
			break
		}
	}

	if repoPosition == -1 {
		panic("local repo not found!")
	}

	// unordered removal: move the last element into the freed slot, nil the
	// vacated tail slot so it can be GC'd, then shrink the slice by one
	collection.list[len(collection.list)-1], collection.list[repoPosition], collection.list =
		nil, collection.list[len(collection.list)-1], collection.list[:len(collection.list)-1]

	err := collection.db.Delete(repo.Key())
	if err != nil {
		return err
	}

	return collection.db.Delete(repo.RefKey())
}
|
||||
195
debian/local_test.go
vendored
195
debian/local_test.go
vendored
@@ -1,195 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/smira/aptly/database"
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
// LocalRepoSuite tests LocalRepo methods against a temporary database.
type LocalRepoSuite struct {
	db      database.Storage
	list    *PackageList
	reflist *PackageRefList
	repo    *LocalRepo
}

var _ = Suite(&LocalRepoSuite{})
|
||||
|
||||
// SetUpTest opens a scratch DB and builds a repo holding a two-package reflist.
func (s *LocalRepoSuite) SetUpTest(c *C) {
	s.db, _ = database.OpenDB(c.MkDir())
	s.list = NewPackageList()
	s.list.Add(&Package{Name: "lib", Version: "1.7", Architecture: "i386"})
	s.list.Add(&Package{Name: "app", Version: "1.9", Architecture: "amd64"})

	s.reflist = NewPackageRefListFromPackageList(s.list)

	s.repo = NewLocalRepo("lrepo", "Super repo")
	s.repo.packageRefs = s.reflist
}

// TearDownTest closes the scratch database.
func (s *LocalRepoSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
// TestString checks both forms of String: with and without a comment.
func (s *LocalRepoSuite) TestString(c *C) {
	c.Check(NewLocalRepo("lrepo", "My first repo").String(), Equals, "[lrepo]: My first repo")
	c.Check(NewLocalRepo("lrepo2", "").String(), Equals, "[lrepo2]")
}

// TestNumPackages checks the no-reflist and populated cases.
func (s *LocalRepoSuite) TestNumPackages(c *C) {
	c.Check(NewLocalRepo("lrepo", "My first repo").NumPackages(), Equals, 0)
	c.Check(s.repo.NumPackages(), Equals, 2)
}

// TestRefList checks RefList is nil until a reflist is attached.
func (s *LocalRepoSuite) TestRefList(c *C) {
	c.Check(NewLocalRepo("lrepo", "My first repo").RefList(), IsNil)
	c.Check(s.repo.RefList(), Equals, s.reflist)
}

// TestUpdateRefList checks UpdateRefList replaces the stored reflist.
func (s *LocalRepoSuite) TestUpdateRefList(c *C) {
	s.repo.UpdateRefList(nil)
	c.Check(s.repo.RefList(), IsNil)
}
|
||||
|
||||
// TestEncodeDecode round-trips LocalRepo through its msgpack encoding.
func (s *LocalRepoSuite) TestEncodeDecode(c *C) {
	repo := &LocalRepo{}
	err := repo.Decode(s.repo.Encode())
	c.Assert(err, IsNil)

	c.Check(repo.Name, Equals, s.repo.Name)
	c.Check(repo.Comment, Equals, s.repo.Comment)
}

// TestKey checks the DB key layout: "L" prefix + 36-char UUID.
func (s *LocalRepoSuite) TestKey(c *C) {
	c.Assert(len(s.repo.Key()), Equals, 37)
	c.Assert(s.repo.Key()[0], Equals, byte('L'))
}

// TestRefKey checks the reflist key: "E" prefix + same UUID as Key.
func (s *LocalRepoSuite) TestRefKey(c *C) {
	c.Assert(len(s.repo.RefKey()), Equals, 37)
	c.Assert(s.repo.RefKey()[0], Equals, byte('E'))
	c.Assert(s.repo.RefKey()[1:], DeepEquals, s.repo.Key()[1:])
}
|
||||
|
||||
// LocalRepoCollectionSuite tests LocalRepoCollection against a temporary DB.
type LocalRepoCollectionSuite struct {
	db         database.Storage
	collection *LocalRepoCollection
	list       *PackageList
	reflist    *PackageRefList
}

var _ = Suite(&LocalRepoCollectionSuite{})

// SetUpTest opens a scratch DB, an empty collection and a two-package reflist.
func (s *LocalRepoCollectionSuite) SetUpTest(c *C) {
	s.db, _ = database.OpenDB(c.MkDir())
	s.collection = NewLocalRepoCollection(s.db)

	s.list = NewPackageList()
	s.list.Add(&Package{Name: "lib", Version: "1.7", Architecture: "i386"})
	s.list.Add(&Package{Name: "app", Version: "1.9", Architecture: "amd64"})

	s.reflist = NewPackageRefListFromPackageList(s.list)
}

// TearDownTest closes the scratch database.
func (s *LocalRepoCollectionSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
func (s *LocalRepoCollectionSuite) TestAddByName(c *C) {
|
||||
r, err := s.collection.ByName("local1")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo := NewLocalRepo("local1", "Comment 1")
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
c.Assert(s.collection.Add(repo), ErrorMatches, ".*already exists")
|
||||
|
||||
r, err = s.collection.ByName("local1")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
|
||||
collection := NewLocalRepoCollection(s.db)
|
||||
r, err = collection.ByName("local1")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
}
|
||||
|
||||
func (s *LocalRepoCollectionSuite) TestByUUID(c *C) {
|
||||
r, err := s.collection.ByUUID("some-uuid")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo := NewLocalRepo("local1", "Comment 1")
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
|
||||
r, err = s.collection.ByUUID(repo.UUID)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
}
|
||||
|
||||
// TestUpdateLoadComplete verifies Update persists repo metadata immediately
// while the reflist is only materialized by LoadComplete.
func (s *LocalRepoCollectionSuite) TestUpdateLoadComplete(c *C) {
	repo := NewLocalRepo("local1", "Comment 1")
	c.Assert(s.collection.Update(repo), IsNil)

	collection := NewLocalRepoCollection(s.db)
	r, err := collection.ByName("local1")
	c.Assert(err, IsNil)
	c.Assert(r.packageRefs, IsNil)

	repo.packageRefs = s.reflist
	c.Assert(s.collection.Update(repo), IsNil)

	collection = NewLocalRepoCollection(s.db)
	r, err = collection.ByName("local1")
	c.Assert(err, IsNil)
	c.Assert(r.packageRefs, IsNil)
	c.Assert(r.NumPackages(), Equals, 0)
	c.Assert(s.collection.LoadComplete(r), IsNil)
	c.Assert(r.NumPackages(), Equals, 2)
}

// TestForEachAndLen verifies iteration, length and error propagation.
func (s *LocalRepoCollectionSuite) TestForEachAndLen(c *C) {
	repo := NewLocalRepo("local1", "Comment 1")
	s.collection.Add(repo)

	count := 0
	err := s.collection.ForEach(func(*LocalRepo) error {
		count++
		return nil
	})
	c.Assert(count, Equals, 1)
	c.Assert(err, IsNil)

	c.Check(s.collection.Len(), Equals, 1)

	e := errors.New("c")

	err = s.collection.ForEach(func(*LocalRepo) error {
		return e
	})
	c.Assert(err, Equals, e)
}
|
||||
|
||||
// TestDrop verifies Drop removes the repo from the collection and the DB,
// and panics when asked to drop a repo that is no longer present.
func (s *LocalRepoCollectionSuite) TestDrop(c *C) {
	repo1 := NewLocalRepo("local1", "Comment 1")
	s.collection.Add(repo1)

	repo2 := NewLocalRepo("local2", "Comment 2")
	s.collection.Add(repo2)

	r1, _ := s.collection.ByUUID(repo1.UUID)
	c.Check(r1, Equals, repo1)

	err := s.collection.Drop(repo1)
	c.Check(err, IsNil)

	_, err = s.collection.ByUUID(repo1.UUID)
	c.Check(err, ErrorMatches, "local repo .* not found")

	// dropping must also be visible to a freshly loaded collection
	collection := NewLocalRepoCollection(s.db)
	_, err = collection.ByName("local1")
	c.Check(err, ErrorMatches, "local repo .* not found")

	r2, _ := collection.ByName("local2")
	c.Check(r2.String(), Equals, repo2.String())

	c.Check(func() { s.collection.Drop(repo1) }, Panics, "local repo not found!")
}
|
||||
487
debian/package.go
vendored
487
debian/package.go
vendored
@@ -1,487 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/utils"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Package is a single instance of a Debian package
type Package struct {
	// Basic package properties
	Name         string
	Version      string
	Architecture string
	// If this is a source package, this field holds the "real" architecture
	// value, while Architecture would be equal to "source"
	SourceArchitecture string
	// For a binary package, name of the source package
	Source string
	// List of virtual packages this package provides
	Provides []string
	// Is this a source package?
	IsSource bool
	// Hash of files section
	FilesHash uint64
	// Offload fields, stored separately from the basic properties
	deps  *PackageDependencies
	extra *Stanza
	files *PackageFiles
	// Mother collection
	collection *PackageCollection
}
|
||||
|
||||
// NewPackageFromControlFile creates Package from a parsed Debian control file
// (Packages index stanza). Fields represented explicitly in the struct are
// deleted from the stanza; whatever remains is retained in result.extra.
// Note: the order of reads vs deletes below matters — Size/Filename/checksum
// fields are read before they are removed from the stanza.
func NewPackageFromControlFile(input Stanza) *Package {
	result := &Package{
		Name:         input["Package"],
		Version:      input["Version"],
		Architecture: input["Architecture"],
		Source:       input["Source"],
	}

	delete(input, "Package")
	delete(input, "Version")
	delete(input, "Architecture")
	delete(input, "Source")

	// parse error is deliberately ignored — filesize stays 0 on bad input
	filesize, _ := strconv.ParseInt(input["Size"], 10, 64)

	result.UpdateFiles(PackageFiles{PackageFile{
		Filename:     filepath.Base(input["Filename"]),
		downloadPath: filepath.Dir(input["Filename"]),
		Checksums: utils.ChecksumInfo{
			Size:   filesize,
			MD5:    strings.TrimSpace(input["MD5sum"]),
			SHA1:   strings.TrimSpace(input["SHA1"]),
			SHA256: strings.TrimSpace(input["SHA256"]),
		},
	}})

	delete(input, "Filename")
	delete(input, "MD5sum")
	delete(input, "SHA1")
	delete(input, "SHA256")
	delete(input, "Size")

	depends := &PackageDependencies{}
	depends.Depends = parseDependencies(input, "Depends")
	depends.PreDepends = parseDependencies(input, "Pre-Depends")
	depends.Suggests = parseDependencies(input, "Suggests")
	depends.Recommends = parseDependencies(input, "Recommends")
	result.deps = depends

	// Provides uses the same comma-separated list syntax as dependencies
	result.Provides = parseDependencies(input, "Provides")

	// keep the remaining, unconsumed stanza fields verbatim
	result.extra = &input

	return result
}
|
||||
|
||||
// NewSourcePackageFromControlFile creates Package from parsed Debian control
// file for source package. Source packages get the synthetic architecture
// "source"; the control file's real Architecture field is preserved in
// SourceArchitecture. The stanza is consumed destructively (parsed fields are
// deleted) and the remainder is kept as extra metadata.
func NewSourcePackageFromControlFile(input Stanza) (*Package, error) {
	result := &Package{
		IsSource:           true,
		Name:               input["Package"],
		Version:            input["Version"],
		Architecture:       "source",
		SourceArchitecture: input["Architecture"],
	}

	delete(input, "Package")
	delete(input, "Version")
	delete(input, "Architecture")

	// Source packages typically list three files (.dsc, .orig tarball, diff).
	files := make(PackageFiles, 0, 3)

	// parseSums parses one checksum field ("Files", "Checksums-Sha1", ...),
	// each line being "<sum> <size> <filename>". Entries for the same
	// filename across fields are merged into a single PackageFile; setter
	// stores the parsed sum into the right ChecksumInfo member. The field is
	// removed from input afterwards.
	parseSums := func(field string, setter func(sum *utils.ChecksumInfo, data string)) error {
		for _, line := range strings.Split(input[field], "\n") {
			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}
			parts := strings.Fields(line)

			if len(parts) != 3 {
				return fmt.Errorf("unparseable hash sum line: %#v", line)
			}

			size, err := strconv.ParseInt(parts[1], 10, 64)
			if err != nil {
				return fmt.Errorf("unable to parse size: %s", err)
			}

			filename := filepath.Base(parts[2])

			// Linear scan is fine: source packages have only a handful of files.
			found := false
			pos := 0
			for i, file := range files {
				if file.Filename == filename {
					found = true
					pos = i
					break
				}
			}

			if !found {
				files = append(files, PackageFile{Filename: filename, downloadPath: input["Directory"]})
				pos = len(files) - 1
			}

			files[pos].Checksums.Size = size
			setter(&files[pos].Checksums, parts[0])
		}

		delete(input, field)

		return nil
	}

	err := parseSums("Files", func(sum *utils.ChecksumInfo, data string) { sum.MD5 = data })
	if err != nil {
		return nil, err
	}
	err = parseSums("Checksums-Sha1", func(sum *utils.ChecksumInfo, data string) { sum.SHA1 = data })
	if err != nil {
		return nil, err
	}
	err = parseSums("Checksums-Sha256", func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data })
	if err != nil {
		return nil, err
	}

	result.UpdateFiles(files)

	// Only build-time dependencies apply to source packages.
	depends := &PackageDependencies{}
	depends.BuildDepends = parseDependencies(input, "Build-Depends")
	depends.BuildDependsInDep = parseDependencies(input, "Build-Depends-Indep")
	result.deps = depends

	result.extra = &input

	return result, nil
}
|
||||
|
||||
// Key returns unique key identifying package
|
||||
func (p *Package) Key(prefix string) []byte {
|
||||
return []byte(prefix + "P" + p.Architecture + " " + p.Name + " " + p.Version)
|
||||
}
|
||||
|
||||
// String creates readable representation
|
||||
func (p *Package) String() string {
|
||||
return fmt.Sprintf("%s_%s_%s", p.Name, p.Version, p.Architecture)
|
||||
}
|
||||
|
||||
// MatchesArchitecture checks whether packages matches specified architecture
|
||||
func (p *Package) MatchesArchitecture(arch string) bool {
|
||||
if p.Architecture == "all" && arch != "source" {
|
||||
return true
|
||||
}
|
||||
|
||||
return p.Architecture == arch
|
||||
}
|
||||
|
||||
// MatchesDependency checks whether package matches specified dependency
|
||||
func (p *Package) MatchesDependency(dep Dependency) bool {
|
||||
if dep.Pkg != p.Name {
|
||||
return false
|
||||
}
|
||||
|
||||
if dep.Architecture != "" && !p.MatchesArchitecture(dep.Architecture) {
|
||||
return false
|
||||
}
|
||||
|
||||
if dep.Relation == VersionDontCare {
|
||||
return true
|
||||
}
|
||||
|
||||
r := CompareVersions(p.Version, dep.Version)
|
||||
switch dep.Relation {
|
||||
case VersionEqual:
|
||||
return r == 0
|
||||
case VersionLess:
|
||||
return r < 0
|
||||
case VersionGreater:
|
||||
return r > 0
|
||||
case VersionLessOrEqual:
|
||||
return r <= 0
|
||||
case VersionGreaterOrEqual:
|
||||
return r >= 0
|
||||
}
|
||||
|
||||
panic("unknown relation")
|
||||
}
|
||||
|
||||
// GetDependencies compiles list of dependenices by flags from options
|
||||
func (p *Package) GetDependencies(options int) (dependencies []string) {
|
||||
deps := p.Deps()
|
||||
|
||||
dependencies = make([]string, 0, 30)
|
||||
dependencies = append(dependencies, deps.Depends...)
|
||||
dependencies = append(dependencies, deps.PreDepends...)
|
||||
|
||||
if options&DepFollowRecommends == DepFollowRecommends {
|
||||
dependencies = append(dependencies, deps.Recommends...)
|
||||
}
|
||||
|
||||
if options&DepFollowSuggests == DepFollowSuggests {
|
||||
dependencies = append(dependencies, deps.Suggests...)
|
||||
}
|
||||
|
||||
if options&DepFollowBuild == DepFollowBuild {
|
||||
dependencies = append(dependencies, deps.BuildDepends...)
|
||||
dependencies = append(dependencies, deps.BuildDependsInDep...)
|
||||
}
|
||||
|
||||
if options&DepFollowSource == DepFollowSource {
|
||||
source := p.Source
|
||||
if source == "" {
|
||||
source = p.Name
|
||||
}
|
||||
if strings.Index(source, ")") != -1 {
|
||||
dependencies = append(dependencies, fmt.Sprintf("%s {source}", source))
|
||||
} else {
|
||||
dependencies = append(dependencies, fmt.Sprintf("%s (= %s) {source}", source, p.Version))
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Extra returns Stanza of extra fields (it may load it from collection).
// Panics when neither the in-memory copy nor a backing collection is available.
func (p *Package) Extra() Stanza {
	if p.extra == nil {
		if p.collection == nil {
			panic("extra == nil && collection == nil")
		}
		p.extra = p.collection.loadExtra(p)
	}

	return *p.extra
}

// Deps returns parsed package dependencies (it may load it from collection).
// Panics when neither the in-memory copy nor a backing collection is available.
func (p *Package) Deps() *PackageDependencies {
	if p.deps == nil {
		if p.collection == nil {
			panic("deps == nil && collection == nil")
		}

		p.deps = p.collection.loadDependencies(p)
	}

	return p.deps
}

// Files returns parsed files records (it may load it from collection).
// Panics when neither the in-memory copy nor a backing collection is available.
func (p *Package) Files() PackageFiles {
	if p.files == nil {
		if p.collection == nil {
			panic("files == nil && collection == nil")
		}

		p.files = p.collection.loadFiles(p)
	}

	return *p.files
}

// UpdateFiles saves new state of files, keeping FilesHash in sync with
// the files list.
func (p *Package) UpdateFiles(files PackageFiles) {
	p.files = &files
	p.FilesHash = files.Hash()
}
|
||||
|
||||
// Stanza creates original stanza from package, reversing the parsing done by
// NewPackageFromControlFile / NewSourcePackageFromControlFile: promoted fields,
// file/checksum fields and dependency fields are merged back into a copy of
// the extra stanza.
func (p *Package) Stanza() (result Stanza) {
	result = p.Extra().Copy()
	result["Package"] = p.Name
	result["Version"] = p.Version

	if p.IsSource {
		// Source packages report their real architecture, not "source".
		result["Architecture"] = p.SourceArchitecture
	} else {
		result["Architecture"] = p.Architecture
		result["Source"] = p.Source
	}

	if p.IsSource {
		// Rebuild the multi-line checksum fields, one " <sum> <size> <name>"
		// line per file, skipping sums that were never recorded.
		md5, sha1, sha256 := make([]string, 0), make([]string, 0), make([]string, 0)

		for _, f := range p.Files() {
			if f.Checksums.MD5 != "" {
				md5 = append(md5, fmt.Sprintf(" %s %d %s\n", f.Checksums.MD5, f.Checksums.Size, f.Filename))
			}
			if f.Checksums.SHA1 != "" {
				sha1 = append(sha1, fmt.Sprintf(" %s %d %s\n", f.Checksums.SHA1, f.Checksums.Size, f.Filename))
			}
			if f.Checksums.SHA256 != "" {
				sha256 = append(sha256, fmt.Sprintf(" %s %d %s\n", f.Checksums.SHA256, f.Checksums.Size, f.Filename))
			}
		}

		result["Files"] = strings.Join(md5, "")
		result["Checksums-Sha1"] = strings.Join(sha1, "")
		result["Checksums-Sha256"] = strings.Join(sha256, "")
	} else {
		// Binary packages have a single file; assumes Files() is non-empty
		// (guaranteed by the control-file constructors).
		f := p.Files()[0]
		result["Filename"] = f.DownloadURL()
		if f.Checksums.MD5 != "" {
			result["MD5sum"] = f.Checksums.MD5
		}
		if f.Checksums.SHA1 != "" {
			result["SHA1"] = " " + f.Checksums.SHA1
		}
		if f.Checksums.SHA256 != "" {
			result["SHA256"] = " " + f.Checksums.SHA256
		}
		result["Size"] = fmt.Sprintf("%d", f.Checksums.Size)
	}

	deps := p.Deps()

	// Only emit dependency fields that were present originally (nil means
	// the field was absent from the control file).
	if deps.Depends != nil {
		result["Depends"] = strings.Join(deps.Depends, ", ")
	}
	if deps.PreDepends != nil {
		result["Pre-Depends"] = strings.Join(deps.PreDepends, ", ")
	}
	if deps.Suggests != nil {
		result["Suggests"] = strings.Join(deps.Suggests, ", ")
	}
	if deps.Recommends != nil {
		result["Recommends"] = strings.Join(deps.Recommends, ", ")
	}
	if p.Provides != nil {
		result["Provides"] = strings.Join(p.Provides, ", ")
	}
	if deps.BuildDepends != nil {
		result["Build-Depends"] = strings.Join(deps.BuildDepends, ", ")
	}
	if deps.BuildDependsInDep != nil {
		result["Build-Depends-Indep"] = strings.Join(deps.BuildDependsInDep, ", ")
	}

	return
}
|
||||
|
||||
// Equals compares two packages to be identical
|
||||
func (p *Package) Equals(p2 *Package) bool {
|
||||
return p.Name == p2.Name && p.Version == p2.Version && p.SourceArchitecture == p2.SourceArchitecture &&
|
||||
p.Architecture == p2.Architecture && p.Source == p2.Source && p.IsSource == p2.IsSource &&
|
||||
p.FilesHash == p2.FilesHash
|
||||
}
|
||||
|
||||
// LinkFromPool links package file from pool to dist's pool location.
// As a side effect it records the published location back into the package:
// for source packages via the "Directory" extra field, for binary packages
// via each file's downloadPath.
func (p *Package) LinkFromPool(publishedStorage aptly.PublishedStorage, packagePool aptly.PackagePool, prefix string, component string) error {
	poolDir, err := p.PoolDirectory()
	if err != nil {
		return err
	}

	for i, f := range p.Files() {
		sourcePath, err := packagePool.Path(f.Filename, f.Checksums.MD5)
		if err != nil {
			return err
		}

		relPath, err := publishedStorage.LinkFromPool(prefix, component, poolDir, packagePool, sourcePath)
		if err != nil {
			return err
		}

		dir := filepath.Dir(relPath)
		if p.IsSource {
			p.Extra()["Directory"] = dir
		} else {
			// Index into Files() so the update lands on the stored slice,
			// not on the loop's copy of the element.
			p.Files()[i].downloadPath = dir
		}
	}

	return nil
}
|
||||
|
||||
// PoolDirectory returns directory in package pool of published repository for this package files
|
||||
func (p *Package) PoolDirectory() (string, error) {
|
||||
source := p.Source
|
||||
if source == "" {
|
||||
source = p.Name
|
||||
}
|
||||
|
||||
if len(source) < 2 {
|
||||
return "", fmt.Errorf("package source %s too short", source)
|
||||
}
|
||||
|
||||
var subdir string
|
||||
if strings.HasPrefix(source, "lib") {
|
||||
subdir = source[:4]
|
||||
} else {
|
||||
subdir = source[:1]
|
||||
|
||||
}
|
||||
|
||||
return filepath.Join(subdir, source), nil
|
||||
}
|
||||
|
||||
// PackageDownloadTask is a element of download queue for the package
type PackageDownloadTask struct {
	// RepoURI is the source URL (relative to the repo) to download from
	RepoURI string
	// DestinationPath is the local pool path the file is saved to
	DestinationPath string
	// Checksums are the expected hashes/size used to verify the download
	Checksums utils.ChecksumInfo
}
|
||||
|
||||
// DownloadList returns list of missing package files for download in format
// [[srcpath, dstpath]]. Files that already verify against the pool are
// skipped; only missing or size-mismatched files are queued.
func (p *Package) DownloadList(packagePool aptly.PackagePool) (result []PackageDownloadTask, err error) {
	result = make([]PackageDownloadTask, 0, 1)

	for _, f := range p.Files() {
		poolPath, err := packagePool.Path(f.Filename, f.Checksums.MD5)
		if err != nil {
			return nil, err
		}

		verified, err := f.Verify(packagePool)
		if err != nil {
			return nil, err
		}

		if !verified {
			result = append(result, PackageDownloadTask{RepoURI: f.DownloadURL(), DestinationPath: poolPath, Checksums: f.Checksums})
		}
	}

	return result, nil
}
|
||||
|
||||
// VerifyFiles verifies that all package files have been correctly downloaded.
// Returns false (or the first error) as soon as one file fails verification.
func (p *Package) VerifyFiles(packagePool aptly.PackagePool) (result bool, err error) {
	result = true

	for _, f := range p.Files() {
		result, err = f.Verify(packagePool)
		if err != nil || !result {
			return
		}
	}

	return
}
|
||||
|
||||
// FilepathList returns list of paths to files in package repository
|
||||
func (p *Package) FilepathList(packagePool aptly.PackagePool) ([]string, error) {
|
||||
var err error
|
||||
result := make([]string, len(p.Files()))
|
||||
|
||||
for i, f := range p.Files() {
|
||||
result[i], err = packagePool.RelativePath(f.Filename, f.Checksums.MD5)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
256
debian/package_collection.go
vendored
256
debian/package_collection.go
vendored
@@ -1,256 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/ugorji/go/codec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// PackageCollection does management of packages in DB
type PackageCollection struct {
	db database.Storage
	// encodeBuffer is reused across saves to avoid reallocating on every encode
	encodeBuffer bytes.Buffer
}

// NewPackageCollection creates new PackageCollection and binds it to database
func NewPackageCollection(db database.Storage) *PackageCollection {
	return &PackageCollection{
		db: db,
	}
}
|
||||
|
||||
// oldPackage is Package struct for aptly < 0.4 with all fields in one struct.
// It is used to decode old aptly DBs; ByKey converts such records to the new
// format (with dependencies, files and extra stored separately) on first read.
type oldPackage struct {
	IsSource           bool
	Name               string
	Version            string
	Architecture       string
	SourceArchitecture string
	Source             string
	Provides           []string
	Depends            []string
	BuildDepends       []string
	BuildDependsInDep  []string
	PreDepends         []string
	Suggests           []string
	Recommends         []string
	Files              []PackageFile
	Extra              Stanza
}
|
||||
|
||||
// ByKey find package in DB by its key. Records in the old (pre-0.4) format
// are transparently converted and re-saved in the new format.
func (collection *PackageCollection) ByKey(key []byte) (*Package, error) {
	encoded, err := collection.db.Get(key)
	if err != nil {
		return nil, err
	}

	p := &Package{}

	// New-format records are prefixed with the marker bytes 0xc1 0x1
	// written by internalUpdate; anything else is an old record with all
	// fields inline.
	if len(encoded) > 2 && (encoded[0] != 0xc1 || encoded[1] != 0x1) {
		oldp := &oldPackage{}

		decoder := codec.NewDecoderBytes(encoded, &codec.MsgpackHandle{})
		err = decoder.Decode(oldp)
		if err != nil {
			return nil, err
		}

		p.Name = oldp.Name
		p.Version = oldp.Version
		p.Architecture = oldp.Architecture
		p.IsSource = oldp.IsSource
		p.SourceArchitecture = oldp.SourceArchitecture
		p.Source = oldp.Source
		p.Provides = oldp.Provides

		p.deps = &PackageDependencies{
			Depends:           oldp.Depends,
			BuildDepends:      oldp.BuildDepends,
			BuildDependsInDep: oldp.BuildDependsInDep,
			PreDepends:        oldp.PreDepends,
			Suggests:          oldp.Suggests,
			Recommends:        oldp.Recommends,
		}

		p.extra = &oldp.Extra
		// Old records stored full paths; new format keeps only the basename.
		for i := range oldp.Files {
			oldp.Files[i].Filename = filepath.Base(oldp.Files[i].Filename)
		}
		p.UpdateFiles(PackageFiles(oldp.Files))

		// Save in new format
		err = collection.internalUpdate(p)
		if err != nil {
			return nil, err
		}
	} else {
		// Skip the two marker bytes before decoding.
		decoder := codec.NewDecoderBytes(encoded[2:], &codec.MsgpackHandle{})
		err = decoder.Decode(p)
		if err != nil {
			return nil, err
		}
	}

	// Bind the package to this collection so offloaded fields (deps, extra,
	// files) can be lazy-loaded later.
	p.collection = collection

	return p, nil
}
|
||||
|
||||
// loadExtra loads Stanza with all the extra information about the package
// from its offloaded "xE" record. Panics on storage/decode failure since a
// missing record for an existing package indicates DB corruption.
func (collection *PackageCollection) loadExtra(p *Package) *Stanza {
	encoded, err := collection.db.Get(p.Key("xE"))
	if err != nil {
		panic("unable to load extra")
	}

	stanza := &Stanza{}

	decoder := codec.NewDecoderBytes(encoded, &codec.MsgpackHandle{})
	err = decoder.Decode(stanza)
	if err != nil {
		panic("unable to decode extra")
	}

	return stanza
}

// loadDependencies loads dependencies for the package from its offloaded
// "xD" record. Panics on storage/decode failure.
func (collection *PackageCollection) loadDependencies(p *Package) *PackageDependencies {
	encoded, err := collection.db.Get(p.Key("xD"))
	if err != nil {
		panic(fmt.Sprintf("unable to load deps: %s, %s", p, err))
	}

	deps := &PackageDependencies{}

	decoder := codec.NewDecoderBytes(encoded, &codec.MsgpackHandle{})
	err = decoder.Decode(deps)
	if err != nil {
		panic("unable to decode deps")
	}

	return deps
}

// loadFiles loads additional PackageFiles record from the offloaded "xF"
// record. Panics on storage/decode failure.
func (collection *PackageCollection) loadFiles(p *Package) *PackageFiles {
	encoded, err := collection.db.Get(p.Key("xF"))
	if err != nil {
		panic("unable to load files")
	}

	files := &PackageFiles{}

	decoder := codec.NewDecoderBytes(encoded, &codec.MsgpackHandle{})
	err = decoder.Decode(files)
	if err != nil {
		panic("unable to decode files")
	}

	return files
}
|
||||
|
||||
// Update adds or updates information about package in DB checking for
// conflicts first: a package with the same key but different files is
// rejected, while metadata-only differences are saved.
func (collection *PackageCollection) Update(p *Package) error {
	existing, err := collection.ByKey(p.Key(""))
	if err == nil {
		// if .Files is different, consider it to be a conflict
		// (note: the "packge" typo below is kept as-is; the test suite
		// matches this exact message)
		if p.FilesHash != existing.FilesHash {
			return fmt.Errorf("unable to save: %s, conflict with existing packge", p)
		}
		// ok, .Files are the same, but maybe some meta-data is different, proceed to saving
	} else {
		if err != database.ErrNotFound {
			return err
		}
		// ok, package doesn't exist yet
	}

	return collection.internalUpdate(p)
}
|
||||
|
||||
// internalUpdate updates information in DB about package and offloaded fields.
// The main record is written with a 0xc1 0x1 marker prefix so ByKey can
// distinguish it from old-format records; deps, extra and files are stored
// under separate "xD"/"xE"/"xF" keys. After a successful save, deps and
// extra are dropped from memory (they will be lazy-loaded on demand), and
// the package is bound to this collection.
func (collection *PackageCollection) internalUpdate(p *Package) error {
	encoder := codec.NewEncoder(&collection.encodeBuffer, &codec.MsgpackHandle{})

	// Main record: marker bytes + encoded Package.
	collection.encodeBuffer.Reset()
	collection.encodeBuffer.WriteByte(0xc1)
	collection.encodeBuffer.WriteByte(0x1)
	err := encoder.Encode(p)
	if err != nil {
		return err
	}

	err = collection.db.Put(p.Key(""), collection.encodeBuffer.Bytes())
	if err != nil {
		return err
	}

	// Encode offloaded fields one by one
	if p.files != nil {
		collection.encodeBuffer.Reset()
		err = encoder.Encode(*p.files)
		if err != nil {
			return err
		}

		err = collection.db.Put(p.Key("xF"), collection.encodeBuffer.Bytes())
		if err != nil {
			return err
		}
	}

	if p.deps != nil {
		collection.encodeBuffer.Reset()
		err = encoder.Encode(*p.deps)
		if err != nil {
			return err
		}

		err = collection.db.Put(p.Key("xD"), collection.encodeBuffer.Bytes())
		if err != nil {
			return err
		}

		// Free in-memory copy; Deps() will reload from DB when needed.
		p.deps = nil
	}

	if p.extra != nil {
		collection.encodeBuffer.Reset()
		err = encoder.Encode(*p.extra)
		if err != nil {
			return err
		}

		err = collection.db.Put(p.Key("xE"), collection.encodeBuffer.Bytes())
		if err != nil {
			return err
		}

		// Free in-memory copy; Extra() will reload from DB when needed.
		p.extra = nil
	}

	p.collection = collection

	return nil
}
|
||||
|
||||
// AllPackageRefs returns list of all packages as PackageRefList
|
||||
func (collection *PackageCollection) AllPackageRefs() *PackageRefList {
|
||||
return &PackageRefList{Refs: collection.db.KeysByPrefix([]byte("P"))}
|
||||
}
|
||||
|
||||
// DeleteByKey deletes package in DB by key
|
||||
func (collection *PackageCollection) DeleteByKey(key []byte) error {
|
||||
for _, key := range [][]byte{key, append([]byte("xF"), key...), append([]byte("xD"), key...), append([]byte("xE"), key...)} {
|
||||
err := collection.db.Delete(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
205
debian/package_collection_test.go
vendored
205
debian/package_collection_test.go
vendored
@@ -1,205 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/utils"
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
// PackageCollectionSuite exercises PackageCollection against a real on-disk DB.
type PackageCollectionSuite struct {
	collection *PackageCollection
	p          *Package
	db         database.Storage
}

var _ = Suite(&PackageCollectionSuite{})

func (s *PackageCollectionSuite) SetUpTest(c *C) {
	// Fresh package and fresh DB (in a temp dir) for every test.
	s.p = NewPackageFromControlFile(packageStanza.Copy())
	s.db, _ = database.OpenDB(c.MkDir())
	s.collection = NewPackageCollection(s.db)
}

func (s *PackageCollectionSuite) TearDownTest(c *C) {
	s.db.Close()
}

func (s *PackageCollectionSuite) TestUpdate(c *C) {
	// package doesn't exist, update ok
	err := s.collection.Update(s.p)
	c.Assert(err, IsNil)
	res, err := s.collection.ByKey(s.p.Key(""))
	c.Assert(err, IsNil)
	c.Assert(res.Equals(s.p), Equals, true)

	// same package, ok
	p2 := NewPackageFromControlFile(packageStanza.Copy())
	err = s.collection.Update(p2)
	c.Assert(err, IsNil)
	res, err = s.collection.ByKey(p2.Key(""))
	c.Assert(err, IsNil)
	c.Assert(res.Equals(s.p), Equals, true)

	// change some metadata: metadata-only changes are accepted
	p2.Source = "lala"
	err = s.collection.Update(p2)
	c.Assert(err, IsNil)
	res, err = s.collection.ByKey(p2.Key(""))
	c.Assert(err, IsNil)
	c.Assert(res.Equals(s.p), Equals, false)
	c.Assert(res.Equals(p2), Equals, true)

	// change file info: differing FilesHash must be rejected as a conflict
	p2 = NewPackageFromControlFile(packageStanza.Copy())
	p2.UpdateFiles(nil)
	res, err = s.collection.ByKey(p2.Key(""))
	err = s.collection.Update(p2)
	c.Assert(err, ErrorMatches, ".*conflict with existing packge")
	p2 = NewPackageFromControlFile(packageStanza.Copy())
	files := p2.Files()
	files[0].Checksums.MD5 = "abcdef"
	p2.UpdateFiles(files)
	res, err = s.collection.ByKey(p2.Key(""))
	err = s.collection.Update(p2)
	c.Assert(err, ErrorMatches, ".*conflict with existing packge")
}
|
||||
|
||||
func (s *PackageCollectionSuite) TestByKey(c *C) {
	err := s.collection.Update(s.p)
	c.Assert(err, IsNil)

	// Round-trip: the loaded package must equal the saved one, including
	// lazily-loaded deps, extra and files.
	p2, err := s.collection.ByKey(s.p.Key(""))
	c.Assert(err, IsNil)
	c.Assert(p2.Equals(s.p), Equals, true)

	c.Check(p2.GetDependencies(0), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)"})
	c.Check(p2.Extra()["Priority"], Equals, "extra")
	c.Check(p2.Files()[0].Filename, Equals, "alien-arena-common_7.40-2_i386.deb")
}

// TestByKeyOld_0_3 verifies that a pre-0.4 single-struct record is decoded
// and converted correctly.
func (s *PackageCollectionSuite) TestByKeyOld_0_3(c *C) {
	key := []byte("Pi386 vmware-view-open-client 4.5.0-297975+dfsg-4+b1")
	s.db.Put(key, old_0_3_Package)

	p, err := s.collection.ByKey(key)
	c.Check(err, IsNil)
	c.Check(p.Name, Equals, "vmware-view-open-client")
	c.Check(p.Version, Equals, "4.5.0-297975+dfsg-4+b1")
	c.Check(p.Architecture, Equals, "i386")
	// Old records stored the full path; conversion keeps only the basename.
	c.Check(p.Files(), DeepEquals, PackageFiles{
		PackageFile{Filename: "vmware-view-open-client_4.5.0-297975+dfsg-4+b1_i386.deb",
			Checksums: utils.ChecksumInfo{
				Size:   520080,
				MD5:    "9c61b54e2638a18f955a695b9162d6af",
				SHA1:   "5b7c99e64a70f4f509bfa3a674088ff9cef68163",
				SHA256: "4a9e4b2d9b3db13f9a29e522f3ffbb34eee96fc6f34a0647042ab1b5b0f2e04d"}}})
	c.Check(p.GetDependencies(0), DeepEquals, []string{"libatk1.0-0 (>= 1.12.4)", "libboost-signals1.49.0 (>= 1.49.0-1)",
		"libc6 (>= 2.3.6-6~)", "libcairo2 (>= 1.2.4)", "libcurl3 (>= 7.18.0)", "libfontconfig1 (>= 2.8.0)", "libfreetype6 (>= 2.2.1)",
		"libgcc1 (>= 1:4.1.1)", "libgdk-pixbuf2.0-0 (>= 2.22.0)", "libglib2.0-0 (>= 2.24.0)", "libgtk2.0-0 (>= 2.24.0)",
		"libicu48 (>= 4.8-1)", "libpango1.0-0 (>= 1.14.0)", "libssl1.0.0 (>= 1.0.0)", "libstdc++6 (>= 4.6)", "libx11-6",
		"libxml2 (>= 2.7.4)", "rdesktop"})
	c.Check(p.Extra()["Priority"], Equals, "optional")
}

func (s *PackageCollectionSuite) TestAllPackageRefs(c *C) {
	err := s.collection.Update(s.p)
	c.Assert(err, IsNil)

	refs := s.collection.AllPackageRefs()
	c.Check(refs.Len(), Equals, 1)
	c.Check(refs.Refs[0], DeepEquals, s.p.Key(""))
}

// TestDeleteByKey verifies that deletion removes the main record and all
// three offloaded records (xD, xE, xF).
func (s *PackageCollectionSuite) TestDeleteByKey(c *C) {
	err := s.collection.Update(s.p)
	c.Assert(err, IsNil)

	_, err = s.db.Get(s.p.Key(""))
	c.Check(err, IsNil)

	_, err = s.db.Get(s.p.Key("xD"))
	c.Check(err, IsNil)

	_, err = s.db.Get(s.p.Key("xE"))
	c.Check(err, IsNil)

	_, err = s.db.Get(s.p.Key("xF"))
	c.Check(err, IsNil)

	err = s.collection.DeleteByKey(s.p.Key(""))
	c.Check(err, IsNil)

	_, err = s.collection.ByKey(s.p.Key(""))
	c.Check(err, ErrorMatches, "key not found")

	_, err = s.db.Get(s.p.Key(""))
	c.Check(err, ErrorMatches, "key not found")

	_, err = s.db.Get(s.p.Key("xD"))
	c.Check(err, ErrorMatches, "key not found")

	_, err = s.db.Get(s.p.Key("xE"))
	c.Check(err, ErrorMatches, "key not found")

	_, err = s.db.Get(s.p.Key("xF"))
	c.Check(err, ErrorMatches, "key not found")
}
|
||||
|
||||
// This is an old package (pre-0.4) that would have to be converted
|
||||
var old_0_3_Package = []byte{0x8f, 0xac, 0x41, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0xa4, 0x69, 0x33, 0x38, 0x36,
|
||||
0xac, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0xc0, 0xb1, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x44, 0x65,
|
||||
0x70, 0x65, 0x6e, 0x64, 0x73, 0x49, 0x6e, 0x44, 0x65, 0x70, 0xc0, 0xa7, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0xdc, 0x0, 0x12,
|
||||
0xb7, 0x6c, 0x69, 0x62, 0x61, 0x74, 0x6b, 0x31, 0x2e, 0x30, 0x2d, 0x30, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x2e, 0x31, 0x32, 0x2e,
|
||||
0x34, 0x29, 0xda, 0x0, 0x24, 0x6c, 0x69, 0x62, 0x62, 0x6f, 0x6f, 0x73, 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x31,
|
||||
0x2e, 0x34, 0x39, 0x2e, 0x30, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x2e, 0x34, 0x39, 0x2e, 0x30, 0x2d, 0x31, 0x29, 0xb3, 0x6c, 0x69,
|
||||
0x62, 0x63, 0x36, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x33, 0x2e, 0x36, 0x2d, 0x36, 0x7e, 0x29, 0xb4, 0x6c, 0x69, 0x62, 0x63,
|
||||
0x61, 0x69, 0x72, 0x6f, 0x32, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x2e, 0x32, 0x2e, 0x34, 0x29, 0xb4, 0x6c, 0x69, 0x62, 0x63, 0x75,
|
||||
0x72, 0x6c, 0x33, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x37, 0x2e, 0x31, 0x38, 0x2e, 0x30, 0x29, 0xb9, 0x6c, 0x69, 0x62, 0x66, 0x6f, 0x6e,
|
||||
0x74, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x31, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x38, 0x2e, 0x30, 0x29, 0xb7, 0x6c, 0x69,
|
||||
0x62, 0x66, 0x72, 0x65, 0x65, 0x74, 0x79, 0x70, 0x65, 0x36, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x32, 0x2e, 0x31, 0x29, 0xb4,
|
||||
0x6c, 0x69, 0x62, 0x67, 0x63, 0x63, 0x31, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x3a, 0x34, 0x2e, 0x31, 0x2e, 0x31, 0x29, 0xbe, 0x6c,
|
||||
0x69, 0x62, 0x67, 0x64, 0x6b, 0x2d, 0x70, 0x69, 0x78, 0x62, 0x75, 0x66, 0x32, 0x2e, 0x30, 0x2d, 0x30, 0x20, 0x28, 0x3e, 0x3d, 0x20,
|
||||
0x32, 0x2e, 0x32, 0x32, 0x2e, 0x30, 0x29, 0xb8, 0x6c, 0x69, 0x62, 0x67, 0x6c, 0x69, 0x62, 0x32, 0x2e, 0x30, 0x2d, 0x30, 0x20, 0x28,
|
||||
0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x32, 0x34, 0x2e, 0x30, 0x29, 0xb7, 0x6c, 0x69, 0x62, 0x67, 0x74, 0x6b, 0x32, 0x2e, 0x30, 0x2d, 0x30,
|
||||
0x20, 0x28, 0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x32, 0x34, 0x2e, 0x30, 0x29, 0xb3, 0x6c, 0x69, 0x62, 0x69, 0x63, 0x75, 0x34, 0x38, 0x20,
|
||||
0x28, 0x3e, 0x3d, 0x20, 0x34, 0x2e, 0x38, 0x2d, 0x31, 0x29, 0xb9, 0x6c, 0x69, 0x62, 0x70, 0x61, 0x6e, 0x67, 0x6f, 0x31, 0x2e, 0x30,
|
||||
0x2d, 0x30, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x2e, 0x31, 0x34, 0x2e, 0x30, 0x29, 0xb6, 0x6c, 0x69, 0x62, 0x73, 0x73, 0x6c, 0x31,
|
||||
0x2e, 0x30, 0x2e, 0x30, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x31, 0x2e, 0x30, 0x2e, 0x30, 0x29, 0xb3, 0x6c, 0x69, 0x62, 0x73, 0x74, 0x64,
|
||||
0x63, 0x2b, 0x2b, 0x36, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x34, 0x2e, 0x36, 0x29, 0xa8, 0x6c, 0x69, 0x62, 0x78, 0x31, 0x31, 0x2d, 0x36,
|
||||
0xb2, 0x6c, 0x69, 0x62, 0x78, 0x6d, 0x6c, 0x32, 0x20, 0x28, 0x3e, 0x3d, 0x20, 0x32, 0x2e, 0x37, 0x2e, 0x34, 0x29, 0xa8, 0x72, 0x64,
|
||||
0x65, 0x73, 0x6b, 0x74, 0x6f, 0x70, 0xa5, 0x45, 0x78, 0x74, 0x72, 0x61, 0x88, 0xa3, 0x54, 0x61, 0x67, 0xbd, 0x72, 0x6f, 0x6c, 0x65,
|
||||
0x3a, 0x3a, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x2c, 0x20, 0x75, 0x69, 0x74, 0x6f, 0x6f, 0x6c, 0x6b, 0x69, 0x74, 0x3a, 0x3a,
|
||||
0x67, 0x74, 0x6b, 0xa8, 0x50, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0xa8, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0xaa,
|
||||
0x4d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0xda, 0x0, 0x28, 0x44, 0x65, 0x62, 0x69, 0x61, 0x6e, 0x20, 0x51, 0x41,
|
||||
0x20, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x20, 0x3c, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x40, 0x71, 0x61, 0x2e, 0x64, 0x65,
|
||||
0x62, 0x69, 0x61, 0x6e, 0x2e, 0x6f, 0x72, 0x67, 0x3e, 0xa8, 0x48, 0x6f, 0x6d, 0x65, 0x70, 0x61, 0x67, 0x65, 0xda, 0x0, 0x30, 0x68,
|
||||
0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
|
||||
0x70, 0x2f, 0x76, 0x6d, 0x77, 0x61, 0x72, 0x65, 0x2d, 0x76, 0x69, 0x65, 0x77, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x2d, 0x63, 0x6c, 0x69,
|
||||
0x65, 0x6e, 0x74, 0xaf, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6d, 0x64, 0x35, 0xda, 0x0, 0x20,
|
||||
0x62, 0x34, 0x34, 0x64, 0x34, 0x39, 0x62, 0x34, 0x37, 0x61, 0x65, 0x30, 0x35, 0x35, 0x32, 0x63, 0x62, 0x66, 0x61, 0x64, 0x32, 0x31,
|
||||
0x30, 0x64, 0x65, 0x32, 0x31, 0x63, 0x64, 0x65, 0x31, 0x39, 0xa7, 0x53, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0xab, 0x63, 0x6f, 0x6e,
|
||||
0x74, 0x72, 0x69, 0x62, 0x2f, 0x78, 0x31, 0x31, 0xae, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x65, 0x64, 0x2d, 0x53, 0x69, 0x7a,
|
||||
0x65, 0xa4, 0x31, 0x34, 0x35, 0x39, 0xab, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0xb9, 0x20, 0x56, 0x4d,
|
||||
0x77, 0x61, 0x72, 0x65, 0x20, 0x56, 0x69, 0x65, 0x77, 0x20, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0xa,
|
||||
0xa5, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x91, 0x82, 0xa9, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x73, 0x75, 0x6d, 0x73, 0x84, 0xa3, 0x4d, 0x44,
|
||||
0x35, 0xda, 0x0, 0x20, 0x39, 0x63, 0x36, 0x31, 0x62, 0x35, 0x34, 0x65, 0x32, 0x36, 0x33, 0x38, 0x61, 0x31, 0x38, 0x66, 0x39, 0x35,
|
||||
0x35, 0x61, 0x36, 0x39, 0x35, 0x62, 0x39, 0x31, 0x36, 0x32, 0x64, 0x36, 0x61, 0x66, 0xa4, 0x53, 0x48, 0x41, 0x31, 0xda, 0x0, 0x28,
|
||||
0x35, 0x62, 0x37, 0x63, 0x39, 0x39, 0x65, 0x36, 0x34, 0x61, 0x37, 0x30, 0x66, 0x34, 0x66, 0x35, 0x30, 0x39, 0x62, 0x66, 0x61, 0x33,
|
||||
0x61, 0x36, 0x37, 0x34, 0x30, 0x38, 0x38, 0x66, 0x66, 0x39, 0x63, 0x65, 0x66, 0x36, 0x38, 0x31, 0x36, 0x33, 0xa6, 0x53, 0x48, 0x41,
|
||||
0x32, 0x35, 0x36, 0xda, 0x0, 0x40, 0x34, 0x61, 0x39, 0x65, 0x34, 0x62, 0x32, 0x64, 0x39, 0x62, 0x33, 0x64, 0x62, 0x31, 0x33, 0x66,
|
||||
0x39, 0x61, 0x32, 0x39, 0x65, 0x35, 0x32, 0x32, 0x66, 0x33, 0x66, 0x66, 0x62, 0x62, 0x33, 0x34, 0x65, 0x65, 0x65, 0x39, 0x36, 0x66,
|
||||
0x63, 0x36, 0x66, 0x33, 0x34, 0x61, 0x30, 0x36, 0x34, 0x37, 0x30, 0x34, 0x32, 0x61, 0x62, 0x31, 0x62, 0x35, 0x62, 0x30, 0x66, 0x32,
|
||||
0x65, 0x30, 0x34, 0x64, 0xa4, 0x53, 0x69, 0x7a, 0x65, 0xce, 0x0, 0x7, 0xef, 0x90, 0xa8, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d,
|
||||
0x65, 0xda, 0x0, 0x5e, 0x70, 0x6f, 0x6f, 0x6c, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2f, 0x76, 0x2f, 0x76, 0x6d, 0x77,
|
||||
0x61, 0x72, 0x65, 0x2d, 0x76, 0x69, 0x65, 0x77, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x2d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2f, 0x76,
|
||||
0x6d, 0x77, 0x61, 0x72, 0x65, 0x2d, 0x76, 0x69, 0x65, 0x77, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x2d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74,
|
||||
0x5f, 0x34, 0x2e, 0x35, 0x2e, 0x30, 0x2d, 0x32, 0x39, 0x37, 0x39, 0x37, 0x35, 0x2b, 0x64, 0x66, 0x73, 0x67, 0x2d, 0x34, 0x2b, 0x62,
|
||||
0x31, 0x5f, 0x69, 0x33, 0x38, 0x36, 0x2e, 0x64, 0x65, 0x62, 0xa8, 0x49, 0x73, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0xc2, 0xa4, 0x4e,
|
||||
0x61, 0x6d, 0x65, 0xb7, 0x76, 0x6d, 0x77, 0x61, 0x72, 0x65, 0x2d, 0x76, 0x69, 0x65, 0x77, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x2d, 0x63,
|
||||
0x6c, 0x69, 0x65, 0x6e, 0x74, 0xaa, 0x50, 0x72, 0x65, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0xc0, 0xa8, 0x50, 0x72, 0x6f, 0x76,
|
||||
0x69, 0x64, 0x65, 0x73, 0xc0, 0xaa, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x73, 0xc0, 0xa6, 0x53, 0x6f, 0x75, 0x72,
|
||||
0x63, 0x65, 0xda, 0x0, 0x2d, 0x76, 0x6d, 0x77, 0x61, 0x72, 0x65, 0x2d, 0x76, 0x69, 0x65, 0x77, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x2d,
|
||||
0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x20, 0x28, 0x34, 0x2e, 0x35, 0x2e, 0x30, 0x2d, 0x32, 0x39, 0x37, 0x39, 0x37, 0x35, 0x2b, 0x64,
|
||||
0x66, 0x73, 0x67, 0x2d, 0x34, 0x29, 0xb2, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74,
|
||||
0x75, 0x72, 0x65, 0xa0, 0xa8, 0x53, 0x75, 0x67, 0x67, 0x65, 0x73, 0x74, 0x73, 0xc0, 0xa7, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
|
||||
0xb6, 0x34, 0x2e, 0x35, 0x2e, 0x30, 0x2d, 0x32, 0x39, 0x37, 0x39, 0x37, 0x35, 0x2b, 0x64, 0x66, 0x73, 0x67, 0x2d, 0x34, 0x2b, 0x62, 0x31}
|
||||
30
debian/package_deps.go
vendored
30
debian/package_deps.go
vendored
@@ -1,30 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// PackageDependencies are various parsed dependencies
|
||||
type PackageDependencies struct {
|
||||
Depends []string
|
||||
BuildDepends []string
|
||||
BuildDependsInDep []string
|
||||
PreDepends []string
|
||||
Suggests []string
|
||||
Recommends []string
|
||||
}
|
||||
|
||||
func parseDependencies(input Stanza, key string) []string {
|
||||
value, ok := input[key]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
delete(input, key)
|
||||
|
||||
result := strings.Split(value, ",")
|
||||
for i := range result {
|
||||
result[i] = strings.TrimSpace(result[i])
|
||||
}
|
||||
return result
|
||||
}
|
||||
78
debian/package_files.go
vendored
78
debian/package_files.go
vendored
@@ -1,78 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/utils"
|
||||
"hash/fnv"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// PackageFile is a single file entry in package
|
||||
type PackageFile struct {
|
||||
// Filename is name of file for the package (without directory)
|
||||
Filename string
|
||||
// Hashes for the file
|
||||
Checksums utils.ChecksumInfo
|
||||
// Temporary field used while downloading, stored relative path on the mirror
|
||||
downloadPath string
|
||||
}
|
||||
|
||||
// Verify that package file is present and correct
|
||||
func (f *PackageFile) Verify(packagePool aptly.PackagePool) (bool, error) {
|
||||
poolPath, err := packagePool.Path(f.Filename, f.Checksums.MD5)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
st, err := os.Stat(poolPath)
|
||||
if err != nil {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// verify size
|
||||
// TODO: verify checksum if configured
|
||||
return st.Size() == f.Checksums.Size, nil
|
||||
}
|
||||
|
||||
// DownloadURL return relative URL to package download location
|
||||
func (f *PackageFile) DownloadURL() string {
|
||||
return filepath.Join(f.downloadPath, f.Filename)
|
||||
}
|
||||
|
||||
// PackageFiles is collection of package files
|
||||
type PackageFiles []PackageFile
|
||||
|
||||
// Hash compute hash of all file items, sorting them first
|
||||
func (files PackageFiles) Hash() uint64 {
|
||||
sort.Sort(files)
|
||||
|
||||
h := fnv.New64a()
|
||||
|
||||
for _, f := range files {
|
||||
h.Write([]byte(f.Filename))
|
||||
binary.Write(h, binary.BigEndian, f.Checksums.Size)
|
||||
h.Write([]byte(f.Checksums.MD5))
|
||||
h.Write([]byte(f.Checksums.SHA1))
|
||||
h.Write([]byte(f.Checksums.SHA256))
|
||||
}
|
||||
|
||||
return h.Sum64()
|
||||
}
|
||||
|
||||
// Len returns number of files
|
||||
func (files PackageFiles) Len() int {
|
||||
return len(files)
|
||||
}
|
||||
|
||||
// Swap swaps elements
|
||||
func (files PackageFiles) Swap(i, j int) {
|
||||
files[i], files[j] = files[j], files[i]
|
||||
}
|
||||
|
||||
// Less compares by filename
|
||||
func (files PackageFiles) Less(i, j int) bool {
|
||||
return files[i].Filename < files[j].Filename
|
||||
}
|
||||
61
debian/package_files_test.go
vendored
61
debian/package_files_test.go
vendored
@@ -1,61 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"github.com/smira/aptly/files"
|
||||
"github.com/smira/aptly/utils"
|
||||
. "launchpad.net/gocheck"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type PackageFilesSuite struct {
|
||||
files PackageFiles
|
||||
}
|
||||
|
||||
var _ = Suite(&PackageFilesSuite{})
|
||||
|
||||
func (s *PackageFilesSuite) SetUpTest(c *C) {
|
||||
s.files = PackageFiles{PackageFile{
|
||||
Filename: "alien-arena-common_7.40-2_i386.deb",
|
||||
downloadPath: "pool/contrib/a/alien-arena",
|
||||
Checksums: utils.ChecksumInfo{
|
||||
Size: 187518,
|
||||
MD5: "1e8cba92c41420aa7baa8a5718d67122",
|
||||
SHA1: "46955e48cad27410a83740a21d766ce362364024",
|
||||
SHA256: "eb4afb9885cba6dc70cccd05b910b2dbccc02c5900578be5e99f0d3dbf9d76a5",
|
||||
}}}
|
||||
}
|
||||
|
||||
func (s *PackageFilesSuite) TestVerify(c *C) {
|
||||
packagePool := files.NewPackagePool(c.MkDir())
|
||||
poolPath, _ := packagePool.Path(s.files[0].Filename, s.files[0].Checksums.MD5)
|
||||
|
||||
result, err := s.files[0].Verify(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(result, Equals, false)
|
||||
|
||||
err = os.MkdirAll(filepath.Dir(poolPath), 0755)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
file, err := os.Create(poolPath)
|
||||
c.Assert(err, IsNil)
|
||||
file.WriteString("abcde")
|
||||
file.Close()
|
||||
|
||||
result, err = s.files[0].Verify(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(result, Equals, false)
|
||||
|
||||
s.files[0].Checksums.Size = 5
|
||||
result, err = s.files[0].Verify(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(result, Equals, true)
|
||||
}
|
||||
|
||||
func (s *PackageFilesSuite) TestDownloadURL(c *C) {
|
||||
c.Check(s.files[0].DownloadURL(), Equals, "pool/contrib/a/alien-arena/alien-arena-common_7.40-2_i386.deb")
|
||||
}
|
||||
|
||||
func (s *PackageFilesSuite) TestHash(c *C) {
|
||||
c.Check(s.files.Hash(), Equals, uint64(0xc8901eedd79ac51b))
|
||||
}
|
||||
361
debian/package_test.go
vendored
361
debian/package_test.go
vendored
@@ -1,361 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/smira/aptly/files"
|
||||
"github.com/smira/aptly/utils"
|
||||
. "launchpad.net/gocheck"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type PackageSuite struct {
|
||||
stanza Stanza
|
||||
sourceStanza Stanza
|
||||
}
|
||||
|
||||
var _ = Suite(&PackageSuite{})
|
||||
|
||||
func (s *PackageSuite) SetUpTest(c *C) {
|
||||
s.stanza = packageStanza.Copy()
|
||||
|
||||
buf := bytes.NewBufferString(sourcePackageMeta)
|
||||
s.sourceStanza, _ = NewControlFileReader(buf).ReadStanza()
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewFromPara(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
c.Check(p.IsSource, Equals, false)
|
||||
c.Check(p.Name, Equals, "alien-arena-common")
|
||||
c.Check(p.Version, Equals, "7.40-2")
|
||||
c.Check(p.Architecture, Equals, "i386")
|
||||
c.Check(p.Provides, DeepEquals, []string(nil))
|
||||
c.Check(p.Files(), HasLen, 1)
|
||||
c.Check(p.Files()[0].Filename, Equals, "alien-arena-common_7.40-2_i386.deb")
|
||||
c.Check(p.Files()[0].downloadPath, Equals, "pool/contrib/a/alien-arena")
|
||||
c.Check(p.Files()[0].Checksums.Size, Equals, int64(187518))
|
||||
c.Check(p.Files()[0].Checksums.MD5, Equals, "1e8cba92c41420aa7baa8a5718d67122")
|
||||
c.Check(p.deps.Depends, DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)"})
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewSourceFromPara(c *C) {
|
||||
p, err := NewSourcePackageFromControlFile(s.sourceStanza)
|
||||
|
||||
c.Check(err, IsNil)
|
||||
c.Check(p.IsSource, Equals, true)
|
||||
c.Check(p.Name, Equals, "access-modifier-checker")
|
||||
c.Check(p.Version, Equals, "1.0-4")
|
||||
c.Check(p.Architecture, Equals, "source")
|
||||
c.Check(p.SourceArchitecture, Equals, "all")
|
||||
c.Check(p.Provides, IsNil)
|
||||
c.Check(p.deps.BuildDepends, DeepEquals, []string{"cdbs", "debhelper (>= 7)", "default-jdk", "maven-debian-helper"})
|
||||
c.Check(p.deps.BuildDependsInDep, DeepEquals, []string{"default-jdk-doc", "junit (>= 3.8.1)", "libannotation-indexer-java (>= 1.3)", "libannotation-indexer-java-doc", "libasm3-java", "libmaven-install-plugin-java", "libmaven-javadoc-plugin-java", "libmaven-scm-java", "libmaven2-core-java", "libmaven2-core-java-doc", "libmetainf-services-java", "libmetainf-services-java-doc", "libmaven-plugin-tools-java (>= 2.8)"})
|
||||
c.Check(p.Files(), HasLen, 3)
|
||||
|
||||
c.Check(p.Files()[0].Filename, Equals, "access-modifier-checker_1.0-4.debian.tar.gz")
|
||||
c.Check(p.Files()[0].downloadPath, Equals, "pool/main/a/access-modifier-checker")
|
||||
|
||||
c.Check(p.Files()[1].Filename, Equals, "access-modifier-checker_1.0-4.dsc")
|
||||
c.Check(p.Files()[1].downloadPath, Equals, "pool/main/a/access-modifier-checker")
|
||||
c.Check(p.Files()[1].Checksums.Size, Equals, int64(3))
|
||||
c.Check(p.Files()[1].Checksums.MD5, Equals, "900150983cd24fb0d6963f7d28e17f72")
|
||||
c.Check(p.Files()[1].Checksums.SHA1, Equals, "a9993e364706816aba3e25717850c26c9cd0d89d")
|
||||
c.Check(p.Files()[1].Checksums.SHA256, Equals, "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
|
||||
|
||||
c.Check(p.Files()[2].Filename, Equals, "access-modifier-checker_1.0.orig.tar.gz")
|
||||
c.Check(p.Files()[2].downloadPath, Equals, "pool/main/a/access-modifier-checker")
|
||||
c.Check(p.Files()[2].Checksums.Size, Equals, int64(4))
|
||||
c.Check(p.Files()[2].Checksums.MD5, Equals, "e2fc714c4727ee9395f324cd2e7f331f")
|
||||
c.Check(p.Files()[2].Checksums.SHA1, Equals, "81fe8bfe87576c3ecb22426f8e57847382917acf")
|
||||
c.Check(p.Files()[2].Checksums.SHA256, Equals, "88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589")
|
||||
|
||||
c.Check(p.deps.Depends, IsNil)
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestWithProvides(c *C) {
|
||||
s.stanza["Provides"] = "arena"
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
c.Check(p.Name, Equals, "alien-arena-common")
|
||||
c.Check(p.Provides, DeepEquals, []string{"arena"})
|
||||
|
||||
st := p.Stanza()
|
||||
c.Check(st["Provides"], Equals, "arena")
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestKey(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
c.Check(p.Key(""), DeepEquals, []byte("Pi386 alien-arena-common 7.40-2"))
|
||||
c.Check(p.Key("xD"), DeepEquals, []byte("xDPi386 alien-arena-common 7.40-2"))
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestStanza(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza.Copy())
|
||||
stanza := p.Stanza()
|
||||
|
||||
c.Assert(stanza, DeepEquals, s.stanza)
|
||||
|
||||
p, _ = NewSourcePackageFromControlFile(s.sourceStanza.Copy())
|
||||
stanza = p.Stanza()
|
||||
|
||||
c.Assert(stanza, DeepEquals, s.sourceStanza)
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestString(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
c.Assert(p.String(), Equals, "alien-arena-common_7.40-2_i386")
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestEquals(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
p2 := NewPackageFromControlFile(packageStanza.Copy())
|
||||
c.Check(p.Equals(p2), Equals, true)
|
||||
|
||||
p2.deps.Depends = []string{"package1"}
|
||||
c.Check(p.Equals(p2), Equals, true) // strange, but Equals doesn't check deep
|
||||
|
||||
p2 = NewPackageFromControlFile(packageStanza.Copy())
|
||||
files := p2.Files()
|
||||
files[0].Checksums.MD5 = "abcdefabcdef"
|
||||
p2.UpdateFiles(files)
|
||||
c.Check(p.Equals(p2), Equals, false)
|
||||
|
||||
so, _ := NewSourcePackageFromControlFile(s.sourceStanza.Copy())
|
||||
so2, _ := NewSourcePackageFromControlFile(s.sourceStanza.Copy())
|
||||
|
||||
c.Check(so.Equals(so2), Equals, true)
|
||||
|
||||
files = so2.Files()
|
||||
files[2].Checksums.MD5 = "abcde"
|
||||
so2.UpdateFiles(files)
|
||||
c.Check(so.Equals(so2), Equals, false)
|
||||
|
||||
so2, _ = NewSourcePackageFromControlFile(s.sourceStanza.Copy())
|
||||
files = so2.Files()
|
||||
files[1].Filename = "other.deb"
|
||||
so2.UpdateFiles(files)
|
||||
c.Check(so.Equals(so2), Equals, false)
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestMatchesArchitecture(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
c.Check(p.MatchesArchitecture("i386"), Equals, true)
|
||||
c.Check(p.MatchesArchitecture("amd64"), Equals, false)
|
||||
|
||||
s.stanza = packageStanza.Copy()
|
||||
s.stanza["Architecture"] = "all"
|
||||
p = NewPackageFromControlFile(s.stanza)
|
||||
c.Check(p.MatchesArchitecture("i386"), Equals, true)
|
||||
c.Check(p.MatchesArchitecture("amd64"), Equals, true)
|
||||
c.Check(p.MatchesArchitecture("source"), Equals, false)
|
||||
|
||||
p, _ = NewSourcePackageFromControlFile(s.sourceStanza)
|
||||
c.Check(p.MatchesArchitecture("source"), Equals, true)
|
||||
c.Check(p.MatchesArchitecture("amd64"), Equals, false)
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestMatchesDependency(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
// exact match
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionEqual, Version: "7.40-2"}), Equals, true)
|
||||
|
||||
// different name
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena", Architecture: "i386", Relation: VersionEqual, Version: "7.40-2"}), Equals, false)
|
||||
|
||||
// different version
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionEqual, Version: "7.40-3"}), Equals, false)
|
||||
|
||||
// different arch
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "amd64", Relation: VersionEqual, Version: "7.40-2"}), Equals, false)
|
||||
|
||||
// empty arch
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "", Relation: VersionEqual, Version: "7.40-2"}), Equals, true)
|
||||
|
||||
// version don't care
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionDontCare, Version: ""}), Equals, true)
|
||||
|
||||
// >
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionGreater, Version: "7.40-2"}), Equals, false)
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionGreater, Version: "7.40-1"}), Equals, true)
|
||||
|
||||
// <
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionLess, Version: "7.40-2"}), Equals, false)
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionLess, Version: "7.40-3"}), Equals, true)
|
||||
|
||||
// >=
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionGreaterOrEqual, Version: "7.40-2"}), Equals, true)
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionGreaterOrEqual, Version: "7.40-3"}), Equals, false)
|
||||
|
||||
// <=
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionLessOrEqual, Version: "7.40-2"}), Equals, true)
|
||||
c.Check(p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionLessOrEqual, Version: "7.40-1"}), Equals, false)
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestGetDependencies(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
c.Check(p.GetDependencies(0), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)"})
|
||||
c.Check(p.GetDependencies(DepFollowSuggests), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)", "alien-arena-mars"})
|
||||
c.Check(p.GetDependencies(DepFollowSuggests|DepFollowRecommends), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)", "aliean-arena-luna", "alien-arena-mars"})
|
||||
|
||||
c.Check(p.GetDependencies(DepFollowSource), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)", "alien-arena (= 7.40-2) {source}"})
|
||||
p.Source = "alien-arena (7.40-3)"
|
||||
c.Check(p.GetDependencies(DepFollowSource), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)", "alien-arena (7.40-3) {source}"})
|
||||
p.Source = ""
|
||||
c.Check(p.GetDependencies(DepFollowSource), DeepEquals, []string{"libc6 (>= 2.7)", "alien-arena-data (>= 7.40)", "dpkg (>= 1.6)", "alien-arena-common (= 7.40-2) {source}"})
|
||||
|
||||
p, _ = NewSourcePackageFromControlFile(s.sourceStanza)
|
||||
c.Check(p.GetDependencies(0), DeepEquals, []string{})
|
||||
c.Check(p.GetDependencies(DepFollowBuild), DeepEquals, []string{"cdbs", "debhelper (>= 7)", "default-jdk", "maven-debian-helper", "default-jdk-doc", "junit (>= 3.8.1)", "libannotation-indexer-java (>= 1.3)", "libannotation-indexer-java-doc", "libasm3-java", "libmaven-install-plugin-java", "libmaven-javadoc-plugin-java", "libmaven-scm-java", "libmaven2-core-java", "libmaven2-core-java-doc", "libmetainf-services-java", "libmetainf-services-java-doc", "libmaven-plugin-tools-java (>= 2.8)"})
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestPoolDirectory(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
dir, err := p.PoolDirectory()
|
||||
c.Check(err, IsNil)
|
||||
c.Check(dir, Equals, "a/alien-arena")
|
||||
|
||||
p = NewPackageFromControlFile(packageStanza.Copy())
|
||||
p.Source = ""
|
||||
dir, err = p.PoolDirectory()
|
||||
c.Check(err, IsNil)
|
||||
c.Check(dir, Equals, "a/alien-arena-common")
|
||||
|
||||
p = NewPackageFromControlFile(packageStanza.Copy())
|
||||
p.Source = "libarena"
|
||||
dir, err = p.PoolDirectory()
|
||||
c.Check(err, IsNil)
|
||||
c.Check(dir, Equals, "liba/libarena")
|
||||
|
||||
p = NewPackageFromControlFile(packageStanza.Copy())
|
||||
p.Source = "l"
|
||||
_, err = p.PoolDirectory()
|
||||
c.Check(err, ErrorMatches, ".* too short")
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestLinkFromPool(c *C) {
|
||||
packagePool := files.NewPackagePool(c.MkDir())
|
||||
publishedStorage := files.NewPublishedStorage(c.MkDir())
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
poolPath, _ := packagePool.Path(p.Files()[0].Filename, p.Files()[0].Checksums.MD5)
|
||||
err := os.MkdirAll(filepath.Dir(poolPath), 0755)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
file, err := os.Create(poolPath)
|
||||
c.Assert(err, IsNil)
|
||||
file.Close()
|
||||
|
||||
err = p.LinkFromPool(publishedStorage, packagePool, "", "non-free")
|
||||
c.Check(err, IsNil)
|
||||
c.Check(p.Files()[0].Filename, Equals, "alien-arena-common_7.40-2_i386.deb")
|
||||
c.Check(p.Files()[0].downloadPath, Equals, "pool/non-free/a/alien-arena")
|
||||
|
||||
p.IsSource = true
|
||||
err = p.LinkFromPool(publishedStorage, packagePool, "", "non-free")
|
||||
c.Check(err, IsNil)
|
||||
c.Check(p.Extra()["Directory"], Equals, "pool/non-free/a/alien-arena")
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestFilepathList(c *C) {
|
||||
packagePool := files.NewPackagePool(c.MkDir())
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
list, err := p.FilepathList(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(list, DeepEquals, []string{"1e/8c/alien-arena-common_7.40-2_i386.deb"})
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestDownloadList(c *C) {
|
||||
packagePool := files.NewPackagePool(c.MkDir())
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
p.Files()[0].Checksums.Size = 5
|
||||
poolPath, _ := packagePool.Path(p.Files()[0].Filename, p.Files()[0].Checksums.MD5)
|
||||
|
||||
list, err := p.DownloadList(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(list, DeepEquals, []PackageDownloadTask{
|
||||
PackageDownloadTask{
|
||||
RepoURI: "pool/contrib/a/alien-arena/alien-arena-common_7.40-2_i386.deb",
|
||||
DestinationPath: poolPath,
|
||||
Checksums: utils.ChecksumInfo{Size: 5,
|
||||
MD5: "1e8cba92c41420aa7baa8a5718d67122",
|
||||
SHA1: "46955e48cad27410a83740a21d766ce362364024",
|
||||
SHA256: "eb4afb9885cba6dc70cccd05b910b2dbccc02c5900578be5e99f0d3dbf9d76a5"}}})
|
||||
|
||||
err = os.MkdirAll(filepath.Dir(poolPath), 0755)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
file, err := os.Create(poolPath)
|
||||
c.Assert(err, IsNil)
|
||||
file.WriteString("abcde")
|
||||
file.Close()
|
||||
|
||||
list, err = p.DownloadList(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(list, DeepEquals, []PackageDownloadTask{})
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestVerifyFiles(c *C) {
|
||||
p := NewPackageFromControlFile(s.stanza)
|
||||
|
||||
packagePool := files.NewPackagePool(c.MkDir())
|
||||
poolPath, _ := packagePool.Path(p.Files()[0].Filename, p.Files()[0].Checksums.MD5)
|
||||
|
||||
err := os.MkdirAll(filepath.Dir(poolPath), 0755)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
file, err := os.Create(poolPath)
|
||||
c.Assert(err, IsNil)
|
||||
file.WriteString("abcde")
|
||||
file.Close()
|
||||
|
||||
result, err := p.VerifyFiles(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(result, Equals, false)
|
||||
|
||||
p.Files()[0].Checksums.Size = 5
|
||||
|
||||
result, err = p.VerifyFiles(packagePool)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(result, Equals, true)
|
||||
}
|
||||
|
||||
var packageStanza = Stanza{"Source": "alien-arena", "Pre-Depends": "dpkg (>= 1.6)", "Suggests": "alien-arena-mars", "Recommends": "aliean-arena-luna", "Depends": "libc6 (>= 2.7), alien-arena-data (>= 7.40)", "Filename": "pool/contrib/a/alien-arena/alien-arena-common_7.40-2_i386.deb", "SHA1": " 46955e48cad27410a83740a21d766ce362364024", "SHA256": " eb4afb9885cba6dc70cccd05b910b2dbccc02c5900578be5e99f0d3dbf9d76a5", "Priority": "extra", "Maintainer": "Debian Games Team <pkg-games-devel@lists.alioth.debian.org>", "Description": "Common files for Alien Arena client and server ALIEN ARENA is a standalone 3D first person online deathmatch shooter\n crafted from the original source code of Quake II and Quake III, released\n by id Software under the GPL license. With features including 32 bit\n graphics, new particle engine and effects, light blooms, reflective water,\n hi resolution textures and skins, hi poly models, stain maps, ALIEN ARENA\n pushes the envelope of graphical beauty rivaling today's top games.\n .\n This package installs the common files for Alien Arena.\n", "Homepage": "http://red.planetarena.org", "Tag": "role::app-data, role::shared-lib, special::auto-inst-parts", "Installed-Size": "456", "Version": "7.40-2", "Replaces": "alien-arena (<< 7.33-1)", "Size": "187518", "MD5sum": "1e8cba92c41420aa7baa8a5718d67122", "Package": "alien-arena-common", "Section": "contrib/games", "Architecture": "i386"}
|
||||
|
||||
const sourcePackageMeta = `Package: access-modifier-checker
|
||||
Binary: libaccess-modifier-checker-java, libaccess-modifier-checker-java-doc
|
||||
Version: 1.0-4
|
||||
Maintainer: Debian Java Maintainers <pkg-java-maintainers@lists.alioth.debian.org>
|
||||
Uploaders: James Page <james.page@ubuntu.com>
|
||||
Build-Depends: cdbs, debhelper (>= 7), default-jdk, maven-debian-helper
|
||||
Build-Depends-Indep: default-jdk-doc, junit (>= 3.8.1), libannotation-indexer-java (>= 1.3), libannotation-indexer-java-doc, libasm3-java, libmaven-install-plugin-java, libmaven-javadoc-plugin-java, libmaven-scm-java, libmaven2-core-java, libmaven2-core-java-doc, libmetainf-services-java, libmetainf-services-java-doc, libmaven-plugin-tools-java (>= 2.8)
|
||||
Architecture: all
|
||||
Standards-Version: 3.9.3
|
||||
Format: 3.0 (quilt)
|
||||
Files:
|
||||
ab56b4d92b40713acc5af89985d4b786 5 access-modifier-checker_1.0-4.debian.tar.gz
|
||||
900150983cd24fb0d6963f7d28e17f72 3 access-modifier-checker_1.0-4.dsc
|
||||
e2fc714c4727ee9395f324cd2e7f331f 4 access-modifier-checker_1.0.orig.tar.gz
|
||||
Dm-Upload-Allowed: yes
|
||||
Vcs-Browser: http://git.debian.org/?p=pkg-java/access-modifier-checker.git
|
||||
Vcs-Git: git://git.debian.org/git/pkg-java/access-modifier-checker.git
|
||||
Checksums-Sha1:
|
||||
03de6c570bfe24bfc328ccd7ca46b76eadaf4334 5 access-modifier-checker_1.0-4.debian.tar.gz
|
||||
a9993e364706816aba3e25717850c26c9cd0d89d 3 access-modifier-checker_1.0-4.dsc
|
||||
81fe8bfe87576c3ecb22426f8e57847382917acf 4 access-modifier-checker_1.0.orig.tar.gz
|
||||
Checksums-Sha256:
|
||||
36bbe50ed96841d10443bcb670d6554f0a34b761be67ec9c4a8ad2c0c44ca42c 5 access-modifier-checker_1.0-4.debian.tar.gz
|
||||
ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad 3 access-modifier-checker_1.0-4.dsc
|
||||
88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589 4 access-modifier-checker_1.0.orig.tar.gz
|
||||
Homepage: https://github.com/kohsuke/access-modifier
|
||||
Package-List:
|
||||
libaccess-modifier-checker-java deb java optional
|
||||
libaccess-modifier-checker-java-doc deb doc optional
|
||||
Directory: pool/main/a/access-modifier-checker
|
||||
Priority: source
|
||||
Section: java
|
||||
`
|
||||
54
debian/ppa.go
vendored
54
debian/ppa.go
vendored
@@ -1,54 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/smira/aptly/utils"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var ppaRegexp = regexp.MustCompile("^ppa:([^/]+)/(.+)$")
|
||||
|
||||
// ParsePPA converts ppa URL like ppa:user/ppa-name to full HTTP url
|
||||
func ParsePPA(ppaURL string, config *utils.ConfigStructure) (url string, distribution string, components []string, err error) {
|
||||
matches := ppaRegexp.FindStringSubmatch(ppaURL)
|
||||
if matches == nil {
|
||||
err = fmt.Errorf("unable to parse ppa URL: %v", ppaURL)
|
||||
return
|
||||
}
|
||||
|
||||
distributorID := config.PpaDistributorID
|
||||
if distributorID == "" {
|
||||
distributorID, err = getDistributorID()
|
||||
if err != nil {
|
||||
err = fmt.Errorf("unable to figure out Distributor ID: %s, please set config option ppaDistributorID", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
codename := config.PpaCodename
|
||||
if codename == "" {
|
||||
codename, err = getCodename()
|
||||
if err != nil {
|
||||
err = fmt.Errorf("unable to figure out Codename: %s, please set config option ppaCodename", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
distribution = codename
|
||||
components = []string{"main"}
|
||||
url = fmt.Sprintf("http://ppa.launchpad.net/%s/%s/%s", matches[1], matches[2], distributorID)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func getCodename() (string, error) {
|
||||
out, err := exec.Command("lsb_release", "-sc").Output()
|
||||
return strings.TrimSpace(string(out)), err
|
||||
}
|
||||
|
||||
func getDistributorID() (string, error) {
|
||||
out, err := exec.Command("lsb_release", "-si").Output()
|
||||
return strings.ToLower(strings.TrimSpace(string(out))), err
|
||||
}
|
||||
26
debian/ppa_test.go
vendored
26
debian/ppa_test.go
vendored
@@ -1,26 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"github.com/smira/aptly/utils"
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
type PpaSuite struct {
|
||||
config utils.ConfigStructure
|
||||
}
|
||||
|
||||
var _ = Suite(&PpaSuite{})
|
||||
|
||||
func (s *PpaSuite) TestParsePPA(c *C) {
|
||||
_, _, _, err := ParsePPA("ppa:dedeed", &s.config)
|
||||
c.Check(err, ErrorMatches, "unable to parse ppa URL.*")
|
||||
|
||||
s.config.PpaDistributorID = "debian"
|
||||
s.config.PpaCodename = "wheezy"
|
||||
|
||||
url, distribution, components, err := ParsePPA("ppa:user/project", &s.config)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(url, Equals, "http://ppa.launchpad.net/user/project/debian")
|
||||
c.Check(distribution, Equals, "wheezy")
|
||||
c.Check(components, DeepEquals, []string{"main"})
|
||||
}
|
||||
607
debian/publish.go
vendored
607
debian/publish.go
vendored
@@ -1,607 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"code.google.com/p/go-uuid/uuid"
|
||||
"fmt"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/utils"
|
||||
"github.com/ugorji/go/codec"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// PublishedRepo is a published for http/ftp representation of snapshot as Debian repository
|
||||
type PublishedRepo struct {
|
||||
// Internal unique ID
|
||||
UUID string
|
||||
// Prefix & distribution should be unique across all published repositories
|
||||
Prefix string
|
||||
Distribution string
|
||||
Component string
|
||||
// Architectures is a list of all architectures published
|
||||
Architectures []string
|
||||
// SourceKind is "local"/"repo"
|
||||
SourceKind string
|
||||
// SourceUUID is UUID of either snapshot or local repo
|
||||
SourceUUID string `codec:"SnapshotUUID"`
|
||||
|
||||
snapshot *Snapshot
|
||||
localRepo *LocalRepo
|
||||
}
|
||||
|
||||
// NewPublishedRepo creates new published repository
|
||||
//
|
||||
// prefix specifies publishing prefix
|
||||
// distribution, component and architectures are user-defined properties
|
||||
// source could either be *Snapshot or *LocalRepo
|
||||
func NewPublishedRepo(prefix string, distribution string, component string, architectures []string, source interface{}, collectionFactory *CollectionFactory) (*PublishedRepo, error) {
|
||||
var ok bool
|
||||
|
||||
result := &PublishedRepo{
|
||||
UUID: uuid.New(),
|
||||
Architectures: architectures,
|
||||
}
|
||||
|
||||
// figure out source
|
||||
result.snapshot, ok = source.(*Snapshot)
|
||||
if ok {
|
||||
result.SourceKind = "snapshot"
|
||||
result.SourceUUID = result.snapshot.UUID
|
||||
} else {
|
||||
result.localRepo, ok = source.(*LocalRepo)
|
||||
if ok {
|
||||
result.SourceKind = "local"
|
||||
result.SourceUUID = result.localRepo.UUID
|
||||
} else {
|
||||
panic("unknown source kind")
|
||||
}
|
||||
}
|
||||
|
||||
// clean & verify prefix
|
||||
prefix = filepath.Clean(prefix)
|
||||
if strings.HasPrefix(prefix, "/") {
|
||||
prefix = prefix[1:]
|
||||
}
|
||||
if strings.HasSuffix(prefix, "/") {
|
||||
prefix = prefix[:len(prefix)-1]
|
||||
}
|
||||
prefix = filepath.Clean(prefix)
|
||||
|
||||
for _, part := range strings.Split(prefix, "/") {
|
||||
if part == ".." || part == "dists" || part == "pool" {
|
||||
return nil, fmt.Errorf("invalid prefix %s", prefix)
|
||||
}
|
||||
}
|
||||
|
||||
result.Prefix = prefix
|
||||
|
||||
// guessing distribution & component
|
||||
if component == "" || distribution == "" {
|
||||
var (
|
||||
head interface{}
|
||||
current = []interface{}{source}
|
||||
rootComponents = []string{}
|
||||
rootDistributions = []string{}
|
||||
)
|
||||
|
||||
// walk up the tree from current source up to roots (local or remote repos)
|
||||
// and collect information about distribution and components
|
||||
for len(current) > 0 {
|
||||
head, current = current[0], current[1:]
|
||||
|
||||
if snapshot, ok := head.(*Snapshot); ok {
|
||||
for _, uuid := range snapshot.SourceIDs {
|
||||
if snapshot.SourceKind == "repo" {
|
||||
remoteRepo, err := collectionFactory.RemoteRepoCollection().ByUUID(uuid)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
current = append(current, remoteRepo)
|
||||
} else if snapshot.SourceKind == "local" {
|
||||
localRepo, err := collectionFactory.LocalRepoCollection().ByUUID(uuid)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
current = append(current, localRepo)
|
||||
} else if snapshot.SourceKind == "snapshot" {
|
||||
snap, err := collectionFactory.SnapshotCollection().ByUUID(uuid)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
current = append(current, snap)
|
||||
}
|
||||
}
|
||||
} else if localRepo, ok := head.(*LocalRepo); ok {
|
||||
if localRepo.DefaultDistribution != "" {
|
||||
rootDistributions = append(rootDistributions, localRepo.DefaultDistribution)
|
||||
}
|
||||
if localRepo.DefaultComponent != "" {
|
||||
rootComponents = append(rootComponents, localRepo.DefaultComponent)
|
||||
}
|
||||
} else if remoteRepo, ok := head.(*RemoteRepo); ok {
|
||||
if remoteRepo.Distribution != "" {
|
||||
rootDistributions = append(rootDistributions, remoteRepo.Distribution)
|
||||
}
|
||||
rootComponents = append(rootComponents, remoteRepo.Components...)
|
||||
} else {
|
||||
panic("unknown type")
|
||||
}
|
||||
}
|
||||
|
||||
if distribution == "" {
|
||||
sort.Strings(rootDistributions)
|
||||
if len(rootDistributions) > 0 && rootDistributions[0] == rootDistributions[len(rootDistributions)-1] {
|
||||
distribution = rootDistributions[0]
|
||||
} else {
|
||||
return nil, fmt.Errorf("unable to guess distribution name, please specify explicitly")
|
||||
}
|
||||
}
|
||||
|
||||
if component == "" {
|
||||
sort.Strings(rootComponents)
|
||||
if len(rootComponents) > 0 && rootComponents[0] == rootComponents[len(rootComponents)-1] {
|
||||
component = rootComponents[0]
|
||||
} else {
|
||||
component = "main"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.Distribution, result.Component = distribution, component
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// String returns human-readable represenation of PublishedRepo
|
||||
func (p *PublishedRepo) String() string {
|
||||
var source string
|
||||
|
||||
if p.snapshot != nil {
|
||||
source = p.snapshot.String()
|
||||
} else if p.localRepo != nil {
|
||||
source = p.localRepo.String()
|
||||
} else {
|
||||
panic("no snapshot/localRepo")
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s/%s (%s) [%s] publishes %s", p.Prefix, p.Distribution, p.Component, strings.Join(p.Architectures, ", "), source)
|
||||
}
|
||||
|
||||
// Key returns unique key identifying PublishedRepo
|
||||
func (p *PublishedRepo) Key() []byte {
|
||||
return []byte("U" + p.Prefix + ">>" + p.Distribution)
|
||||
}
|
||||
|
||||
// Encode does msgpack encoding of PublishedRepo
|
||||
func (p *PublishedRepo) Encode() []byte {
|
||||
var buf bytes.Buffer
|
||||
|
||||
encoder := codec.NewEncoder(&buf, &codec.MsgpackHandle{})
|
||||
encoder.Encode(p)
|
||||
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// Decode decodes msgpack representation into PublishedRepo
|
||||
func (p *PublishedRepo) Decode(input []byte) error {
|
||||
decoder := codec.NewDecoderBytes(input, &codec.MsgpackHandle{})
|
||||
err := decoder.Decode(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// old PublishedRepo were publishing only snapshots
|
||||
if p.SourceKind == "" {
|
||||
p.SourceKind = "snapshot"
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Publish publishes snapshot (repository) contents, links package files, generates Packages & Release files, signs them.
//
// Steps: create pool/dists directories, load the full package list from the
// source (snapshot or local repo), generate per-architecture Packages/Sources
// indices (linking package files into the pool as it goes), then write and
// optionally sign the Release file.
func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorage aptly.PublishedStorage, collectionFactory *CollectionFactory, signer utils.Signer, progress aptly.Progress) error {
	err := publishedStorage.MkDir(filepath.Join(p.Prefix, "pool"))
	if err != nil {
		return err
	}
	basePath := filepath.Join(p.Prefix, "dists", p.Distribution)
	err = publishedStorage.MkDir(basePath)
	if err != nil {
		return err
	}

	if progress != nil {
		progress.Printf("Loading packages...\n")
	}

	var refList *PackageRefList

	// Published repo has exactly one source: a snapshot or a local repo.
	if p.snapshot != nil {
		refList = p.snapshot.RefList()
	} else if p.localRepo != nil {
		refList = p.localRepo.RefList()
	} else {
		panic("no source")
	}

	// Load all packages
	list, err := NewPackageListFromRefList(refList, collectionFactory.PackageCollection(), progress)
	if err != nil {
		return fmt.Errorf("unable to load packages: %s", err)
	}

	if list.Len() == 0 {
		return fmt.Errorf("source is empty")
	}

	// If the caller didn't pin architectures, derive them from the packages.
	if len(p.Architectures) == 0 {
		p.Architectures = list.Architectures(true)
	}

	if len(p.Architectures) == 0 {
		return fmt.Errorf("unable to figure out list of architectures, please supply explicit list")
	}

	sort.Strings(p.Architectures)

	// relative index path -> checksums, later embedded into the Release file
	generatedFiles := map[string]utils.ChecksumInfo{}

	if progress != nil {
		progress.Printf("Generating metadata files and linking package files...\n")
	}

	// For all architectures, generate release file
	for _, arch := range p.Architectures {
		if progress != nil {
			progress.InitBar(int64(list.Len()), false)
		}

		// "source" packages go to Sources, binaries to binary-<arch>/Packages.
		var relativePath string
		if arch == "source" {
			relativePath = filepath.Join(p.Component, "source", "Sources")
		} else {
			relativePath = filepath.Join(p.Component, fmt.Sprintf("binary-%s", arch), "Packages")
		}
		err = publishedStorage.MkDir(filepath.Dir(filepath.Join(basePath, relativePath)))
		if err != nil {
			return err
		}

		// NOTE(review): packagesFile is not closed on the error paths below —
		// the descriptor leaks until process exit; confirm acceptable.
		var packagesFile *os.File
		packagesFile, err = publishedStorage.CreateFile(filepath.Join(basePath, relativePath))
		if err != nil {
			return fmt.Errorf("unable to creates Packages file: %s", err)
		}

		bufWriter := bufio.NewWriter(packagesFile)

		err = list.ForEach(func(pkg *Package) error {
			if progress != nil {
				progress.AddBar(1)
			}
			if pkg.MatchesArchitecture(arch) {
				// Hardlink/copy the package file into the published pool.
				err = pkg.LinkFromPool(publishedStorage, packagePool, p.Prefix, p.Component)
				if err != nil {
					return err
				}

				err = pkg.Stanza().WriteTo(bufWriter)
				if err != nil {
					return err
				}
				err = bufWriter.WriteByte('\n')
				if err != nil {
					return err
				}

				// Drop loaded package details to keep memory usage bounded.
				pkg.files = nil
				pkg.deps = nil
				pkg.extra = nil

			}

			return nil
		})

		if err != nil {
			return fmt.Errorf("unable to process packages: %s", err)
		}

		err = bufWriter.Flush()
		if err != nil {
			return fmt.Errorf("unable to write Packages file: %s", err)
		}

		// Produce Packages.gz / Packages.bz2 alongside the plain index.
		err = utils.CompressFile(packagesFile)
		if err != nil {
			return fmt.Errorf("unable to compress Packages files: %s", err)
		}

		packagesFile.Close()

		var checksumInfo utils.ChecksumInfo
		checksumInfo, err = publishedStorage.ChecksumsForFile(filepath.Join(basePath, relativePath))
		if err != nil {
			return fmt.Errorf("unable to collect checksums: %s", err)
		}
		generatedFiles[relativePath] = checksumInfo

		checksumInfo, err = publishedStorage.ChecksumsForFile(filepath.Join(basePath, relativePath+".gz"))
		if err != nil {
			return fmt.Errorf("unable to collect checksums: %s", err)
		}
		generatedFiles[relativePath+".gz"] = checksumInfo

		checksumInfo, err = publishedStorage.ChecksumsForFile(filepath.Join(basePath, relativePath+".bz2"))
		if err != nil {
			return fmt.Errorf("unable to collect checksums: %s", err)
		}
		generatedFiles[relativePath+".bz2"] = checksumInfo

		if progress != nil {
			progress.ShutdownBar()
		}
	}

	// Assemble the top-level Release stanza.
	release := make(Stanza)
	release["Origin"] = p.Prefix + " " + p.Distribution
	release["Label"] = p.Prefix + " " + p.Distribution
	release["Codename"] = p.Distribution
	release["Date"] = time.Now().UTC().Format("Mon, 2 Jan 2006 15:04:05 MST")
	release["Components"] = p.Component
	// "source" is not a real architecture in the Release header.
	release["Architectures"] = strings.Join(utils.StrSlicesSubstract(p.Architectures, []string{"source"}), " ")
	release["Description"] = " Generated by aptly\n"
	release["MD5Sum"] = "\n"
	release["SHA1"] = "\n"
	release["SHA256"] = "\n"

	for path, info := range generatedFiles {
		release["MD5Sum"] += fmt.Sprintf(" %s %8d %s\n", info.MD5, info.Size, path)
		release["SHA1"] += fmt.Sprintf(" %s %8d %s\n", info.SHA1, info.Size, path)
		release["SHA256"] += fmt.Sprintf(" %s %8d %s\n", info.SHA256, info.Size, path)
	}

	releaseFile, err := publishedStorage.CreateFile(filepath.Join(basePath, "Release"))
	if err != nil {
		return fmt.Errorf("unable to create Release file: %s", err)
	}

	bufWriter := bufio.NewWriter(releaseFile)

	err = release.WriteTo(bufWriter)
	if err != nil {
		return fmt.Errorf("unable to create Release file: %s", err)
	}

	err = bufWriter.Flush()
	if err != nil {
		return fmt.Errorf("unable to create Release file: %s", err)
	}

	releaseFilename := releaseFile.Name()
	releaseFile.Close()

	// Signing files might output to console, so flush progress writer first
	if progress != nil {
		progress.Flush()
	}

	if signer != nil {
		err = signer.DetachedSign(releaseFilename, releaseFilename+".gpg")
		if err != nil {
			return fmt.Errorf("unable to sign Release file: %s", err)
		}

		err = signer.ClearSign(releaseFilename, filepath.Join(filepath.Dir(releaseFilename), "InRelease"))
		if err != nil {
			return fmt.Errorf("unable to sign Release file: %s", err)
		}
	}

	return nil
}
|
||||
|
||||
// RemoveFiles removes files that were created by Publish
|
||||
//
|
||||
// It can remove prefix fully, and part of pool (for specific component)
|
||||
func (p *PublishedRepo) RemoveFiles(publishedStorage aptly.PublishedStorage, removePrefix, removePoolComponent bool) error {
|
||||
if removePrefix {
|
||||
err := publishedStorage.RemoveDirs(filepath.Join(p.Prefix, "dists"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return publishedStorage.RemoveDirs(filepath.Join(p.Prefix, "pool"))
|
||||
}
|
||||
|
||||
err := publishedStorage.RemoveDirs(filepath.Join(p.Prefix, "dists", p.Distribution))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if removePoolComponent {
|
||||
err = publishedStorage.RemoveDirs(filepath.Join(p.Prefix, "pool", p.Component))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PublishedRepoCollection does listing, updating/adding/deleting of PublishedRepos
type PublishedRepoCollection struct {
	db   database.Storage // persistent backing store for published-repo records
	list []*PublishedRepo // in-memory cache of all records, loaded at construction
}
|
||||
|
||||
// NewPublishedRepoCollection loads PublishedRepos from DB and makes up collection
|
||||
func NewPublishedRepoCollection(db database.Storage) *PublishedRepoCollection {
|
||||
result := &PublishedRepoCollection{
|
||||
db: db,
|
||||
}
|
||||
|
||||
blobs := db.FetchByPrefix([]byte("U"))
|
||||
result.list = make([]*PublishedRepo, 0, len(blobs))
|
||||
|
||||
for _, blob := range blobs {
|
||||
r := &PublishedRepo{}
|
||||
if err := r.Decode(blob); err != nil {
|
||||
log.Printf("Error decoding published repo: %s\n", err)
|
||||
} else {
|
||||
result.list = append(result.list, r)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Add appends new repo to collection and saves it
|
||||
func (collection *PublishedRepoCollection) Add(repo *PublishedRepo) error {
|
||||
if collection.CheckDuplicate(repo) != nil {
|
||||
return fmt.Errorf("published repo with prefix/distribution %s/%s already exists", repo.Prefix, repo.Distribution)
|
||||
}
|
||||
|
||||
err := collection.Update(repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collection.list = append(collection.list, repo)
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckDuplicate verifies that there's no published repo with the same name
|
||||
func (collection *PublishedRepoCollection) CheckDuplicate(repo *PublishedRepo) *PublishedRepo {
|
||||
for _, r := range collection.list {
|
||||
if r.Prefix == repo.Prefix && r.Distribution == repo.Distribution {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Update stores updated information about repo in DB
|
||||
func (collection *PublishedRepoCollection) Update(repo *PublishedRepo) error {
|
||||
err := collection.db.Put(repo.Key(), repo.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadComplete loads additional information for remote repo
|
||||
func (collection *PublishedRepoCollection) LoadComplete(repo *PublishedRepo, collectionFactory *CollectionFactory) error {
|
||||
var err error
|
||||
|
||||
if repo.SourceKind == "snapshot" {
|
||||
repo.snapshot, err = collectionFactory.SnapshotCollection().ByUUID(repo.SourceUUID)
|
||||
} else if repo.SourceKind == "local" {
|
||||
repo.localRepo, err = collectionFactory.LocalRepoCollection().ByUUID(repo.SourceUUID)
|
||||
} else {
|
||||
panic("unknown SourceKind")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// ByPrefixDistribution looks up repository by prefix & distribution
|
||||
func (collection *PublishedRepoCollection) ByPrefixDistribution(prefix, distribution string) (*PublishedRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.Prefix == prefix && r.Distribution == distribution {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("published repo with prefix/distribution %s/%s not found", prefix, distribution)
|
||||
}
|
||||
|
||||
// ByUUID looks up repository by uuid
|
||||
func (collection *PublishedRepoCollection) ByUUID(uuid string) (*PublishedRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.UUID == uuid {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("published repo with uuid %s not found", uuid)
|
||||
}
|
||||
|
||||
// BySnapshot looks up repository by snapshot source
|
||||
func (collection *PublishedRepoCollection) BySnapshot(snapshot *Snapshot) []*PublishedRepo {
|
||||
result := make([]*PublishedRepo, 0)
|
||||
for _, r := range collection.list {
|
||||
if r.SourceKind == "snapshot" && r.SourceUUID == snapshot.UUID {
|
||||
result = append(result, r)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ByLocalRepo looks up repository by local repo source
|
||||
func (collection *PublishedRepoCollection) ByLocalRepo(repo *LocalRepo) []*PublishedRepo {
|
||||
result := make([]*PublishedRepo, 0)
|
||||
for _, r := range collection.list {
|
||||
if r.SourceKind == "local" && r.SourceUUID == repo.UUID {
|
||||
result = append(result, r)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ForEach runs method for each repository
|
||||
func (collection *PublishedRepoCollection) ForEach(handler func(*PublishedRepo) error) error {
|
||||
var err error
|
||||
for _, r := range collection.list {
|
||||
err = handler(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Len returns the number of published repos in the collection.
func (collection *PublishedRepoCollection) Len() int {
	return len(collection.list)
}
|
||||
|
||||
// Remove removes published repository, cleaning up directories, files
|
||||
func (collection *PublishedRepoCollection) Remove(publishedStorage aptly.PublishedStorage, prefix, distribution string) error {
|
||||
repo, err := collection.ByPrefixDistribution(prefix, distribution)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
removePrefix := true
|
||||
removePoolComponent := true
|
||||
repoPosition := -1
|
||||
|
||||
for i, r := range collection.list {
|
||||
if r == repo {
|
||||
repoPosition = i
|
||||
continue
|
||||
}
|
||||
if r.Prefix == repo.Prefix {
|
||||
removePrefix = false
|
||||
if r.Component == repo.Component {
|
||||
removePoolComponent = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
err = repo.RemoveFiles(publishedStorage, removePrefix, removePoolComponent)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collection.list[len(collection.list)-1], collection.list[repoPosition], collection.list =
|
||||
nil, collection.list[len(collection.list)-1], collection.list[:len(collection.list)-1]
|
||||
|
||||
return collection.db.Delete(repo.Key())
|
||||
}
|
||||
565
debian/publish_test.go
vendored
565
debian/publish_test.go
vendored
@@ -1,565 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/files"
|
||||
. "launchpad.net/gocheck"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// pathExistsChecker is a gocheck checker that verifies a filesystem path exists.
type pathExistsChecker struct {
	*CheckerInfo
}
|
||||
|
||||
// PathExists is a gocheck checker: c.Check(path, PathExists) passes when path exists.
var PathExists = &pathExistsChecker{
	&CheckerInfo{Name: "PathExists", Params: []string{"path"}},
}
|
||||
|
||||
func (checker *pathExistsChecker) Check(params []interface{}, names []string) (result bool, error string) {
|
||||
_, err := os.Stat(params[0].(string))
|
||||
return err == nil, ""
|
||||
}
|
||||
|
||||
// NullSigner is a no-op signer implementation used by tests.
type NullSigner struct{}
|
||||
|
||||
// Init is a no-op; NullSigner needs no initialization.
func (n *NullSigner) Init() error {
	return nil
}
|
||||
|
||||
// SetKey is a no-op; NullSigner ignores key selection.
func (n *NullSigner) SetKey(keyRef string) {
}
|
||||
|
||||
// SetKeyRing is a no-op; NullSigner ignores keyring configuration.
func (n *NullSigner) SetKeyRing(keyring, secretKeyring string) {
}
|
||||
|
||||
// DetachedSign is a no-op; no signature file is produced.
func (n *NullSigner) DetachedSign(source string, destination string) error {
	return nil
}
|
||||
|
||||
// ClearSign is a no-op; no clearsigned file is produced.
func (n *NullSigner) ClearSign(source string, destination string) error {
	return nil
}
|
||||
|
||||
// PublishedRepoSuite exercises PublishedRepo construction and publishing.
type PublishedRepoSuite struct {
	PackageListMixinSuite
	repo, repo2       *PublishedRepo // published from snapshot / local repo, respectively
	root              string         // temporary root for published storage and pool
	publishedStorage  aptly.PublishedStorage
	packagePool       aptly.PackagePool
	localRepo         *LocalRepo
	snapshot          *Snapshot
	db                database.Storage
	factory           *CollectionFactory
	packageCollection *PackageCollection
}
|
||||
|
||||
// Register the suite with gocheck.
var _ = Suite(&PublishedRepoSuite{})
|
||||
|
||||
func (s *PublishedRepoSuite) SetUpTest(c *C) {
|
||||
s.SetUpPackages()
|
||||
|
||||
s.db, _ = database.OpenDB(c.MkDir())
|
||||
s.factory = NewCollectionFactory(s.db)
|
||||
|
||||
s.root = c.MkDir()
|
||||
s.publishedStorage = files.NewPublishedStorage(s.root)
|
||||
s.packagePool = files.NewPackagePool(s.root)
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
repo.packageRefs = s.reflist
|
||||
s.factory.RemoteRepoCollection().Add(repo)
|
||||
|
||||
s.localRepo = NewLocalRepo("local1", "comment1")
|
||||
s.localRepo.packageRefs = s.reflist
|
||||
s.factory.LocalRepoCollection().Add(s.localRepo)
|
||||
|
||||
s.snapshot, _ = NewSnapshotFromRepository("snap", repo)
|
||||
s.factory.SnapshotCollection().Add(s.snapshot)
|
||||
|
||||
s.packageCollection = s.factory.PackageCollection()
|
||||
s.packageCollection.Update(s.p1)
|
||||
s.packageCollection.Update(s.p2)
|
||||
s.packageCollection.Update(s.p3)
|
||||
|
||||
s.repo, _ = NewPublishedRepo("ppa", "squeeze", "main", nil, s.snapshot, s.factory)
|
||||
|
||||
s.repo2, _ = NewPublishedRepo("ppa", "maverick", "main", nil, s.localRepo, s.factory)
|
||||
|
||||
poolPath, _ := s.packagePool.Path(s.p1.Files()[0].Filename, s.p1.Files()[0].Checksums.MD5)
|
||||
err := os.MkdirAll(filepath.Dir(poolPath), 0755)
|
||||
f, err := os.Create(poolPath)
|
||||
c.Assert(err, IsNil)
|
||||
f.Close()
|
||||
}
|
||||
|
||||
// TearDownTest closes the per-test database.
func (s *PublishedRepoSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
// TestPrefixNormalization checks that NewPublishedRepo cleans prefix paths
// (slashes, ".." segments) and rejects prefixes that escape the root or
// collide with the reserved "dists"/"pool" names.
func (s *PublishedRepoSuite) TestPrefixNormalization(c *C) {

	for _, t := range []struct {
		prefix        string
		expected      string
		errorExpected string
	}{
		{
			prefix:   "ppa",
			expected: "ppa",
		},
		{
			prefix:   "",
			expected: ".",
		},
		{
			prefix:   "/",
			expected: ".",
		},
		{
			prefix:   "//",
			expected: ".",
		},
		{
			prefix:   "//ppa/",
			expected: "ppa",
		},
		{
			prefix:   "ppa/..",
			expected: ".",
		},
		{
			prefix:   "ppa/ubuntu/",
			expected: "ppa/ubuntu",
		},
		{
			prefix:   "ppa/../ubuntu/",
			expected: "ubuntu",
		},
		{
			prefix:        "../ppa/",
			errorExpected: "invalid prefix .*",
		},
		{
			prefix:        "../ppa/../ppa/",
			errorExpected: "invalid prefix .*",
		},
		{
			prefix:        "ppa/dists",
			errorExpected: "invalid prefix .*",
		},
		{
			prefix:        "ppa/pool",
			errorExpected: "invalid prefix .*",
		},
	} {
		repo, err := NewPublishedRepo(t.prefix, "squeeze", "main", nil, s.snapshot, s.factory)
		if t.errorExpected != "" {
			c.Check(err, ErrorMatches, t.errorExpected)
		} else {
			c.Check(repo.Prefix, Equals, t.expected)
		}
	}
}
|
||||
|
||||
// TestDistributionComponentGuessing verifies that empty distribution/component
// are inferred from the source (snapshot root or local repo defaults), and
// that an unguessable distribution is an error.
func (s *PublishedRepoSuite) TestDistributionComponentGuessing(c *C) {
	repo, err := NewPublishedRepo("ppa", "", "", nil, s.snapshot, s.factory)
	c.Check(err, IsNil)
	c.Check(repo.Distribution, Equals, "squeeze")
	c.Check(repo.Component, Equals, "main")

	repo, err = NewPublishedRepo("ppa", "wheezy", "", nil, s.snapshot, s.factory)
	c.Check(err, IsNil)
	c.Check(repo.Distribution, Equals, "wheezy")
	c.Check(repo.Component, Equals, "main")

	repo, err = NewPublishedRepo("ppa", "", "non-free", nil, s.snapshot, s.factory)
	c.Check(err, IsNil)
	c.Check(repo.Distribution, Equals, "squeeze")
	c.Check(repo.Component, Equals, "non-free")

	repo, err = NewPublishedRepo("ppa", "squeeze", "", nil, s.localRepo, s.factory)
	c.Check(err, IsNil)
	c.Check(repo.Distribution, Equals, "squeeze")
	c.Check(repo.Component, Equals, "main")

	// Local repo without defaults: distribution cannot be guessed.
	repo, err = NewPublishedRepo("ppa", "", "main", nil, s.localRepo, s.factory)
	c.Check(err, ErrorMatches, "unable to guess distribution name, please specify explicitly")

	// With defaults set, guessing uses them.
	s.localRepo.DefaultDistribution = "precise"
	s.localRepo.DefaultComponent = "contrib"
	s.factory.LocalRepoCollection().Update(s.localRepo)

	repo, err = NewPublishedRepo("ppa", "", "", nil, s.localRepo, s.factory)
	c.Check(err, IsNil)
	c.Check(repo.Distribution, Equals, "precise")
	c.Check(repo.Component, Equals, "contrib")
}
|
||||
|
||||
// TestPublish publishes the snapshot-backed repo and checks the generated
// Release file, the Packages index contents, and the linked pool file.
func (s *PublishedRepoSuite) TestPublish(c *C) {
	err := s.repo.Publish(s.packagePool, s.publishedStorage, s.factory, &NullSigner{}, nil)
	c.Assert(err, IsNil)

	c.Check(s.repo.Architectures, DeepEquals, []string{"i386"})

	rf, err := os.Open(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/Release"))
	c.Assert(err, IsNil)

	cfr := NewControlFileReader(rf)
	st, err := cfr.ReadStanza()
	c.Assert(err, IsNil)

	c.Check(st["Origin"], Equals, "ppa squeeze")
	c.Check(st["Components"], Equals, "main")
	c.Check(st["Architectures"], Equals, "i386")

	pf, err := os.Open(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/main/binary-i386/Packages"))
	c.Assert(err, IsNil)

	cfr = NewControlFileReader(pf)

	// The fixture contains three packages sharing the same file.
	for i := 0; i < 3; i++ {
		st, err = cfr.ReadStanza()
		c.Assert(err, IsNil)

		c.Check(st["Filename"], Equals, "pool/main/a/alien-arena/alien-arena-common_7.40-2_i386.deb")
	}

	// End of index: ReadStanza returns nil stanza with nil error.
	st, err = cfr.ReadStanza()
	c.Assert(err, IsNil)
	c.Assert(st, IsNil)

	_, err = os.Stat(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main/a/alien-arena/alien-arena-common_7.40-2_i386.deb"))
	c.Assert(err, IsNil)
}
|
||||
|
||||
// TestPublishNoSigner verifies publishing works with a nil signer (Release
// file is produced unsigned).
func (s *PublishedRepoSuite) TestPublishNoSigner(c *C) {
	err := s.repo.Publish(s.packagePool, s.publishedStorage, s.factory, nil, nil)
	c.Assert(err, IsNil)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/Release"), PathExists)
}
|
||||
|
||||
// TestPublishLocalRepo verifies publishing a local-repo-backed published repo.
func (s *PublishedRepoSuite) TestPublishLocalRepo(c *C) {
	err := s.repo2.Publish(s.packagePool, s.publishedStorage, s.factory, nil, nil)
	c.Assert(err, IsNil)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/maverick/Release"), PathExists)
}
|
||||
|
||||
// TestString checks the human-readable formatting for snapshot and local-repo
// sources, empty prefix, and explicit architecture lists.
func (s *PublishedRepoSuite) TestString(c *C) {
	c.Check(s.repo.String(), Equals,
		"ppa/squeeze (main) [] publishes [snap]: Snapshot from mirror [yandex]: http://mirror.yandex.ru/debian/ squeeze")
	c.Check(s.repo2.String(), Equals,
		"ppa/maverick (main) [] publishes [local1]: comment1")
	repo, _ := NewPublishedRepo("", "squeeze", "main", []string{"s390"}, s.snapshot, s.factory)
	c.Check(repo.String(), Equals,
		"./squeeze (main) [s390] publishes [snap]: Snapshot from mirror [yandex]: http://mirror.yandex.ru/debian/ squeeze")
	repo, _ = NewPublishedRepo("", "squeeze", "main", []string{"i386", "amd64"}, s.snapshot, s.factory)
	c.Check(repo.String(), Equals,
		"./squeeze (main) [i386, amd64] publishes [snap]: Snapshot from mirror [yandex]: http://mirror.yandex.ru/debian/ squeeze")
}
|
||||
|
||||
// TestKey checks the database key layout: "U" + prefix + ">>" + distribution.
func (s *PublishedRepoSuite) TestKey(c *C) {
	c.Check(s.repo.Key(), DeepEquals, []byte("Uppa>>squeeze"))
}
|
||||
|
||||
// TestEncodeDecode round-trips both published repos through msgpack.
// The unexported source pointers (snapshot/localRepo) are not serialized,
// so they are nilled on the originals before comparing.
func (s *PublishedRepoSuite) TestEncodeDecode(c *C) {
	encoded := s.repo.Encode()
	repo := &PublishedRepo{}
	err := repo.Decode(encoded)

	s.repo.snapshot = nil
	c.Assert(err, IsNil)
	c.Assert(repo, DeepEquals, s.repo)

	encoded2 := s.repo2.Encode()
	repo2 := &PublishedRepo{}
	err = repo2.Decode(encoded2)

	s.repo2.localRepo = nil
	c.Assert(err, IsNil)
	c.Assert(repo2, DeepEquals, s.repo2)
}
|
||||
|
||||
// PublishedRepoCollectionSuite exercises PublishedRepoCollection CRUD and lookups.
type PublishedRepoCollectionSuite struct {
	PackageListMixinSuite
	db                         database.Storage
	factory                    *CollectionFactory
	snapshotCollection         *SnapshotCollection
	collection                 *PublishedRepoCollection
	snap1, snap2               *Snapshot
	localRepo                  *LocalRepo
	repo1, repo2, repo3, repo4 *PublishedRepo // repo3 duplicates repo1's prefix/distribution
}
|
||||
|
||||
// Register the suite with gocheck.
var _ = Suite(&PublishedRepoCollectionSuite{})
|
||||
|
||||
// SetUpTest builds two snapshots, one local repo, and four published repos
// (repo3 intentionally collides with repo1 on prefix/distribution).
func (s *PublishedRepoCollectionSuite) SetUpTest(c *C) {
	s.db, _ = database.OpenDB(c.MkDir())
	s.factory = NewCollectionFactory(s.db)

	s.snapshotCollection = s.factory.SnapshotCollection()

	s.snap1 = NewSnapshotFromPackageList("snap1", []*Snapshot{}, NewPackageList(), "desc1")
	s.snap2 = NewSnapshotFromPackageList("snap2", []*Snapshot{}, NewPackageList(), "desc2")

	s.snapshotCollection.Add(s.snap1)
	s.snapshotCollection.Add(s.snap2)

	s.localRepo = NewLocalRepo("local1", "comment1")
	s.factory.LocalRepoCollection().Add(s.localRepo)

	s.repo1, _ = NewPublishedRepo("ppa", "anaconda", "main", []string{}, s.snap1, s.factory)
	s.repo2, _ = NewPublishedRepo("", "anaconda", "main", []string{}, s.snap2, s.factory)
	s.repo3, _ = NewPublishedRepo("ppa", "anaconda", "main", []string{}, s.snap2, s.factory)
	s.repo4, _ = NewPublishedRepo("ppa", "precise", "main", []string{}, s.localRepo, s.factory)

	s.collection = s.factory.PublishedRepoCollection()
}
|
||||
|
||||
// TearDownTest closes the per-test database.
func (s *PublishedRepoCollectionSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
// TestAddByPrefixDistribution covers Add duplicate rejection, CheckDuplicate,
// ByPrefixDistribution lookup, and reloading the collection from the DB.
func (s *PublishedRepoCollectionSuite) TestAddByPrefixDistribution(c *C) {
	// NOTE(review): the pattern "*.not found" looks like a typo for
	// ".*not found" — confirm gocheck accepts it as intended.
	r, err := s.collection.ByPrefixDistribution("ppa", "anaconda")
	c.Assert(err, ErrorMatches, "*.not found")

	c.Assert(s.collection.Add(s.repo1), IsNil)
	c.Assert(s.collection.Add(s.repo1), ErrorMatches, ".*already exists")
	c.Assert(s.collection.CheckDuplicate(s.repo2), IsNil)
	c.Assert(s.collection.Add(s.repo2), IsNil)
	c.Assert(s.collection.Add(s.repo3), ErrorMatches, ".*already exists")
	c.Assert(s.collection.CheckDuplicate(s.repo3), Equals, s.repo1)
	c.Assert(s.collection.Add(s.repo4), IsNil)

	r, err = s.collection.ByPrefixDistribution("ppa", "anaconda")
	c.Assert(err, IsNil)

	err = s.collection.LoadComplete(r, s.factory)
	c.Assert(err, IsNil)
	c.Assert(r.String(), Equals, s.repo1.String())

	// A freshly-loaded collection must see the persisted repo too.
	collection := NewPublishedRepoCollection(s.db)
	r, err = collection.ByPrefixDistribution("ppa", "anaconda")
	c.Assert(err, IsNil)

	err = s.collection.LoadComplete(r, s.factory)
	c.Assert(err, IsNil)
	c.Assert(r.String(), Equals, s.repo1.String())
}
|
||||
|
||||
// TestByUUID checks ByUUID lookup before and after adding the repo.
func (s *PublishedRepoCollectionSuite) TestByUUID(c *C) {
	r, err := s.collection.ByUUID(s.repo1.UUID)
	c.Assert(err, ErrorMatches, "*.not found")

	c.Assert(s.collection.Add(s.repo1), IsNil)

	r, err = s.collection.ByUUID(s.repo1.UUID)
	c.Assert(err, IsNil)

	err = s.collection.LoadComplete(r, s.factory)
	c.Assert(err, IsNil)
	c.Assert(r.String(), Equals, s.repo1.String())
}
|
||||
|
||||
// TestUpdateLoadComplete verifies Update persists repos and LoadComplete
// resolves both snapshot and local-repo sources after a reload.
func (s *PublishedRepoCollectionSuite) TestUpdateLoadComplete(c *C) {
	c.Assert(s.collection.Update(s.repo1), IsNil)
	c.Assert(s.collection.Update(s.repo4), IsNil)

	collection := NewPublishedRepoCollection(s.db)
	r, err := collection.ByPrefixDistribution("ppa", "anaconda")
	c.Assert(err, IsNil)
	// Source pointers are not persisted; they appear only after LoadComplete.
	c.Assert(r.snapshot, IsNil)
	c.Assert(s.collection.LoadComplete(r, s.factory), IsNil)
	c.Assert(r.snapshot.UUID, Equals, s.repo1.snapshot.UUID)

	r, err = collection.ByPrefixDistribution("ppa", "precise")
	c.Assert(err, IsNil)
	c.Assert(r.localRepo, IsNil)
	c.Assert(s.collection.LoadComplete(r, s.factory), IsNil)
	c.Assert(r.localRepo.UUID, Equals, s.repo4.localRepo.UUID)
}
|
||||
|
||||
// TestForEachAndLen checks iteration count, Len, and error propagation
// from the ForEach handler.
func (s *PublishedRepoCollectionSuite) TestForEachAndLen(c *C) {
	s.collection.Add(s.repo1)

	count := 0
	err := s.collection.ForEach(func(*PublishedRepo) error {
		count++
		return nil
	})
	c.Assert(count, Equals, 1)
	c.Assert(err, IsNil)

	c.Check(s.collection.Len(), Equals, 1)

	e := errors.New("c")

	err = s.collection.ForEach(func(*PublishedRepo) error {
		return e
	})
	c.Assert(err, Equals, e)
}
|
||||
|
||||
// TestBySnapshot checks that BySnapshot filters published repos by their
// source snapshot UUID.
func (s *PublishedRepoCollectionSuite) TestBySnapshot(c *C) {
	c.Check(s.collection.Add(s.repo1), IsNil)
	c.Check(s.collection.Add(s.repo2), IsNil)

	c.Check(s.collection.BySnapshot(s.snap1), DeepEquals, []*PublishedRepo{s.repo1})
	c.Check(s.collection.BySnapshot(s.snap2), DeepEquals, []*PublishedRepo{s.repo2})
}
|
||||
|
||||
// PublishedRepoRemoveSuite exercises RemoveFiles / Remove directory cleanup.
type PublishedRepoRemoveSuite struct {
	PackageListMixinSuite
	db                         database.Storage
	factory                    *CollectionFactory
	snapshotCollection         *SnapshotCollection
	collection                 *PublishedRepoCollection
	root                       string
	publishedStorage           aptly.PublishedStorage
	snap1                      *Snapshot
	repo1, repo2, repo3, repo4 *PublishedRepo // repos sharing/not sharing prefix & component
}
|
||||
|
||||
// Register the suite with gocheck.
var _ = Suite(&PublishedRepoRemoveSuite{})
|
||||
|
||||
// SetUpTest publishes four repos from one snapshot and pre-creates the
// on-disk directory layout that RemoveFiles is expected to prune.
func (s *PublishedRepoRemoveSuite) SetUpTest(c *C) {
	s.db, _ = database.OpenDB(c.MkDir())
	s.factory = NewCollectionFactory(s.db)

	s.snapshotCollection = s.factory.SnapshotCollection()

	s.snap1 = NewSnapshotFromPackageList("snap1", []*Snapshot{}, NewPackageList(), "desc1")

	s.snapshotCollection.Add(s.snap1)

	s.repo1, _ = NewPublishedRepo("ppa", "anaconda", "main", []string{}, s.snap1, s.factory)
	s.repo2, _ = NewPublishedRepo("", "anaconda", "main", []string{}, s.snap1, s.factory)
	s.repo3, _ = NewPublishedRepo("ppa", "meduza", "main", []string{}, s.snap1, s.factory)
	s.repo4, _ = NewPublishedRepo("ppa", "osminog", "contrib", []string{}, s.snap1, s.factory)

	s.collection = s.factory.PublishedRepoCollection()
	s.collection.Add(s.repo1)
	s.collection.Add(s.repo2)
	s.collection.Add(s.repo3)
	s.collection.Add(s.repo4)

	// Pre-create the directory tree so removal effects are observable.
	s.root = c.MkDir()
	s.publishedStorage = files.NewPublishedStorage(s.root)
	s.publishedStorage.MkDir("ppa/dists/anaconda")
	s.publishedStorage.MkDir("ppa/dists/meduza")
	s.publishedStorage.MkDir("ppa/dists/osminog")
	s.publishedStorage.MkDir("ppa/pool/main")
	s.publishedStorage.MkDir("ppa/pool/contrib")
	s.publishedStorage.MkDir("dists/anaconda")
	s.publishedStorage.MkDir("pool/main")
}
|
||||
|
||||
// TearDownTest closes the per-test database.
func (s *PublishedRepoRemoveSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
// TestRemoveFilesOnlyDist: removing neither prefix nor pool component
// deletes only this repo's dists directory.
func (s *PublishedRepoRemoveSuite) TestRemoveFilesOnlyDist(c *C) {
	s.repo1.RemoveFiles(s.publishedStorage, false, false)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), PathExists)
}
|
||||
|
||||
// TestRemoveFilesWithPool: with pool cleanup enabled (but no prefix cleanup)
// the repo's dists/<distribution> and its pool/<component> are removed;
// other distributions and the root publish stay.
func (s *PublishedRepoRemoveSuite) TestRemoveFilesWithPool(c *C) {
	s.repo1.RemoveFiles(s.publishedStorage, false, true)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), PathExists)
}
|
||||
|
||||
// TestRemoveFilesWithPrefix: with both prefix and pool cleanup the whole
// "ppa" prefix tree is removed; the root ("." prefix) publish is untouched.
func (s *PublishedRepoRemoveSuite) TestRemoveFilesWithPrefix(c *C) {
	s.repo1.RemoveFiles(s.publishedStorage, true, true)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), PathExists)
}
|
||||
|
||||
// TestRemoveFilesWithPrefixRoot: removing a repo published at the root
// ("" prefix) with prefix+pool cleanup deletes the root dists/ and pool/
// trees but leaves the "ppa" prefix intact.
func (s *PublishedRepoRemoveSuite) TestRemoveFilesWithPrefixRoot(c *C) {
	s.repo2.RemoveFiles(s.publishedStorage, true, true)

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), Not(PathExists))
}
|
||||
|
||||
// TestRemoveRepo1and2 removes two published repos from the "ppa" prefix one
// by one via the collection, verifying database state (including through a
// freshly-created collection over the same db), on-disk cleanup after each
// removal, and that removing a missing repo reports "not found".
func (s *PublishedRepoRemoveSuite) TestRemoveRepo1and2(c *C) {
	err := s.collection.Remove(s.publishedStorage, "ppa", "anaconda")
	c.Check(err, IsNil)

	_, err = s.collection.ByPrefixDistribution("ppa", "anaconda")
	c.Check(err, ErrorMatches, ".*not found")

	// a new collection over the same db must also see the removal
	collection := NewPublishedRepoCollection(s.db)
	_, err = collection.ByPrefixDistribution("ppa", "anaconda")
	c.Check(err, ErrorMatches, ".*not found")

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), PathExists)

	// second removal of the same repo must fail
	err = s.collection.Remove(s.publishedStorage, "ppa", "anaconda")
	c.Check(err, ErrorMatches, ".*not found")

	err = s.collection.Remove(s.publishedStorage, "ppa", "meduza")
	c.Check(err, IsNil)

	// ppa/pool/main is gone now: no remaining repo in "ppa" uses component "main"
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/main"), PathExists)
}
|
||||
|
||||
// TestRemoveRepo3 removes the repo published at the root ("." prefix): the
// root dists/ and pool/ trees disappear while the "ppa" prefix is untouched.
func (s *PublishedRepoRemoveSuite) TestRemoveRepo3(c *C) {
	err := s.collection.Remove(s.publishedStorage, ".", "anaconda")
	c.Check(err, IsNil)

	_, err = s.collection.ByPrefixDistribution(".", "anaconda")
	c.Check(err, ErrorMatches, ".*not found")

	// a new collection over the same db must also see the removal
	collection := NewPublishedRepoCollection(s.db)
	_, err = collection.ByPrefixDistribution(".", "anaconda")
	c.Check(err, ErrorMatches, ".*not found")

	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/anaconda"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/meduza"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/osminog"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/main"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "ppa/pool/contrib"), PathExists)
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "dists/"), Not(PathExists))
	c.Check(filepath.Join(s.publishedStorage.PublicPath(), "pool/"), Not(PathExists))
}
|
||||
286
debian/reflist.go
vendored
286
debian/reflist.go
vendored
@@ -1,286 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/ugorji/go/codec"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// PackageRefList is a list of keys of packages, this is basis for snapshot
// and similar stuff.
//
// Refs are sorted in lexicographical order. Each ref is a package key of the
// form "P<arch> <name> <version>" (e.g. "Pi386 app 1.1~bp2").
type PackageRefList struct {
	// List of package keys
	Refs [][]byte
}
|
||||
|
||||
// Verify interface compliance at compile time: PackageRefList must be
// usable with sort.Sort.
var (
	_ sort.Interface = &PackageRefList{}
)
|
||||
|
||||
// NewPackageRefList creates empty PackageRefList
|
||||
func NewPackageRefList() *PackageRefList {
|
||||
return &PackageRefList{}
|
||||
}
|
||||
|
||||
// NewPackageRefListFromPackageList creates PackageRefList from PackageList
|
||||
func NewPackageRefListFromPackageList(list *PackageList) *PackageRefList {
|
||||
reflist := &PackageRefList{}
|
||||
reflist.Refs = make([][]byte, list.Len())
|
||||
|
||||
i := 0
|
||||
for _, p := range list.packages {
|
||||
reflist.Refs[i] = p.Key("")
|
||||
i++
|
||||
}
|
||||
|
||||
sort.Sort(reflist)
|
||||
|
||||
return reflist
|
||||
}
|
||||
|
||||
// Len returns number of refs (part of sort.Interface).
func (l *PackageRefList) Len() int {
	return len(l.Refs)
}
|
||||
|
||||
// Swap swaps two refs (part of sort.Interface).
func (l *PackageRefList) Swap(i, j int) {
	l.Refs[i], l.Refs[j] = l.Refs[j], l.Refs[i]
}
|
||||
|
||||
// Less compares two refs in lexicographical order (part of sort.Interface).
func (l *PackageRefList) Less(i, j int) bool {
	return bytes.Compare(l.Refs[i], l.Refs[j]) < 0
}
|
||||
|
||||
// Encode does msgpack encoding of PackageRefList.
func (l *PackageRefList) Encode() []byte {
	var buf bytes.Buffer

	encoder := codec.NewEncoder(&buf, &codec.MsgpackHandle{})
	// NOTE(review): Encode's error is silently discarded; presumably encoding
	// an in-memory struct to a bytes.Buffer cannot fail here — confirm.
	encoder.Encode(l)

	return buf.Bytes()
}
|
||||
|
||||
// Decode decodes msgpack representation into PackageRefList.
func (l *PackageRefList) Decode(input []byte) error {
	decoder := codec.NewDecoderBytes(input, &codec.MsgpackHandle{})
	return decoder.Decode(l)
}
|
||||
|
||||
// ForEach calls handler for each package ref in list
|
||||
func (l *PackageRefList) ForEach(handler func([]byte) error) error {
|
||||
var err error
|
||||
for _, p := range l.Refs {
|
||||
err = handler(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Substract returns all packages in l that are not in r.
// (Name misspells "Subtract"; kept as-is since it is part of the public API.)
//
// Single linear merge-style scan; relies on both reflists being sorted.
func (l *PackageRefList) Substract(r *PackageRefList) *PackageRefList {
	result := &PackageRefList{Refs: make([][]byte, 0, 128)}

	// pointer to left and right reflists
	il, ir := 0, 0
	// length of reflists
	ll, lr := l.Len(), r.Len()

	for il < ll || ir < lr {
		if il == ll {
			// left list exhausted, we got the result
			break
		}
		if ir == lr {
			// right list exhausted, append what is left to result
			result.Refs = append(result.Refs, l.Refs[il:]...)
			break
		}

		rel := bytes.Compare(l.Refs[il], r.Refs[ir])
		if rel == 0 {
			// r contains entry from l, so we skip it
			il++
			ir++
		} else if rel < 0 {
			// item il is not in r, append
			result.Refs = append(result.Refs, l.Refs[il])
			il++
		} else {
			// skip over to next item in r
			ir++
		}
	}

	return result
}
|
||||
|
||||
// PackageDiff is a difference between two packages in a list.
//
// If Left & Right are present, difference is in package version.
// If Left is nil, package is present only in Right.
// If Right is nil, package is present only in Left.
type PackageDiff struct {
	Left, Right *Package
}
|
||||
|
||||
// PackageDiffs is a list of PackageDiff records.
type PackageDiffs []PackageDiff
|
||||
|
||||
// Diff calculates difference between two reflists, loading full packages
// from packageCollection only when needed. Relies on both reflists being
// sorted; runs as a single two-pointer scan with per-side package caching.
func (l *PackageRefList) Diff(r *PackageRefList, packageCollection *PackageCollection) (result PackageDiffs, err error) {
	result = make(PackageDiffs, 0, 128)

	// pointer to left and right reflists
	il, ir := 0, 0
	// length of reflists
	ll, lr := l.Len(), r.Len()
	// cached loaded packages on the left & right
	pl, pr := (*Package)(nil), (*Package)(nil)

	// until we reached end of both lists
	for il < ll || ir < lr {
		// if we've exhausted left list, pull the rest from the right
		if il == ll {
			pr, err = packageCollection.ByKey(r.Refs[ir])
			if err != nil {
				return nil, err
			}
			result = append(result, PackageDiff{Left: nil, Right: pr})
			ir++
			continue
		}
		// if we've exhausted right list, pull the rest from the left
		if ir == lr {
			pl, err = packageCollection.ByKey(l.Refs[il])
			if err != nil {
				return nil, err
			}
			result = append(result, PackageDiff{Left: pl, Right: nil})
			il++
			continue
		}

		// refs on both sides are present, load them
		rl, rr := l.Refs[il], r.Refs[ir]
		// compare refs
		rel := bytes.Compare(rl, rr)

		if rel == 0 {
			// refs are identical, so are packages, advance pointer
			il++
			ir++
			pl, pr = nil, nil
		} else {
			// load pl & pr if they haven't been loaded before
			// (cache survives across iterations until its side advances)
			if pl == nil {
				pl, err = packageCollection.ByKey(rl)
				if err != nil {
					return nil, err
				}
			}

			if pr == nil {
				pr, err = packageCollection.ByKey(rr)
				if err != nil {
					return nil, err
				}
			}

			// is pl & pr the same package, but different version?
			if pl.Name == pr.Name && pl.Architecture == pr.Architecture {
				result = append(result, PackageDiff{Left: pl, Right: pr})
				il++
				ir++
				pl, pr = nil, nil
			} else {
				// otherwise pl or pr is missing on one of the sides
				if rel < 0 {
					result = append(result, PackageDiff{Left: pl, Right: nil})
					il++
					pl = nil
				} else {
					result = append(result, PackageDiff{Left: nil, Right: pr})
					ir++
					pr = nil
				}
			}

		}
	}

	return
}
|
||||
|
||||
// Merge merges reflist r into current reflist. If overrideMatching, merge replaces matching packages (by architecture/name)
|
||||
// with reference from r, otherwise all packages are saved.
|
||||
func (l *PackageRefList) Merge(r *PackageRefList, overrideMatching bool) (result *PackageRefList) {
|
||||
// pointer to left and right reflists
|
||||
il, ir := 0, 0
|
||||
// length of reflists
|
||||
ll, lr := l.Len(), r.Len()
|
||||
|
||||
result = &PackageRefList{}
|
||||
result.Refs = make([][]byte, 0, ll+lr)
|
||||
|
||||
// until we reached end of both lists
|
||||
for il < ll || ir < lr {
|
||||
// if we've exhausted left list, pull the rest from the right
|
||||
if il == ll {
|
||||
result.Refs = append(result.Refs, r.Refs[ir:]...)
|
||||
break
|
||||
}
|
||||
// if we've exhausted right list, pull the rest from the left
|
||||
if ir == lr {
|
||||
result.Refs = append(result.Refs, l.Refs[il:]...)
|
||||
break
|
||||
}
|
||||
|
||||
// refs on both sides are present, load them
|
||||
rl, rr := l.Refs[il], r.Refs[ir]
|
||||
// compare refs
|
||||
rel := bytes.Compare(rl, rr)
|
||||
|
||||
if rel == 0 {
|
||||
// refs are identical, so are packages, advance pointer
|
||||
result.Refs = append(result.Refs, l.Refs[il])
|
||||
il++
|
||||
ir++
|
||||
} else {
|
||||
if overrideMatching {
|
||||
partsL := bytes.Split(rl, []byte(" "))
|
||||
archL, nameL := partsL[0][1:], partsL[1]
|
||||
|
||||
partsR := bytes.Split(rr, []byte(" "))
|
||||
archR, nameR := partsR[0][1:], partsR[1]
|
||||
|
||||
if bytes.Compare(archL, archR) == 0 && bytes.Compare(nameL, nameR) == 0 {
|
||||
// override with package from the right
|
||||
result.Refs = append(result.Refs, r.Refs[ir])
|
||||
il++
|
||||
ir++
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise append smallest of two
|
||||
if rel < 0 {
|
||||
result.Refs = append(result.Refs, l.Refs[il])
|
||||
il++
|
||||
} else {
|
||||
result.Refs = append(result.Refs, r.Refs[ir])
|
||||
ir++
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
275
debian/reflist_test.go
vendored
275
debian/reflist_test.go
vendored
@@ -1,275 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/smira/aptly/database"
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
// PackageRefListSuite tests PackageRefList operations over a small set of
// packages built from control-file stanzas.
type PackageRefListSuite struct {
	// Simple list with "real" packages from stanzas
	list                   *PackageList
	p1, p2, p3, p4, p5, p6 *Package
}

// Register the suite with gocheck.
var _ = Suite(&PackageRefListSuite{})
|
||||
|
||||
// SetUpTest builds six package fixtures: p1/p2 identical, p3/p5 with
// different names, p4 with a conflicting Size, p6 with a different version.
func (s *PackageRefListSuite) SetUpTest(c *C) {
	s.list = NewPackageList()

	s.p1 = NewPackageFromControlFile(packageStanza.Copy())
	s.p2 = NewPackageFromControlFile(packageStanza.Copy())
	stanza := packageStanza.Copy()
	stanza["Package"] = "mars-invaders"
	s.p3 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Size"] = "42"
	s.p4 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Package"] = "lonely-strangers"
	s.p5 = NewPackageFromControlFile(stanza)
	stanza = packageStanza.Copy()
	stanza["Version"] = "99.1"
	s.p6 = NewPackageFromControlFile(stanza)
}
|
||||
|
||||
// TestNewPackageListFromRefList verifies that materializing a reflist into a
// PackageList fails when a referenced package is missing from the collection,
// succeeds once all packages are stored, and that a nil reflist yields an
// empty list.
func (s *PackageRefListSuite) TestNewPackageListFromRefList(c *C) {
	db, _ := database.OpenDB(c.MkDir())
	coll := NewPackageCollection(db)
	coll.Update(s.p1)
	coll.Update(s.p3)

	s.list.Add(s.p1)
	s.list.Add(s.p3)
	s.list.Add(s.p5)
	s.list.Add(s.p6)

	reflist := NewPackageRefListFromPackageList(s.list)

	// p5/p6 not in collection yet -> load must fail
	_, err := NewPackageListFromRefList(reflist, coll, nil)
	c.Assert(err, ErrorMatches, "unable to load package with key.*")

	coll.Update(s.p5)
	coll.Update(s.p6)

	list, err := NewPackageListFromRefList(reflist, coll, nil)
	c.Assert(err, IsNil)
	c.Check(list.Len(), Equals, 4)
	// p4 conflicts with p1 (same key, different Size)
	c.Check(list.Add(s.p4), ErrorMatches, "conflict in package.*")

	list, err = NewPackageListFromRefList(nil, coll, nil)
	c.Assert(err, IsNil)
	c.Check(list.Len(), Equals, 0)
}
|
||||
|
||||
// TestNewPackageRefList verifies that refs come out sorted lexicographically
// and that the no-arg constructor yields an empty list.
func (s *PackageRefListSuite) TestNewPackageRefList(c *C) {
	s.list.Add(s.p1)
	s.list.Add(s.p3)
	s.list.Add(s.p5)
	s.list.Add(s.p6)

	reflist := NewPackageRefListFromPackageList(s.list)
	c.Assert(reflist.Len(), Equals, 4)
	c.Check(reflist.Refs[0], DeepEquals, []byte(s.p1.Key("")))
	c.Check(reflist.Refs[1], DeepEquals, []byte(s.p6.Key("")))
	c.Check(reflist.Refs[2], DeepEquals, []byte(s.p5.Key("")))
	c.Check(reflist.Refs[3], DeepEquals, []byte(s.p3.Key("")))

	reflist = NewPackageRefList()
	c.Check(reflist.Len(), Equals, 0)
}
|
||||
|
||||
// TestPackageRefListEncodeDecode verifies the msgpack Encode/Decode
// round-trip preserves the refs exactly.
func (s *PackageRefListSuite) TestPackageRefListEncodeDecode(c *C) {
	s.list.Add(s.p1)
	s.list.Add(s.p3)
	s.list.Add(s.p5)
	s.list.Add(s.p6)

	reflist := NewPackageRefListFromPackageList(s.list)

	reflist2 := &PackageRefList{}
	err := reflist2.Decode(reflist.Encode())
	c.Assert(err, IsNil)
	c.Check(reflist2.Len(), Equals, reflist.Len())
	c.Check(reflist2.Refs, DeepEquals, reflist.Refs)
}
|
||||
|
||||
// TestPackageRefListForeach verifies ForEach visits every ref and propagates
// the handler's error.
func (s *PackageRefListSuite) TestPackageRefListForeach(c *C) {
	s.list.Add(s.p1)
	s.list.Add(s.p3)
	s.list.Add(s.p5)
	s.list.Add(s.p6)

	reflist := NewPackageRefListFromPackageList(s.list)

	Len := 0
	err := reflist.ForEach(func([]byte) error {
		Len++
		return nil
	})

	c.Check(Len, Equals, 4)
	c.Check(err, IsNil)

	e := errors.New("b")

	err = reflist.ForEach(func([]byte) error {
		return e
	})

	c.Check(err, Equals, e)
}
|
||||
|
||||
// TestSubstract verifies set-difference behavior of Substract over sorted
// synthetic refs, including empty operands on either side.
func (s *PackageRefListSuite) TestSubstract(c *C) {
	r1 := []byte("r1")
	r2 := []byte("r2")
	r3 := []byte("r3")
	r4 := []byte("r4")
	r5 := []byte("r5")

	empty := &PackageRefList{Refs: [][]byte{}}
	l1 := &PackageRefList{Refs: [][]byte{r1, r2, r3, r4}}
	l2 := &PackageRefList{Refs: [][]byte{r1, r3}}
	l3 := &PackageRefList{Refs: [][]byte{r2, r4}}
	l4 := &PackageRefList{Refs: [][]byte{r4, r5}}
	l5 := &PackageRefList{Refs: [][]byte{r1, r2, r3}}

	c.Check(l1.Substract(empty), DeepEquals, l1)
	c.Check(l1.Substract(l2), DeepEquals, l3)
	c.Check(l1.Substract(l3), DeepEquals, l2)
	c.Check(l1.Substract(l4), DeepEquals, l5)
	c.Check(empty.Substract(l1), DeepEquals, empty)
	c.Check(l2.Substract(l3), DeepEquals, l2)
}
|
||||
|
||||
// TestDiff verifies Diff output: empty diff for identical lists, and a
// symmetric 4-entry diff (left-only, right-only and version-changed pairs)
// between two overlapping lists.
func (s *PackageRefListSuite) TestDiff(c *C) {
	db, _ := database.OpenDB(c.MkDir())
	coll := NewPackageCollection(db)

	packages := []*Package{
		&Package{Name: "lib", Version: "1.0", Architecture: "i386"},      //0
		&Package{Name: "dpkg", Version: "1.7", Architecture: "i386"},     //1
		&Package{Name: "data", Version: "1.1~bp1", Architecture: "all"},  //2
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "i386"},  //3
		&Package{Name: "app", Version: "1.1~bp2", Architecture: "i386"},  //4
		&Package{Name: "app", Version: "1.1~bp2", Architecture: "amd64"}, //5
		&Package{Name: "xyz", Version: "3.0", Architecture: "sparc"},     //6
	}

	for _, p := range packages {
		coll.Update(p)
	}

	listA := NewPackageList()
	listA.Add(packages[0])
	listA.Add(packages[1])
	listA.Add(packages[2])
	listA.Add(packages[3])
	listA.Add(packages[6])

	listB := NewPackageList()
	listB.Add(packages[0])
	listB.Add(packages[2])
	listB.Add(packages[4])
	listB.Add(packages[5])

	reflistA := NewPackageRefListFromPackageList(listA)
	reflistB := NewPackageRefListFromPackageList(listB)

	// diff of a list with itself is empty
	diffAA, err := reflistA.Diff(reflistA, coll)
	c.Check(err, IsNil)
	c.Check(diffAA, HasLen, 0)

	diffAB, err := reflistA.Diff(reflistB, coll)
	c.Check(err, IsNil)
	c.Check(diffAB, HasLen, 4)

	c.Check(diffAB[0].Left, IsNil)
	c.Check(diffAB[0].Right.String(), Equals, "app_1.1~bp2_amd64")

	c.Check(diffAB[1].Left.String(), Equals, "app_1.1~bp1_i386")
	c.Check(diffAB[1].Right.String(), Equals, "app_1.1~bp2_i386")

	c.Check(diffAB[2].Left.String(), Equals, "dpkg_1.7_i386")
	c.Check(diffAB[2].Right, IsNil)

	c.Check(diffAB[3].Left.String(), Equals, "xyz_3.0_sparc")
	c.Check(diffAB[3].Right, IsNil)

	// reversed diff mirrors Left/Right
	diffBA, err := reflistB.Diff(reflistA, coll)
	c.Check(err, IsNil)
	c.Check(diffBA, HasLen, 4)

	c.Check(diffBA[0].Right, IsNil)
	c.Check(diffBA[0].Left.String(), Equals, "app_1.1~bp2_amd64")

	c.Check(diffBA[1].Right.String(), Equals, "app_1.1~bp1_i386")
	c.Check(diffBA[1].Left.String(), Equals, "app_1.1~bp2_i386")

	c.Check(diffBA[2].Right.String(), Equals, "dpkg_1.7_i386")
	c.Check(diffBA[2].Left, IsNil)

	c.Check(diffBA[3].Right.String(), Equals, "xyz_3.0_sparc")
	c.Check(diffBA[3].Left, IsNil)

}
|
||||
|
||||
// TestMerge verifies Merge in both modes: with overrideMatching the right
// list's version wins for matching arch/name pairs; without it all refs from
// both sides are kept and the operation is symmetric.
func (s *PackageRefListSuite) TestMerge(c *C) {
	db, _ := database.OpenDB(c.MkDir())
	coll := NewPackageCollection(db)

	packages := []*Package{
		&Package{Name: "lib", Version: "1.0", Architecture: "i386"},      //0
		&Package{Name: "dpkg", Version: "1.7", Architecture: "i386"},     //1
		&Package{Name: "data", Version: "1.1~bp1", Architecture: "all"},  //2
		&Package{Name: "app", Version: "1.1~bp1", Architecture: "i386"},  //3
		&Package{Name: "app", Version: "1.1~bp2", Architecture: "i386"},  //4
		&Package{Name: "app", Version: "1.1~bp2", Architecture: "amd64"}, //5
		&Package{Name: "dpkg", Version: "1.0", Architecture: "i386"},     //6
		&Package{Name: "xyz", Version: "1.0", Architecture: "sparc"},     //7
	}

	for _, p := range packages {
		coll.Update(p)
	}

	listA := NewPackageList()
	listA.Add(packages[0])
	listA.Add(packages[1])
	listA.Add(packages[2])
	listA.Add(packages[3])
	listA.Add(packages[7])

	listB := NewPackageList()
	listB.Add(packages[0])
	listB.Add(packages[2])
	listB.Add(packages[4])
	listB.Add(packages[5])
	listB.Add(packages[6])

	reflistA := NewPackageRefListFromPackageList(listA)
	reflistB := NewPackageRefListFromPackageList(listB)

	// helper: render refs as strings for comparison
	toStrSlice := func(reflist *PackageRefList) (result []string) {
		result = make([]string, reflist.Len())
		for i, r := range reflist.Refs {
			result[i] = string(r)
		}
		return
	}

	mergeAB := reflistA.Merge(reflistB, true)
	mergeBA := reflistB.Merge(reflistA, true)

	c.Check(toStrSlice(mergeAB), DeepEquals,
		[]string{"Pall data 1.1~bp1", "Pamd64 app 1.1~bp2", "Pi386 app 1.1~bp2", "Pi386 dpkg 1.0", "Pi386 lib 1.0", "Psparc xyz 1.0"})
	c.Check(toStrSlice(mergeBA), DeepEquals,
		[]string{"Pall data 1.1~bp1", "Pamd64 app 1.1~bp2", "Pi386 app 1.1~bp1", "Pi386 dpkg 1.7", "Pi386 lib 1.0", "Psparc xyz 1.0"})

	mergeABall := reflistA.Merge(reflistB, false)
	mergeBAall := reflistB.Merge(reflistA, false)

	c.Check(mergeABall, DeepEquals, mergeBAall)
	c.Check(toStrSlice(mergeBAall), DeepEquals,
		[]string{"Pall data 1.1~bp1", "Pamd64 app 1.1~bp2", "Pi386 app 1.1~bp1", "Pi386 app 1.1~bp2", "Pi386 dpkg 1.0", "Pi386 dpkg 1.7", "Pi386 lib 1.0", "Psparc xyz 1.0"})
}
|
||||
625
debian/remote.go
vendored
625
debian/remote.go
vendored
@@ -1,625 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"code.google.com/p/go-uuid/uuid"
|
||||
"fmt"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/http"
|
||||
"github.com/smira/aptly/utils"
|
||||
"github.com/ugorji/go/codec"
|
||||
"log"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// RemoteRepo represents remote (fetchable) Debian repository.
//
// Repository could be filtered when fetching by components, architectures.
type RemoteRepo struct {
	// Permanent internal ID
	UUID string
	// User-assigned name
	Name string
	// Root of Debian archive, URL
	ArchiveRoot string
	// Distribution name, e.g. squeeze; empty for flat repositories
	Distribution string
	// List of components to fetch, if empty, then fetch all components
	Components []string
	// List of architectures to fetch, if empty, then fetch all architectures
	Architectures []string
	// Should we download sources?
	DownloadSources bool
	// Meta-information about repository
	Meta Stanza
	// Last update date
	LastDownloadDate time.Time
	// Checksums for release files
	ReleaseFiles map[string]utils.ChecksumInfo
	// "Snapshot" of current list of packages (unexported, populated at runtime)
	packageRefs *PackageRefList
	// Parsed archive root, derived from ArchiveRoot by prepare()
	archiveRootURL *url.URL
}
|
||||
|
||||
// NewRemoteRepo creates new instance of Debian remote repository with
// specified params.
//
// A distribution of "." or "./" marks a flat repository: Distribution is
// cleared, Architectures are reset, and Components are not allowed.
func NewRemoteRepo(name string, archiveRoot string, distribution string, components []string,
	architectures []string, downloadSources bool) (*RemoteRepo, error) {
	result := &RemoteRepo{
		UUID:            uuid.New(),
		Name:            name,
		ArchiveRoot:     archiveRoot,
		Distribution:    distribution,
		Components:      components,
		Architectures:   architectures,
		DownloadSources: downloadSources,
	}

	// normalize ArchiveRoot and parse it into archiveRootURL
	err := result.prepare()
	if err != nil {
		return nil, err
	}

	if result.Distribution == "." || result.Distribution == "./" {
		// flat repo
		result.Distribution = ""
		result.Architectures = nil
		if len(result.Components) > 0 {
			return nil, fmt.Errorf("components aren't supported for flat repos")
		}
		result.Components = nil
	}

	return result, nil
}
|
||||
|
||||
func (repo *RemoteRepo) prepare() error {
|
||||
var err error
|
||||
|
||||
// Add final / to URL
|
||||
if !strings.HasSuffix(repo.ArchiveRoot, "/") {
|
||||
repo.ArchiveRoot = repo.ArchiveRoot + "/"
|
||||
}
|
||||
|
||||
repo.archiveRootURL, err = url.Parse(repo.ArchiveRoot)
|
||||
return err
|
||||
}
|
||||
|
||||
// String interface
|
||||
func (repo *RemoteRepo) String() string {
|
||||
srcFlag := ""
|
||||
if repo.DownloadSources {
|
||||
srcFlag = " [src]"
|
||||
}
|
||||
distribution := repo.Distribution
|
||||
if distribution == "" {
|
||||
distribution = "./"
|
||||
}
|
||||
return fmt.Sprintf("[%s]: %s %s%s", repo.Name, repo.ArchiveRoot, distribution, srcFlag)
|
||||
}
|
||||
|
||||
// IsFlat determines if repository is flat (empty Distribution, index files
// at the archive root instead of under dists/).
func (repo *RemoteRepo) IsFlat() bool {
	return repo.Distribution == ""
}
|
||||
|
||||
// NumPackages return number of packages retrived from remote repo
|
||||
func (repo *RemoteRepo) NumPackages() int {
|
||||
if repo.packageRefs == nil {
|
||||
return 0
|
||||
}
|
||||
return repo.packageRefs.Len()
|
||||
}
|
||||
|
||||
// RefList returns package list (refs) for repo; may be nil if packages
// haven't been fetched yet (see NumPackages).
func (repo *RemoteRepo) RefList() *PackageRefList {
	return repo.packageRefs
}
|
||||
|
||||
// ReleaseURL returns URL to Release* files in repo root
|
||||
func (repo *RemoteRepo) ReleaseURL(name string) *url.URL {
|
||||
var path *url.URL
|
||||
|
||||
if !repo.IsFlat() {
|
||||
path = &url.URL{Path: fmt.Sprintf("dists/%s/%s", repo.Distribution, name)}
|
||||
} else {
|
||||
path = &url.URL{Path: name}
|
||||
}
|
||||
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// FlatBinaryURL returns URL to Packages files for flat repo
|
||||
func (repo *RemoteRepo) FlatBinaryURL() *url.URL {
|
||||
path := &url.URL{Path: "Packages"}
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// FlatSourcesURL returns URL to Sources files for flat repo
|
||||
func (repo *RemoteRepo) FlatSourcesURL() *url.URL {
|
||||
path := &url.URL{Path: "Sources"}
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// BinaryURL returns URL of Packages files for given component and
|
||||
// architecture
|
||||
func (repo *RemoteRepo) BinaryURL(component string, architecture string) *url.URL {
|
||||
path := &url.URL{Path: fmt.Sprintf("dists/%s/%s/binary-%s/Packages", repo.Distribution, component, architecture)}
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// SourcesURL returns URL of Sources files for given component
|
||||
func (repo *RemoteRepo) SourcesURL(component string) *url.URL {
|
||||
path := &url.URL{Path: fmt.Sprintf("dists/%s/%s/source/Sources", repo.Distribution, component)}
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// PackageURL returns URL of package file relative to repository root
|
||||
// architecture
|
||||
func (repo *RemoteRepo) PackageURL(filename string) *url.URL {
|
||||
path := &url.URL{Path: filename}
|
||||
return repo.archiveRootURL.ResolveReference(path)
|
||||
}
|
||||
|
||||
// Fetch updates information about repository: downloads and (optionally)
// verifies the Release file, then extracts architectures, components and
// release-file checksums from it into the repo.
//
// Verification order: InRelease (clearsigned), falling back to
// Release + Release.gpg (detached signature). The goto-based flow mirrors
// that fallback chain and is kept as-is.
func (repo *RemoteRepo) Fetch(d aptly.Downloader, verifier utils.Verifier) error {
	var (
		release, inrelease, releasesig *os.File
		err                            error
	)

	if verifier == nil {
		// 0. Just download release file to temporary URL
		release, err = http.DownloadTemp(d, repo.ReleaseURL("Release").String())
		if err != nil {
			return err
		}
	} else {
		// 1. try InRelease file
		inrelease, err = http.DownloadTemp(d, repo.ReleaseURL("InRelease").String())
		if err != nil {
			goto splitsignature
		}
		defer inrelease.Close()

		err = verifier.VerifyClearsigned(inrelease)
		if err != nil {
			goto splitsignature
		}

		// rewind before extracting the signed payload
		inrelease.Seek(0, 0)

		release, err = verifier.ExtractClearsigned(inrelease)
		if err != nil {
			goto splitsignature
		}

		goto ok

	splitsignature:
		// 2. try Release + Release.gpg
		release, err = http.DownloadTemp(d, repo.ReleaseURL("Release").String())
		if err != nil {
			return err
		}

		releasesig, err = http.DownloadTemp(d, repo.ReleaseURL("Release.gpg").String())
		if err != nil {
			return err
		}

		err = verifier.VerifyDetachedSignature(releasesig, release)
		if err != nil {
			return err
		}

		_, err = release.Seek(0, 0)
		if err != nil {
			return err
		}
	}
ok:

	defer release.Close()

	// parse the Release file as a single control-file stanza
	sreader := NewControlFileReader(release)
	stanza, err := sreader.ReadStanza()
	if err != nil {
		return err
	}

	if !repo.IsFlat() {
		// adopt the repo's architecture list, or validate the user's
		// selection against it
		architectures := strings.Split(stanza["Architectures"], " ")
		if len(repo.Architectures) == 0 {
			repo.Architectures = architectures
		} else {
			err = utils.StringsIsSubset(repo.Architectures, architectures,
				fmt.Sprintf("architecture %%s not available in repo %s", repo))
			if err != nil {
				return err
			}
		}

		// same for components; path.Base strips any directory prefix
		components := strings.Split(stanza["Components"], " ")
		for i := range components {
			components[i] = path.Base(components[i])
		}
		if len(repo.Components) == 0 {
			repo.Components = components
		} else {
			err = utils.StringsIsSubset(repo.Components, components,
				fmt.Sprintf("component %%s not available in repo %s", repo))
			if err != nil {
				return err
			}
		}
	}

	repo.ReleaseFiles = make(map[string]utils.ChecksumInfo)

	// parseSums reads one checksum field ("<hash> <size> <filename>" lines),
	// merging each hash kind into the per-file ChecksumInfo, and removes the
	// field from the stanza so only leftover metadata ends up in repo.Meta.
	parseSums := func(field string, setter func(sum *utils.ChecksumInfo, data string)) error {
		for _, line := range strings.Split(stanza[field], "\n") {
			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}
			parts := strings.Fields(line)

			if len(parts) != 3 {
				return fmt.Errorf("unparseable hash sum line: %#v", line)
			}

			var size int64
			size, err = strconv.ParseInt(parts[1], 10, 64)
			if err != nil {
				return fmt.Errorf("unable to parse size: %s", err)
			}

			sum := repo.ReleaseFiles[parts[2]]

			sum.Size = size
			setter(&sum, parts[0])

			repo.ReleaseFiles[parts[2]] = sum
		}

		delete(stanza, field)

		return nil
	}

	err = parseSums("MD5Sum", func(sum *utils.ChecksumInfo, data string) { sum.MD5 = data })
	if err != nil {
		return err
	}

	err = parseSums("SHA1", func(sum *utils.ChecksumInfo, data string) { sum.SHA1 = data })
	if err != nil {
		return err
	}

	err = parseSums("SHA256", func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data })
	if err != nil {
		return err
	}

	repo.Meta = stanza

	return nil
}
|
||||
|
||||
// Download downloads all repo files
|
||||
func (repo *RemoteRepo) Download(progress aptly.Progress, d aptly.Downloader, collectionFactory *CollectionFactory, packagePool aptly.PackagePool, ignoreMismatch bool) error {
|
||||
list := NewPackageList()
|
||||
|
||||
progress.Printf("Downloading & parsing package files...\n")
|
||||
|
||||
// Download and parse all Packages & Source files
|
||||
packagesURLs := [][]string{}
|
||||
|
||||
if repo.IsFlat() {
|
||||
packagesURLs = append(packagesURLs, []string{repo.FlatBinaryURL().String(), "binary"})
|
||||
if repo.DownloadSources {
|
||||
packagesURLs = append(packagesURLs, []string{repo.FlatSourcesURL().String(), "source"})
|
||||
}
|
||||
} else {
|
||||
for _, component := range repo.Components {
|
||||
for _, architecture := range repo.Architectures {
|
||||
packagesURLs = append(packagesURLs, []string{repo.BinaryURL(component, architecture).String(), "binary"})
|
||||
}
|
||||
if repo.DownloadSources {
|
||||
packagesURLs = append(packagesURLs, []string{repo.SourcesURL(component).String(), "source"})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, info := range packagesURLs {
|
||||
url, kind := info[0], info[1]
|
||||
packagesReader, packagesFile, err := http.DownloadTryCompression(d, url, repo.ReleaseFiles, ignoreMismatch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer packagesFile.Close()
|
||||
|
||||
stat, _ := packagesFile.Stat()
|
||||
progress.InitBar(stat.Size(), true)
|
||||
|
||||
sreader := NewControlFileReader(packagesReader)
|
||||
|
||||
for {
|
||||
stanza, err := sreader.ReadStanza()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if stanza == nil {
|
||||
break
|
||||
}
|
||||
|
||||
off, _ := packagesFile.Seek(0, 1)
|
||||
progress.SetBar(int(off))
|
||||
|
||||
var p *Package
|
||||
|
||||
if kind == "binary" {
|
||||
p = NewPackageFromControlFile(stanza)
|
||||
} else if kind == "source" {
|
||||
p, err = NewSourcePackageFromControlFile(stanza)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
err = list.Add(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = collectionFactory.PackageCollection().Update(p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
progress.ShutdownBar()
|
||||
}
|
||||
|
||||
progress.Printf("Building download queue...\n")
|
||||
|
||||
// Build download queue
|
||||
queued := make(map[string]PackageDownloadTask, list.Len())
|
||||
count := 0
|
||||
downloadSize := int64(0)
|
||||
|
||||
err := list.ForEach(func(p *Package) error {
|
||||
list, err2 := p.DownloadList(packagePool)
|
||||
if err2 != nil {
|
||||
return err2
|
||||
}
|
||||
p.files = nil
|
||||
|
||||
for _, task := range list {
|
||||
key := task.RepoURI + "-" + task.DestinationPath
|
||||
_, found := queued[key]
|
||||
if !found {
|
||||
count++
|
||||
downloadSize += task.Checksums.Size
|
||||
queued[key] = task
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to build download queue: %s", err)
|
||||
}
|
||||
|
||||
repo.packageRefs = NewPackageRefListFromPackageList(list)
|
||||
// free up package list, we don't need it after this point
|
||||
list = nil
|
||||
|
||||
progress.Printf("Download queue: %d items (%s)\n", count, utils.HumanBytes(downloadSize))
|
||||
|
||||
progress.InitBar(downloadSize, true)
|
||||
|
||||
// Download all package files
|
||||
ch := make(chan error, len(queued))
|
||||
|
||||
for _, task := range queued {
|
||||
d.DownloadWithChecksum(repo.PackageURL(task.RepoURI).String(), task.DestinationPath, ch, task.Checksums, ignoreMismatch)
|
||||
}
|
||||
|
||||
// We don't need queued after this point
|
||||
queued = nil
|
||||
|
||||
// Wait for all downloads to finish
|
||||
errors := make([]string, 0)
|
||||
|
||||
for count > 0 {
|
||||
err = <-ch
|
||||
if err != nil {
|
||||
errors = append(errors, err.Error())
|
||||
}
|
||||
count--
|
||||
}
|
||||
|
||||
progress.ShutdownBar()
|
||||
|
||||
if len(errors) > 0 {
|
||||
return fmt.Errorf("download errors:\n %s\n", strings.Join(errors, "\n "))
|
||||
}
|
||||
|
||||
repo.LastDownloadDate = time.Now()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Encode does msgpack encoding of RemoteRepo
|
||||
func (repo *RemoteRepo) Encode() []byte {
|
||||
var buf bytes.Buffer
|
||||
|
||||
encoder := codec.NewEncoder(&buf, &codec.MsgpackHandle{})
|
||||
encoder.Encode(repo)
|
||||
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// Decode decodes msgpack representation into RemoteRepo
|
||||
func (repo *RemoteRepo) Decode(input []byte) error {
|
||||
decoder := codec.NewDecoderBytes(input, &codec.MsgpackHandle{})
|
||||
err := decoder.Decode(repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return repo.prepare()
|
||||
}
|
||||
|
||||
// Key is a unique id in DB
|
||||
func (repo *RemoteRepo) Key() []byte {
|
||||
return []byte("R" + repo.UUID)
|
||||
}
|
||||
|
||||
// RefKey is a unique id for package reference list
|
||||
func (repo *RemoteRepo) RefKey() []byte {
|
||||
return []byte("E" + repo.UUID)
|
||||
}
|
||||
|
||||
// RemoteRepoCollection does listing, updating/adding/deleting of RemoteRepos
|
||||
type RemoteRepoCollection struct {
|
||||
db database.Storage
|
||||
list []*RemoteRepo
|
||||
}
|
||||
|
||||
// NewRemoteRepoCollection loads RemoteRepos from DB and makes up collection
|
||||
func NewRemoteRepoCollection(db database.Storage) *RemoteRepoCollection {
|
||||
result := &RemoteRepoCollection{
|
||||
db: db,
|
||||
}
|
||||
|
||||
blobs := db.FetchByPrefix([]byte("R"))
|
||||
result.list = make([]*RemoteRepo, 0, len(blobs))
|
||||
|
||||
for _, blob := range blobs {
|
||||
r := &RemoteRepo{}
|
||||
if err := r.Decode(blob); err != nil {
|
||||
log.Printf("Error decoding mirror: %s\n", err)
|
||||
} else {
|
||||
result.list = append(result.list, r)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Add appends new repo to collection and saves it
|
||||
func (collection *RemoteRepoCollection) Add(repo *RemoteRepo) error {
|
||||
for _, r := range collection.list {
|
||||
if r.Name == repo.Name {
|
||||
return fmt.Errorf("mirror with name %s already exists", repo.Name)
|
||||
}
|
||||
}
|
||||
|
||||
err := collection.Update(repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collection.list = append(collection.list, repo)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Update stores updated information about repo in DB
|
||||
func (collection *RemoteRepoCollection) Update(repo *RemoteRepo) error {
|
||||
err := collection.db.Put(repo.Key(), repo.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if repo.packageRefs != nil {
|
||||
err = collection.db.Put(repo.RefKey(), repo.packageRefs.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadComplete loads additional information for remote repo
|
||||
func (collection *RemoteRepoCollection) LoadComplete(repo *RemoteRepo) error {
|
||||
encoded, err := collection.db.Get(repo.RefKey())
|
||||
if err == database.ErrNotFound {
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
repo.packageRefs = &PackageRefList{}
|
||||
return repo.packageRefs.Decode(encoded)
|
||||
}
|
||||
|
||||
// ByName looks up repository by name
|
||||
func (collection *RemoteRepoCollection) ByName(name string) (*RemoteRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.Name == name {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("mirror with name %s not found", name)
|
||||
}
|
||||
|
||||
// ByUUID looks up repository by uuid
|
||||
func (collection *RemoteRepoCollection) ByUUID(uuid string) (*RemoteRepo, error) {
|
||||
for _, r := range collection.list {
|
||||
if r.UUID == uuid {
|
||||
return r, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("mirror with uuid %s not found", uuid)
|
||||
}
|
||||
|
||||
// ForEach runs method for each repository
|
||||
func (collection *RemoteRepoCollection) ForEach(handler func(*RemoteRepo) error) error {
|
||||
var err error
|
||||
for _, r := range collection.list {
|
||||
err = handler(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Len returns number of remote repos
|
||||
func (collection *RemoteRepoCollection) Len() int {
|
||||
return len(collection.list)
|
||||
}
|
||||
|
||||
// Drop removes remote repo from collection
|
||||
func (collection *RemoteRepoCollection) Drop(repo *RemoteRepo) error {
|
||||
repoPosition := -1
|
||||
|
||||
for i, r := range collection.list {
|
||||
if r == repo {
|
||||
repoPosition = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if repoPosition == -1 {
|
||||
panic("repo not found!")
|
||||
}
|
||||
|
||||
collection.list[len(collection.list)-1], collection.list[repoPosition], collection.list =
|
||||
nil, collection.list[len(collection.list)-1], collection.list[:len(collection.list)-1]
|
||||
|
||||
err := collection.db.Delete(repo.Key())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return collection.db.Delete(repo.RefKey())
|
||||
}
|
||||
643
debian/remote_test.go
vendored
643
debian/remote_test.go
vendored
@@ -1,643 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/smira/aptly/aptly"
|
||||
"github.com/smira/aptly/console"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/files"
|
||||
"github.com/smira/aptly/http"
|
||||
"github.com/smira/aptly/utils"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
. "launchpad.net/gocheck"
|
||||
"os"
|
||||
)
|
||||
|
||||
type NullVerifier struct {
|
||||
}
|
||||
|
||||
func (n *NullVerifier) InitKeyring() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *NullVerifier) AddKeyring(keyring string) {
|
||||
}
|
||||
|
||||
func (n *NullVerifier) VerifyDetachedSignature(signature, cleartext io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *NullVerifier) VerifyClearsigned(clearsigned io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *NullVerifier) ExtractClearsigned(clearsigned io.Reader) (text *os.File, err error) {
|
||||
text, _ = ioutil.TempFile("", "aptly-test")
|
||||
io.Copy(text, clearsigned)
|
||||
text.Seek(0, 0)
|
||||
os.Remove(text.Name())
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type PackageListMixinSuite struct {
|
||||
p1, p2, p3 *Package
|
||||
list *PackageList
|
||||
reflist *PackageRefList
|
||||
}
|
||||
|
||||
func (s *PackageListMixinSuite) SetUpPackages() {
|
||||
s.list = NewPackageList()
|
||||
|
||||
s.p1 = NewPackageFromControlFile(packageStanza.Copy())
|
||||
stanza := packageStanza.Copy()
|
||||
stanza["Package"] = "mars-invaders"
|
||||
s.p2 = NewPackageFromControlFile(stanza)
|
||||
stanza = packageStanza.Copy()
|
||||
stanza["Package"] = "lonely-strangers"
|
||||
s.p3 = NewPackageFromControlFile(stanza)
|
||||
|
||||
s.list.Add(s.p1)
|
||||
s.list.Add(s.p2)
|
||||
s.list.Add(s.p3)
|
||||
|
||||
s.reflist = NewPackageRefListFromPackageList(s.list)
|
||||
}
|
||||
|
||||
type RemoteRepoSuite struct {
|
||||
PackageListMixinSuite
|
||||
repo *RemoteRepo
|
||||
flat *RemoteRepo
|
||||
downloader *http.FakeDownloader
|
||||
progress aptly.Progress
|
||||
db database.Storage
|
||||
collectionFactory *CollectionFactory
|
||||
packagePool aptly.PackagePool
|
||||
}
|
||||
|
||||
var _ = Suite(&RemoteRepoSuite{})
|
||||
|
||||
func (s *RemoteRepoSuite) SetUpTest(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian", "squeeze", []string{"main"}, []string{}, false)
|
||||
s.flat, _ = NewRemoteRepo("exp42", "http://repos.express42.com/virool/precise/", "./", []string{}, []string{}, false)
|
||||
s.downloader = http.NewFakeDownloader().ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
|
||||
s.progress = console.NewProgress()
|
||||
s.db, _ = database.OpenDB(c.MkDir())
|
||||
s.collectionFactory = NewCollectionFactory(s.db)
|
||||
s.packagePool = files.NewPackagePool(c.MkDir())
|
||||
s.SetUpPackages()
|
||||
s.progress.Start()
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TearDownTest(c *C) {
|
||||
s.progress.Shutdown()
|
||||
s.db.Close()
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestInvalidURL(c *C) {
|
||||
_, err := NewRemoteRepo("s", "http://lolo%2", "squeeze", []string{"main"}, []string{}, false)
|
||||
c.Assert(err, ErrorMatches, ".*hexadecimal escape in host.*")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFlatCreation(c *C) {
|
||||
c.Check(s.flat.Distribution, Equals, "")
|
||||
c.Check(s.flat.Architectures, IsNil)
|
||||
c.Check(s.flat.Components, IsNil)
|
||||
|
||||
_, err := NewRemoteRepo("fl", "http://some.repo/", "./", []string{"main"}, []string{}, false)
|
||||
c.Check(err, ErrorMatches, "components aren't supported for flat repos")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestString(c *C) {
|
||||
c.Check(s.repo.String(), Equals, "[yandex]: http://mirror.yandex.ru/debian/ squeeze")
|
||||
c.Check(s.flat.String(), Equals, "[exp42]: http://repos.express42.com/virool/precise/ ./")
|
||||
|
||||
s.repo.DownloadSources = true
|
||||
s.flat.DownloadSources = true
|
||||
c.Check(s.repo.String(), Equals, "[yandex]: http://mirror.yandex.ru/debian/ squeeze [src]")
|
||||
c.Check(s.flat.String(), Equals, "[exp42]: http://repos.express42.com/virool/precise/ ./ [src]")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestNumPackages(c *C) {
|
||||
c.Check(s.repo.NumPackages(), Equals, 0)
|
||||
s.repo.packageRefs = s.reflist
|
||||
c.Check(s.repo.NumPackages(), Equals, 3)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestIsFlat(c *C) {
|
||||
c.Check(s.repo.IsFlat(), Equals, false)
|
||||
c.Check(s.flat.IsFlat(), Equals, true)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestRefList(c *C) {
|
||||
s.repo.packageRefs = s.reflist
|
||||
c.Check(s.repo.RefList(), Equals, s.reflist)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestReleaseURL(c *C) {
|
||||
c.Assert(s.repo.ReleaseURL("Release").String(), Equals, "http://mirror.yandex.ru/debian/dists/squeeze/Release")
|
||||
c.Assert(s.repo.ReleaseURL("InRelease").String(), Equals, "http://mirror.yandex.ru/debian/dists/squeeze/InRelease")
|
||||
|
||||
c.Assert(s.flat.ReleaseURL("Release").String(), Equals, "http://repos.express42.com/virool/precise/Release")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestBinaryURL(c *C) {
|
||||
c.Assert(s.repo.BinaryURL("main", "amd64").String(), Equals, "http://mirror.yandex.ru/debian/dists/squeeze/main/binary-amd64/Packages")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestSourcesURL(c *C) {
|
||||
c.Assert(s.repo.SourcesURL("main").String(), Equals, "http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFlatBinaryURL(c *C) {
|
||||
c.Assert(s.flat.FlatBinaryURL().String(), Equals, "http://repos.express42.com/virool/precise/Packages")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFlatSourcesURL(c *C) {
|
||||
c.Assert(s.flat.FlatSourcesURL().String(), Equals, "http://repos.express42.com/virool/precise/Sources")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestPackageURL(c *C) {
|
||||
c.Assert(s.repo.PackageURL("pool/main/0/0ad/0ad_0~r11863-2_i386.deb").String(), Equals,
|
||||
"http://mirror.yandex.ru/debian/pool/main/0/0ad/0ad_0~r11863-2_i386.deb")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetch(c *C) {
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.repo.Architectures, DeepEquals, []string{"amd64", "armel", "armhf", "i386", "powerpc"})
|
||||
c.Assert(s.repo.Components, DeepEquals, []string{"main"})
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
c.Check(s.repo.ReleaseFiles, HasLen, 39)
|
||||
c.Check(s.repo.ReleaseFiles["main/binary-i386/Packages.bz2"], DeepEquals,
|
||||
utils.ChecksumInfo{
|
||||
Size: 734,
|
||||
MD5: "7954ed80936429687122b554620c1b5b",
|
||||
SHA1: "95a463a0739bf9ff622c8d68f6e4598d400f5248",
|
||||
SHA256: "377890a26f99db55e117dfc691972dcbbb7d8be1630c8fc8297530c205377f2b"})
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchNullVerifier1(c *C) {
|
||||
downloader := http.NewFakeDownloader()
|
||||
downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/InRelease", errors.New("404"))
|
||||
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
|
||||
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release.gpg", "GPG")
|
||||
|
||||
err := s.repo.Fetch(downloader, &NullVerifier{})
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.repo.Architectures, DeepEquals, []string{"amd64", "armel", "armhf", "i386", "powerpc"})
|
||||
c.Assert(s.repo.Components, DeepEquals, []string{"main"})
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchNullVerifier2(c *C) {
|
||||
downloader := http.NewFakeDownloader()
|
||||
downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/InRelease", exampleReleaseFile)
|
||||
|
||||
err := s.repo.Fetch(downloader, &NullVerifier{})
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.repo.Architectures, DeepEquals, []string{"amd64", "armel", "armhf", "i386", "powerpc"})
|
||||
c.Assert(s.repo.Components, DeepEquals, []string{"main"})
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchWrongArchitecture(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{"xyz"}, false)
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, ErrorMatches, "architecture xyz not available in repo.*")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchWrongComponent(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"xyz"}, []string{"i386"}, false)
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, ErrorMatches, "component xyz not available in repo.*")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestEncodeDecode(c *C) {
|
||||
repo := &RemoteRepo{}
|
||||
err := repo.Decode(s.repo.Encode())
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(repo.Name, Equals, "yandex")
|
||||
c.Check(repo.ArchiveRoot, Equals, "http://mirror.yandex.ru/debian/")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestKey(c *C) {
|
||||
c.Assert(len(s.repo.Key()), Equals, 37)
|
||||
c.Assert(s.repo.Key()[0], Equals, byte('R'))
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestRefKey(c *C) {
|
||||
c.Assert(len(s.repo.RefKey()), Equals, 37)
|
||||
c.Assert(s.repo.RefKey()[0], Equals, byte('E'))
|
||||
c.Assert(s.repo.RefKey()[1:], DeepEquals, s.repo.Key()[1:])
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownload(c *C) {
|
||||
s.repo.Architectures = []string{"i386"}
|
||||
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.bz2", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb", "xyz")
|
||||
|
||||
err = s.repo.Download(s.progress, s.downloader, s.collectionFactory, s.packagePool, false)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
c.Assert(s.repo.packageRefs, NotNil)
|
||||
|
||||
pkg, err := s.collectionFactory.PackageCollection().ByKey(s.repo.packageRefs.Refs[0])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err := pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "amanda-client")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
|
||||
s.repo.Architectures = []string{"i386"}
|
||||
s.repo.DownloadSources = true
|
||||
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.bz2", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.bz2", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", errors.New("HTTP 404"))
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
|
||||
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb", "xyz")
|
||||
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/access-modifier-checker/access-modifier-checker_1.0-4.dsc", "abc")
|
||||
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/access-modifier-checker/access-modifier-checker_1.0.orig.tar.gz", "abcd")
|
||||
s.downloader.AnyExpectResponse("http://mirror.yandex.ru/debian/pool/main/a/access-modifier-checker/access-modifier-checker_1.0-4.debian.tar.gz", "abcde")
|
||||
|
||||
err = s.repo.Download(s.progress, s.downloader, s.collectionFactory, s.packagePool, false)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
c.Assert(s.repo.packageRefs, NotNil)
|
||||
|
||||
pkg, err := s.collectionFactory.PackageCollection().ByKey(s.repo.packageRefs.Refs[0])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err := pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "amanda-client")
|
||||
|
||||
pkg, err = s.collectionFactory.PackageCollection().ByKey(s.repo.packageRefs.Refs[1])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err = pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "access-modifier-checker")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
|
||||
downloader := http.NewFakeDownloader()
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Release", exampleReleaseFile)
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.bz2", errors.New("HTTP 404"))
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.gz", errors.New("HTTP 404"))
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Packages", examplePackagesFile)
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb", "xyz")
|
||||
|
||||
err := s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.Download(s.progress, downloader, s.collectionFactory, s.packagePool, false)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
c.Assert(s.flat.packageRefs, NotNil)
|
||||
|
||||
pkg, err := s.collectionFactory.PackageCollection().ByKey(s.flat.packageRefs.Refs[0])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err := pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "amanda-client")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
|
||||
s.flat.DownloadSources = true
|
||||
|
||||
downloader := http.NewFakeDownloader()
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Release", exampleReleaseFile)
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.bz2", errors.New("HTTP 404"))
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Packages.gz", errors.New("HTTP 404"))
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Packages", examplePackagesFile)
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Sources.bz2", errors.New("HTTP 404"))
|
||||
downloader.ExpectError("http://repos.express42.com/virool/precise/Sources.gz", errors.New("HTTP 404"))
|
||||
downloader.ExpectResponse("http://repos.express42.com/virool/precise/Sources", exampleSourcesFile)
|
||||
downloader.AnyExpectResponse("http://repos.express42.com/virool/precise/pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb", "xyz")
|
||||
downloader.AnyExpectResponse("http://repos.express42.com/virool/precise/pool/main/a/access-modifier-checker/access-modifier-checker_1.0-4.dsc", "abc")
|
||||
downloader.AnyExpectResponse("http://repos.express42.com/virool/precise/pool/main/a/access-modifier-checker/access-modifier-checker_1.0.orig.tar.gz", "abcd")
|
||||
downloader.AnyExpectResponse("http://repos.express42.com/virool/precise/pool/main/a/access-modifier-checker/access-modifier-checker_1.0-4.debian.tar.gz", "abcde")
|
||||
|
||||
err := s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.Download(s.progress, downloader, s.collectionFactory, s.packagePool, false)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
c.Assert(s.flat.packageRefs, NotNil)
|
||||
|
||||
pkg, err := s.collectionFactory.PackageCollection().ByKey(s.flat.packageRefs.Refs[0])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err := pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "amanda-client")
|
||||
|
||||
pkg, err = s.collectionFactory.PackageCollection().ByKey(s.flat.packageRefs.Refs[1])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
result, err = pkg.VerifyFiles(s.packagePool)
|
||||
c.Check(result, Equals, true)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "access-modifier-checker")
|
||||
}
|
||||
|
||||
type RemoteRepoCollectionSuite struct {
|
||||
PackageListMixinSuite
|
||||
db database.Storage
|
||||
collection *RemoteRepoCollection
|
||||
}
|
||||
|
||||
var _ = Suite(&RemoteRepoCollectionSuite{})
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) SetUpTest(c *C) {
|
||||
s.db, _ = database.OpenDB(c.MkDir())
|
||||
s.collection = NewRemoteRepoCollection(s.db)
|
||||
s.SetUpPackages()
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TearDownTest(c *C) {
|
||||
s.db.Close()
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestAddByName(c *C) {
|
||||
r, err := s.collection.ByName("yandex")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
c.Assert(s.collection.Add(repo), ErrorMatches, ".*already exists")
|
||||
|
||||
r, err = s.collection.ByName("yandex")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
|
||||
collection := NewRemoteRepoCollection(s.db)
|
||||
r, err = collection.ByName("yandex")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestByUUID(c *C) {
|
||||
r, err := s.collection.ByUUID("some-uuid")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
|
||||
r, err = s.collection.ByUUID(repo.UUID)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.String(), Equals, repo.String())
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestUpdateLoadComplete(c *C) {
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
c.Assert(s.collection.Update(repo), IsNil)
|
||||
|
||||
collection := NewRemoteRepoCollection(s.db)
|
||||
r, err := collection.ByName("yandex")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.packageRefs, IsNil)
|
||||
|
||||
repo.packageRefs = s.reflist
|
||||
c.Assert(s.collection.Update(repo), IsNil)
|
||||
|
||||
collection = NewRemoteRepoCollection(s.db)
|
||||
r, err = collection.ByName("yandex")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(r.packageRefs, IsNil)
|
||||
c.Assert(r.NumPackages(), Equals, 0)
|
||||
c.Assert(s.collection.LoadComplete(r), IsNil)
|
||||
c.Assert(r.NumPackages(), Equals, 3)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestForEachAndLen(c *C) {
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
s.collection.Add(repo)
|
||||
|
||||
count := 0
|
||||
err := s.collection.ForEach(func(*RemoteRepo) error {
|
||||
count++
|
||||
return nil
|
||||
})
|
||||
c.Assert(count, Equals, 1)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(s.collection.Len(), Equals, 1)
|
||||
|
||||
e := errors.New("c")
|
||||
|
||||
err = s.collection.ForEach(func(*RemoteRepo) error {
|
||||
return e
|
||||
})
|
||||
c.Assert(err, Equals, e)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestDrop(c *C) {
|
||||
repo1, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
|
||||
s.collection.Add(repo1)
|
||||
|
||||
repo2, _ := NewRemoteRepo("tyndex", "http://mirror.yandex.ru/debian/", "wheezy", []string{"main"}, []string{}, false)
|
||||
s.collection.Add(repo2)
|
||||
|
||||
r1, _ := s.collection.ByUUID(repo1.UUID)
|
||||
c.Check(r1, Equals, repo1)
|
||||
|
||||
err := s.collection.Drop(repo1)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
_, err = s.collection.ByUUID(repo1.UUID)
|
||||
c.Check(err, ErrorMatches, "mirror .* not found")
|
||||
|
||||
collection := NewRemoteRepoCollection(s.db)
|
||||
_, err = collection.ByName("yandex")
|
||||
c.Check(err, ErrorMatches, "mirror .* not found")
|
||||
|
||||
r2, _ := collection.ByName("tyndex")
|
||||
c.Check(r2.String(), Equals, repo2.String())
|
||||
|
||||
c.Check(func() { s.collection.Drop(repo1) }, Panics, "repo not found!")
|
||||
}
|
||||
|
||||
const exampleReleaseFile = `Origin: LP-PPA-agenda-developers-daily
|
||||
Label: Agenda Daily Builds
|
||||
Suite: precise
|
||||
Version: 12.04
|
||||
Codename: precise
|
||||
Date: Thu, 05 Dec 2013 8:14:32 UTC
|
||||
Architectures: amd64 armel armhf i386 powerpc
|
||||
Components: main
|
||||
Description: Ubuntu Precise 12.04
|
||||
MD5Sum:
|
||||
6a5fc91b7277021999268e04a8d74d4c 134 main/binary-amd64/Release
|
||||
01ff4a18aab39546fde304a35350fc2d 643 main/binary-amd64/Packages.gz
|
||||
52ded91eeb8490b02016335aa3343492 1350 main/binary-amd64/Packages
|
||||
5216f9ffe55d151cd7ce7b98b7a43bd7 735 main/binary-amd64/Packages.bz2
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/binary-armel/Packages
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/binary-armel/Packages.bz2
|
||||
7a9de1fb7bf60d416a77d9c9a9716675 134 main/binary-armel/Release
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/binary-armel/Packages.gz
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/binary-armhf/Packages.gz
|
||||
c63d31e8e3a5650c29a7124e541d6c23 134 main/binary-armhf/Release
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/binary-armhf/Packages.bz2
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/binary-armhf/Packages
|
||||
c8d336856df67d509032bb54145c2f89 826 main/binary-i386/Packages
|
||||
92262f0668b265401291f0467bc93763 133 main/binary-i386/Release
|
||||
7954ed80936429687122b554620c1b5b 734 main/binary-i386/Packages.bz2
|
||||
e2eef4fe7d285b12c511adfa3a39069e 641 main/binary-i386/Packages.gz
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/binary-powerpc/Packages.bz2
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/binary-powerpc/Packages.gz
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/binary-powerpc/Packages
|
||||
b079563fd3367c11f7be049bc686dd10 136 main/binary-powerpc/Release
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/debian-installer/binary-amd64/Packages.gz
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-amd64/Packages
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/debian-installer/binary-amd64/Packages.bz2
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/debian-installer/binary-armel/Packages.gz
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-armel/Packages
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/debian-installer/binary-armel/Packages.bz2
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/debian-installer/binary-armhf/Packages.gz
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-armhf/Packages
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/debian-installer/binary-armhf/Packages.bz2
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-i386/Packages
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/debian-installer/binary-i386/Packages.gz
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/debian-installer/binary-i386/Packages.bz2
|
||||
d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-powerpc/Packages
|
||||
4059d198768f9f8dc9372dc1c54bc3c3 14 main/debian-installer/binary-powerpc/Packages.bz2
|
||||
9d10bb61e59bd799891ae4fbcf447ec9 29 main/debian-installer/binary-powerpc/Packages.gz
|
||||
3481d65651306df1596dca9078c2506a 135 main/source/Release
|
||||
0459b7e4512db5479cb982bac6e2f9a1 2003 main/source/Sources
|
||||
3d83a489f1bd3c04226aa6520b8a6d07 656 main/source/Sources.bz2
|
||||
b062b5b77094aeeb05ca8dbb1ecf68a9 592 main/source/Sources.gz
|
||||
SHA1:
|
||||
fb0b7c8935623ed7d8c45044ba62225fd8cbd4ad 134 main/binary-amd64/Release
|
||||
b5d62bcec4ec18b88d664255e9051645bab7bd01 643 main/binary-amd64/Packages.gz
|
||||
ed47aae8926d22d529c27b40b61604aed2cb5f2f 1350 main/binary-amd64/Packages
|
||||
5b9b171ffcea36e869eba31bcc0e1bfb2a6ad84f 735 main/binary-amd64/Packages.bz2
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/binary-armel/Packages
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/binary-armel/Packages.bz2
|
||||
b89234a7efb74d02f15b88e264b5cd2ae1e5dc2d 134 main/binary-armel/Release
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/binary-armel/Packages.gz
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/binary-armhf/Packages.gz
|
||||
585a452e27c2e7e047c49d4b0a7459d8c627aa08 134 main/binary-armhf/Release
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/binary-armhf/Packages.bz2
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/binary-armhf/Packages
|
||||
1d2f0cd7a3c9e687b853eb277e241cd712b6e3b1 826 main/binary-i386/Packages
|
||||
16020809662f9bda36eb516d0995658dd94d1ad5 133 main/binary-i386/Release
|
||||
95a463a0739bf9ff622c8d68f6e4598d400f5248 734 main/binary-i386/Packages.bz2
|
||||
bf8c0dec9665ba78311c97cae1755d4b2e60af76 641 main/binary-i386/Packages.gz
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/binary-powerpc/Packages.bz2
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/binary-powerpc/Packages.gz
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/binary-powerpc/Packages
|
||||
cf2ae2d98f535d90209f2c4e5790f95b393d8c2b 136 main/binary-powerpc/Release
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/debian-installer/binary-amd64/Packages.gz
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/debian-installer/binary-amd64/Packages
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/debian-installer/binary-amd64/Packages.bz2
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/debian-installer/binary-armel/Packages.gz
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/debian-installer/binary-armel/Packages
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/debian-installer/binary-armel/Packages.bz2
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/debian-installer/binary-armhf/Packages.gz
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/debian-installer/binary-armhf/Packages
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/debian-installer/binary-armhf/Packages.bz2
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/debian-installer/binary-i386/Packages
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/debian-installer/binary-i386/Packages.gz
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/debian-installer/binary-i386/Packages.bz2
|
||||
da39a3ee5e6b4b0d3255bfef95601890afd80709 0 main/debian-installer/binary-powerpc/Packages
|
||||
64a543afbb5f4bf728636bdcbbe7a2ed0804adc2 14 main/debian-installer/binary-powerpc/Packages.bz2
|
||||
3df6ca52b6e8ecfb4a8fac6b8e02c777e3c7960d 29 main/debian-installer/binary-powerpc/Packages.gz
|
||||
49cfec0c9b1df3a25e983a3ddf29d15b0e376e02 135 main/source/Release
|
||||
6b92e0fc84307226172696fde59ca5f33f380b57 2003 main/source/Sources
|
||||
ecb8afea11030a5df46941cb8ec297ca24c85736 656 main/source/Sources.bz2
|
||||
923e71383969c91146f12fa8cd121397f2467a2e 592 main/source/Sources.gz
|
||||
SHA256:
|
||||
8c0314cfb1b48a8daf47f77420330fd0d78a31897eeb46e05a51964c9f2c02df 134 main/binary-amd64/Release
|
||||
81b072773d2fdd8471473e060d3bf73255e4c00d322cf387654736ea196e83b4 643 main/binary-amd64/Packages.gz
|
||||
c7bb299483277bbf7bf4165042edaf547f5fa18f5782c7d2cd8407a38a327cc8 1350 main/binary-amd64/Packages
|
||||
d263f735c3830caa33ae6441529bd4f8e382205af597ab2cdfcea73afdaa21ab 735 main/binary-amd64/Packages.bz2
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-armel/Packages
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/binary-armel/Packages.bz2
|
||||
75ede815b020626c6aa16201d24099ed7e06f03643d0cf38ef194f1029ea648b 134 main/binary-armel/Release
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/binary-armel/Packages.gz
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/binary-armhf/Packages.gz
|
||||
d25382b633c4a1621f8df6ce86e5c63da2e506a377e05ae9453238bb18191540 134 main/binary-armhf/Release
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/binary-armhf/Packages.bz2
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-armhf/Packages
|
||||
b1bb341bb613363ca29440c2eb9c08a9289de5458209990ec502ed27711a83a2 826 main/binary-i386/Packages
|
||||
e5aaceaac5ecb59143a4b4ed2bf700fe85d6cf08addd10cf2058bde697b7b219 133 main/binary-i386/Release
|
||||
377890a26f99db55e117dfc691972dcbbb7d8be1630c8fc8297530c205377f2b 734 main/binary-i386/Packages.bz2
|
||||
6361e8efc67d2e7c1a8db45388aec0311007c0a1bd96698623ddeb5ed0bdc914 641 main/binary-i386/Packages.gz
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/binary-powerpc/Packages.bz2
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/binary-powerpc/Packages.gz
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-powerpc/Packages
|
||||
03b5c97a99aa799964eb5a77f8a62ad38a241b93a87eacac6cf75a270a6d417c 136 main/binary-powerpc/Release
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/debian-installer/binary-amd64/Packages.gz
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-amd64/Packages
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/debian-installer/binary-amd64/Packages.bz2
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/debian-installer/binary-armel/Packages.gz
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-armel/Packages
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/debian-installer/binary-armel/Packages.bz2
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/debian-installer/binary-armhf/Packages.gz
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-armhf/Packages
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/debian-installer/binary-armhf/Packages.bz2
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-i386/Packages
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/debian-installer/binary-i386/Packages.gz
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/debian-installer/binary-i386/Packages.bz2
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-powerpc/Packages
|
||||
d3dda84eb03b9738d118eb2be78e246106900493c0ae07819ad60815134a8058 14 main/debian-installer/binary-powerpc/Packages.bz2
|
||||
825d493158fe0f50ca1acd70367aefa391170563af2e4ee9cedbcbe6796c8384 29 main/debian-installer/binary-powerpc/Packages.gz
|
||||
d683102993b6f11067ce86d73111f067e36a199e9dc1f4295c8b19c274dc9ef8 135 main/source/Release
|
||||
45f868fd5d9efe611d67572ffcf96a00a5b9ec38ea5102753290c38c36b8c282 2003 main/source/Sources
|
||||
d178f1e310218d9f0f16c37d0780637f1cf3640a94a7fb0e24dc940c51b1e115 656 main/source/Sources.bz2
|
||||
080228b550da407fb8ac73fb30b37323468fd2b2de98dd56a324ee7d701f6103 592 main/source/Sources.gz`
|
||||
|
||||
// examplePackagesFile is a parsing fixture: a single Packages-index stanza
// for amanda-client with its checksum and dependency fields populated.
const examplePackagesFile = `Package: amanda-client
Source: amanda
Version: 1:3.3.1-3~bpo60+1
Installed-Size: 880
Maintainer: Bdale Garbee <bdale@gag.com>
Architecture: i386
Replaces: amanda-common (<< 1:2.5.2p1-3)
Depends: libc6 (>= 2.3), libcurl3 (>= 7.16.2-1), libglib2.0-0 (>= 2.12.0), libreadline6 (>= 6.0), libssl0.9.8 (>= 0.9.8m-1), amanda-common (= 1:3.3.1-3~bpo60+1)
Suggests: gnuplot, dump, smbclient
Conflicts: amanda, amanda-common (<< 1:2.5.2p1-3)
Description: Advanced Maryland Automatic Network Disk Archiver (Client)
Description-md5: 21af3684379a64cacc51c39152ab1062
Section: utils
Priority: optional
Filename: pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb
Size: 3
MD5sum: d16fb36f0911f878998c136191af705e
SHA1: 66b27417d37e024c46526c2f6d358a754fc552f3
SHA256: 3608bca1e44ea6c4d268eb6db02260269892c0b42b86bbf1e77a6fa16c3c9282
`
|
||||
|
||||
// exampleSourcesFile reuses the shared source-package stanza fixture.
const exampleSourcesFile = sourcePackageMeta
|
||||
293
debian/snapshot.go
vendored
293
debian/snapshot.go
vendored
@@ -1,293 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"code.google.com/p/go-uuid/uuid"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/smira/aptly/database"
|
||||
"github.com/smira/aptly/utils"
|
||||
"github.com/ugorji/go/codec"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Snapshot is immutable state of repository: list of packages
type Snapshot struct {
	// Persistent internal ID
	UUID string
	// Human-readable name
	Name string
	// Date of creation
	CreatedAt time.Time

	// Source: kind + ID
	SourceKind string
	SourceIDs  []string
	// Description of how snapshot was created
	Description string

	// Package references frozen into this snapshot. Not serialized by
	// Encode (unexported), so it is nil right after Decode and must be
	// restored via SnapshotCollection.LoadComplete.
	packageRefs *PackageRefList
}
|
||||
|
||||
// NewSnapshotFromRepository creates snapshot from current state of repository
|
||||
func NewSnapshotFromRepository(name string, repo *RemoteRepo) (*Snapshot, error) {
|
||||
if repo.packageRefs == nil {
|
||||
return nil, errors.New("mirror not updated")
|
||||
}
|
||||
|
||||
return &Snapshot{
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
CreatedAt: time.Now(),
|
||||
SourceKind: "repo",
|
||||
SourceIDs: []string{repo.UUID},
|
||||
Description: fmt.Sprintf("Snapshot from mirror %s", repo),
|
||||
packageRefs: repo.packageRefs,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// NewSnapshotFromLocalRepo creates snapshot from current state of local repository
|
||||
func NewSnapshotFromLocalRepo(name string, repo *LocalRepo) (*Snapshot, error) {
|
||||
if repo.packageRefs == nil {
|
||||
return nil, errors.New("local repo doesn't have packages")
|
||||
}
|
||||
|
||||
return &Snapshot{
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
CreatedAt: time.Now(),
|
||||
SourceKind: "local",
|
||||
SourceIDs: []string{repo.UUID},
|
||||
Description: fmt.Sprintf("Snapshot from local repo %s", repo),
|
||||
packageRefs: repo.packageRefs,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// NewSnapshotFromPackageList creates snapshot from PackageList
|
||||
func NewSnapshotFromPackageList(name string, sources []*Snapshot, list *PackageList, description string) *Snapshot {
|
||||
return NewSnapshotFromRefList(name, sources, NewPackageRefListFromPackageList(list), description)
|
||||
}
|
||||
|
||||
// NewSnapshotFromRefList creates snapshot from PackageRefList
|
||||
func NewSnapshotFromRefList(name string, sources []*Snapshot, list *PackageRefList, description string) *Snapshot {
|
||||
sourceUUIDs := make([]string, len(sources))
|
||||
for i := range sources {
|
||||
sourceUUIDs[i] = sources[i].UUID
|
||||
}
|
||||
|
||||
return &Snapshot{
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
CreatedAt: time.Now(),
|
||||
SourceKind: "snapshot",
|
||||
SourceIDs: sourceUUIDs,
|
||||
Description: description,
|
||||
packageRefs: list,
|
||||
}
|
||||
}
|
||||
|
||||
// String returns string representation of snapshot
|
||||
func (s *Snapshot) String() string {
|
||||
return fmt.Sprintf("[%s]: %s", s.Name, s.Description)
|
||||
}
|
||||
|
||||
// NumPackages returns number of packages in snapshot
|
||||
func (s *Snapshot) NumPackages() int {
|
||||
return s.packageRefs.Len()
|
||||
}
|
||||
|
||||
// RefList returns list of package refs in snapshot
|
||||
func (s *Snapshot) RefList() *PackageRefList {
|
||||
return s.packageRefs
|
||||
}
|
||||
|
||||
// Key is a unique id in DB
|
||||
func (s *Snapshot) Key() []byte {
|
||||
return []byte("S" + s.UUID)
|
||||
}
|
||||
|
||||
// RefKey is a unique id for package reference list
|
||||
func (s *Snapshot) RefKey() []byte {
|
||||
return []byte("E" + s.UUID)
|
||||
}
|
||||
|
||||
// Encode does msgpack encoding of Snapshot
|
||||
func (s *Snapshot) Encode() []byte {
|
||||
var buf bytes.Buffer
|
||||
|
||||
encoder := codec.NewEncoder(&buf, &codec.MsgpackHandle{})
|
||||
encoder.Encode(s)
|
||||
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// Decode decodes msgpack representation into Snapshot
|
||||
func (s *Snapshot) Decode(input []byte) error {
|
||||
decoder := codec.NewDecoderBytes(input, &codec.MsgpackHandle{})
|
||||
return decoder.Decode(s)
|
||||
}
|
||||
|
||||
// SnapshotCollection does listing, updating/adding/deleting of Snapshots
type SnapshotCollection struct {
	// backing key-value store; snapshots live under "S"-prefixed keys
	db database.Storage
	// all snapshots, loaded eagerly at construction time
	list []*Snapshot
}
|
||||
|
||||
// NewSnapshotCollection loads Snapshots from DB and makes up collection
|
||||
func NewSnapshotCollection(db database.Storage) *SnapshotCollection {
|
||||
result := &SnapshotCollection{
|
||||
db: db,
|
||||
}
|
||||
|
||||
blobs := db.FetchByPrefix([]byte("S"))
|
||||
result.list = make([]*Snapshot, 0, len(blobs))
|
||||
|
||||
for _, blob := range blobs {
|
||||
s := &Snapshot{}
|
||||
if err := s.Decode(blob); err != nil {
|
||||
log.Printf("Error decoding snapshot: %s\n", err)
|
||||
} else {
|
||||
result.list = append(result.list, s)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Add appends new repo to collection and saves it
|
||||
func (collection *SnapshotCollection) Add(snapshot *Snapshot) error {
|
||||
for _, s := range collection.list {
|
||||
if s.Name == snapshot.Name {
|
||||
return fmt.Errorf("snapshot with name %s already exists", snapshot.Name)
|
||||
}
|
||||
}
|
||||
|
||||
err := collection.Update(snapshot)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collection.list = append(collection.list, snapshot)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Update stores updated information about repo in DB
|
||||
func (collection *SnapshotCollection) Update(snapshot *Snapshot) error {
|
||||
err := collection.db.Put(snapshot.Key(), snapshot.Encode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return collection.db.Put(snapshot.RefKey(), snapshot.packageRefs.Encode())
|
||||
}
|
||||
|
||||
// LoadComplete loads additional information about snapshot
|
||||
func (collection *SnapshotCollection) LoadComplete(snapshot *Snapshot) error {
|
||||
encoded, err := collection.db.Get(snapshot.RefKey())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
snapshot.packageRefs = &PackageRefList{}
|
||||
return snapshot.packageRefs.Decode(encoded)
|
||||
}
|
||||
|
||||
// ByName looks up snapshot by name
|
||||
func (collection *SnapshotCollection) ByName(name string) (*Snapshot, error) {
|
||||
for _, s := range collection.list {
|
||||
if s.Name == name {
|
||||
return s, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("snapshot with name %s not found", name)
|
||||
}
|
||||
|
||||
// ByUUID looks up snapshot by UUID
|
||||
func (collection *SnapshotCollection) ByUUID(uuid string) (*Snapshot, error) {
|
||||
for _, s := range collection.list {
|
||||
if s.UUID == uuid {
|
||||
return s, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("snapshot with uuid %s not found", uuid)
|
||||
}
|
||||
|
||||
// ByRemoteRepoSource looks up snapshots that have specified RemoteRepo as a source
|
||||
func (collection *SnapshotCollection) ByRemoteRepoSource(repo *RemoteRepo) []*Snapshot {
|
||||
result := make([]*Snapshot, 0)
|
||||
|
||||
for _, s := range collection.list {
|
||||
if s.SourceKind == "repo" && utils.StrSliceHasItem(s.SourceIDs, repo.UUID) {
|
||||
result = append(result, s)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ByLocalRepoSource looks up snapshots that have specified LocalRepo as a source
|
||||
func (collection *SnapshotCollection) ByLocalRepoSource(repo *LocalRepo) []*Snapshot {
|
||||
result := make([]*Snapshot, 0)
|
||||
|
||||
for _, s := range collection.list {
|
||||
if s.SourceKind == "local" && utils.StrSliceHasItem(s.SourceIDs, repo.UUID) {
|
||||
result = append(result, s)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// BySnapshotSource looks up snapshots that have specified snapshot as a source
|
||||
func (collection *SnapshotCollection) BySnapshotSource(snapshot *Snapshot) []*Snapshot {
|
||||
result := make([]*Snapshot, 0)
|
||||
|
||||
for _, s := range collection.list {
|
||||
if s.SourceKind == "snapshot" && utils.StrSliceHasItem(s.SourceIDs, snapshot.UUID) {
|
||||
result = append(result, s)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ForEach runs method for each snapshot
|
||||
func (collection *SnapshotCollection) ForEach(handler func(*Snapshot) error) error {
|
||||
var err error
|
||||
for _, s := range collection.list {
|
||||
err = handler(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Len returns number of snapshots in collection
func (collection *SnapshotCollection) Len() int {
	return len(collection.list)
}
|
||||
|
||||
// Drop removes snapshot from collection
|
||||
func (collection *SnapshotCollection) Drop(snapshot *Snapshot) error {
|
||||
snapshotPosition := -1
|
||||
|
||||
for i, s := range collection.list {
|
||||
if s == snapshot {
|
||||
snapshotPosition = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if snapshotPosition == -1 {
|
||||
panic("snapshot not found!")
|
||||
}
|
||||
|
||||
collection.list[len(collection.list)-1], collection.list[snapshotPosition], collection.list =
|
||||
nil, collection.list[len(collection.list)-1], collection.list[:len(collection.list)-1]
|
||||
|
||||
err := collection.db.Delete(snapshot.Key())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return collection.db.Delete(snapshot.RefKey())
|
||||
}
|
||||
249
debian/snapshot_test.go
vendored
249
debian/snapshot_test.go
vendored
@@ -1,249 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/smira/aptly/database"
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
// SnapshotSuite exercises Snapshot constructors, keys, and encoding.
type SnapshotSuite struct {
	PackageListMixinSuite
	// mirror used as the snapshot source in tests
	repo *RemoteRepo
}

var _ = Suite(&SnapshotSuite{})
|
||||
|
||||
// SetUpTest prepares a package list and an already-updated mirror for each test.
func (s *SnapshotSuite) SetUpTest(c *C) {
	s.SetUpPackages()
	s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
	s.repo.packageRefs = s.reflist
}
|
||||
|
||||
// TestNewSnapshotFromRepository covers snapshot creation from an updated
// mirror and the error path for a never-updated one.
func (s *SnapshotSuite) TestNewSnapshotFromRepository(c *C) {
	snapshot, _ := NewSnapshotFromRepository("snap1", s.repo)
	c.Check(snapshot.Name, Equals, "snap1")
	c.Check(snapshot.NumPackages(), Equals, 3)
	c.Check(snapshot.RefList().Len(), Equals, 3)
	c.Check(snapshot.SourceKind, Equals, "repo")
	c.Check(snapshot.SourceIDs, DeepEquals, []string{s.repo.UUID})

	// A mirror without a reference list must be rejected.
	s.repo.packageRefs = nil
	_, err := NewSnapshotFromRepository("snap2", s.repo)
	c.Check(err, ErrorMatches, ".*not updated")
}
|
||||
|
||||
// TestNewSnapshotFromLocalRepo covers snapshot creation from a local repo,
// including the error path for a repo without packages.
func (s *SnapshotSuite) TestNewSnapshotFromLocalRepo(c *C) {
	localRepo := NewLocalRepo("lala", "hoorah!")

	_, err := NewSnapshotFromLocalRepo("snap2", localRepo)
	c.Check(err, ErrorMatches, "local repo doesn't have packages")

	localRepo.UpdateRefList(s.reflist)
	snapshot, _ := NewSnapshotFromLocalRepo("snap1", localRepo)
	c.Check(snapshot.Name, Equals, "snap1")
	c.Check(snapshot.NumPackages(), Equals, 3)
	c.Check(snapshot.RefList().Len(), Equals, 3)
	c.Check(snapshot.SourceKind, Equals, "local")
	c.Check(snapshot.SourceIDs, DeepEquals, []string{localRepo.UUID})
}
|
||||
|
||||
// TestNewSnapshotFromPackageList checks source bookkeeping when snapshotting
// a package list derived from another snapshot.
func (s *SnapshotSuite) TestNewSnapshotFromPackageList(c *C) {
	snap, _ := NewSnapshotFromRepository("snap1", s.repo)

	snapshot := NewSnapshotFromPackageList("snap2", []*Snapshot{snap}, s.list, "Pulled")
	c.Check(snapshot.Name, Equals, "snap2")
	c.Check(snapshot.NumPackages(), Equals, 3)
	c.Check(snapshot.SourceKind, Equals, "snapshot")
	c.Check(snapshot.SourceIDs, DeepEquals, []string{snap.UUID})
}
|
||||
|
||||
// TestNewSnapshotFromRefList checks source bookkeeping when snapshotting
// a package reference list directly.
func (s *SnapshotSuite) TestNewSnapshotFromRefList(c *C) {
	snap, _ := NewSnapshotFromRepository("snap1", s.repo)

	snapshot := NewSnapshotFromRefList("snap2", []*Snapshot{snap}, s.reflist, "Merged")
	c.Check(snapshot.Name, Equals, "snap2")
	c.Check(snapshot.NumPackages(), Equals, 3)
	c.Check(snapshot.SourceKind, Equals, "snapshot")
	c.Check(snapshot.SourceIDs, DeepEquals, []string{snap.UUID})
}
|
||||
|
||||
// TestKey checks the DB key layout: 'S' prefix followed by the 36-char UUID.
func (s *SnapshotSuite) TestKey(c *C) {
	snapshot, _ := NewSnapshotFromRepository("snap1", s.repo)
	c.Assert(len(snapshot.Key()), Equals, 37)
	c.Assert(snapshot.Key()[0], Equals, byte('S'))
}
|
||||
|
||||
// TestRefKey checks the ref-list key: 'E' prefix followed by the same UUID
// that Key uses.
func (s *SnapshotSuite) TestRefKey(c *C) {
	snapshot, _ := NewSnapshotFromRepository("snap1", s.repo)
	c.Assert(len(snapshot.RefKey()), Equals, 37)
	c.Assert(snapshot.RefKey()[0], Equals, byte('E'))
	c.Assert(snapshot.RefKey()[1:], DeepEquals, snapshot.Key()[1:])
}
|
||||
|
||||
// TestEncodeDecode round-trips a snapshot through msgpack; the unexported
// packageRefs field is intentionally not serialized and must come back nil.
func (s *SnapshotSuite) TestEncodeDecode(c *C) {
	snapshot, _ := NewSnapshotFromRepository("snap1", s.repo)
	s.repo.packageRefs = s.reflist

	snapshot2 := &Snapshot{}
	c.Assert(snapshot2.Decode(snapshot.Encode()), IsNil)
	c.Assert(snapshot2.Name, Equals, snapshot.Name)
	c.Assert(snapshot2.packageRefs, IsNil)
}
|
||||
|
||||
// SnapshotCollectionSuite exercises SnapshotCollection against a temporary
// on-disk database.
type SnapshotCollectionSuite struct {
	PackageListMixinSuite
	db                   database.Storage
	repo1, repo2         *RemoteRepo
	lrepo1, lrepo2       *LocalRepo
	snapshot1, snapshot2 *Snapshot
	snapshot3, snapshot4 *Snapshot
	collection           *SnapshotCollection
}

var _ = Suite(&SnapshotCollectionSuite{})
|
||||
|
||||
// SetUpTest builds two mirror-backed and two local-repo-backed snapshots
// over a fresh temporary database.
func (s *SnapshotCollectionSuite) SetUpTest(c *C) {
	s.db, _ = database.OpenDB(c.MkDir())
	s.collection = NewSnapshotCollection(s.db)
	s.SetUpPackages()

	s.repo1, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false)
	s.repo1.packageRefs = s.reflist
	s.snapshot1, _ = NewSnapshotFromRepository("snap1", s.repo1)

	s.repo2, _ = NewRemoteRepo("android", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false)
	s.repo2.packageRefs = s.reflist
	s.snapshot2, _ = NewSnapshotFromRepository("snap2", s.repo2)

	s.lrepo1 = NewLocalRepo("local1", "")
	s.lrepo1.packageRefs = s.reflist
	s.snapshot3, _ = NewSnapshotFromLocalRepo("snap3", s.lrepo1)

	s.lrepo2 = NewLocalRepo("local2", "")
	s.lrepo2.packageRefs = s.reflist
	s.snapshot4, _ = NewSnapshotFromLocalRepo("snap4", s.lrepo2)
}
|
||||
|
||||
// TearDownTest releases the temporary database.
func (s *SnapshotCollectionSuite) TearDownTest(c *C) {
	s.db.Close()
}
|
||||
|
||||
func (s *SnapshotCollectionSuite) TestAddByNameByUUID(c *C) {
|
||||
snapshot, err := s.collection.ByName("snap1")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
c.Assert(s.collection.Add(s.snapshot1), IsNil)
|
||||
c.Assert(s.collection.Add(s.snapshot1), ErrorMatches, ".*already exists")
|
||||
|
||||
c.Assert(s.collection.Add(s.snapshot2), IsNil)
|
||||
|
||||
snapshot, err = s.collection.ByName("snap1")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(snapshot.String(), Equals, s.snapshot1.String())
|
||||
|
||||
collection := NewSnapshotCollection(s.db)
|
||||
snapshot, err = collection.ByName("snap1")
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(snapshot.String(), Equals, s.snapshot1.String())
|
||||
|
||||
snapshot, err = collection.ByUUID(s.snapshot1.UUID)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(snapshot.String(), Equals, s.snapshot1.String())
|
||||
}
|
||||
|
||||
// TestUpdateLoadComplete verifies that Update persists the ref list
// separately and that LoadComplete restores it after a fresh load.
func (s *SnapshotCollectionSuite) TestUpdateLoadComplete(c *C) {
	c.Assert(s.collection.Update(s.snapshot1), IsNil)

	collection := NewSnapshotCollection(s.db)
	snapshot, err := collection.ByName("snap1")
	c.Assert(err, IsNil)
	// Decode alone leaves the ref list unloaded.
	c.Assert(snapshot.packageRefs, IsNil)

	c.Assert(s.collection.LoadComplete(snapshot), IsNil)
	c.Assert(snapshot.NumPackages(), Equals, 3)
}
|
||||
|
||||
// TestForEachAndLen verifies iteration over the collection, Len, and that
// a handler error is propagated unchanged.
func (s *SnapshotCollectionSuite) TestForEachAndLen(c *C) {
	s.collection.Add(s.snapshot1)
	s.collection.Add(s.snapshot2)

	count := 0
	err := s.collection.ForEach(func(*Snapshot) error {
		count++
		return nil
	})
	c.Assert(count, Equals, 2)
	c.Assert(err, IsNil)

	c.Check(s.collection.Len(), Equals, 2)

	e := errors.New("d")
	err = s.collection.ForEach(func(*Snapshot) error {
		return e
	})
	c.Assert(err, Equals, e)
}
|
||||
|
||||
// TestFindByRemoteRepoSource verifies mirror-source lookup, including the
// empty result for a mirror with no snapshots.
func (s *SnapshotCollectionSuite) TestFindByRemoteRepoSource(c *C) {
	c.Assert(s.collection.Add(s.snapshot1), IsNil)
	c.Assert(s.collection.Add(s.snapshot2), IsNil)

	c.Check(s.collection.ByRemoteRepoSource(s.repo1), DeepEquals, []*Snapshot{s.snapshot1})
	c.Check(s.collection.ByRemoteRepoSource(s.repo2), DeepEquals, []*Snapshot{s.snapshot2})

	repo3, _ := NewRemoteRepo("other", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false)

	c.Check(s.collection.ByRemoteRepoSource(repo3), DeepEquals, []*Snapshot{})
}
|
||||
|
||||
// TestFindByLocalRepoSource verifies local-repo-source lookup, including
// the empty result for a repo with no snapshots.
func (s *SnapshotCollectionSuite) TestFindByLocalRepoSource(c *C) {
	c.Assert(s.collection.Add(s.snapshot1), IsNil)
	c.Assert(s.collection.Add(s.snapshot2), IsNil)
	c.Assert(s.collection.Add(s.snapshot3), IsNil)
	c.Assert(s.collection.Add(s.snapshot4), IsNil)

	c.Check(s.collection.ByLocalRepoSource(s.lrepo1), DeepEquals, []*Snapshot{s.snapshot3})
	c.Check(s.collection.ByLocalRepoSource(s.lrepo2), DeepEquals, []*Snapshot{s.snapshot4})

	lrepo3 := NewLocalRepo("other", "")

	c.Check(s.collection.ByLocalRepoSource(lrepo3), DeepEquals, []*Snapshot{})
}
|
||||
|
||||
// TestFindSnapshotSource verifies snapshot-source lookup across a small
// graph of derived snapshots, including the leaf with no dependents.
func (s *SnapshotCollectionSuite) TestFindSnapshotSource(c *C) {
	snapshot3 := NewSnapshotFromRefList("snap3", []*Snapshot{s.snapshot1, s.snapshot2}, s.reflist, "desc1")
	snapshot4 := NewSnapshotFromRefList("snap4", []*Snapshot{s.snapshot1}, s.reflist, "desc2")
	snapshot5 := NewSnapshotFromRefList("snap5", []*Snapshot{snapshot3}, s.reflist, "desc3")

	c.Assert(s.collection.Add(s.snapshot1), IsNil)
	c.Assert(s.collection.Add(s.snapshot2), IsNil)
	c.Assert(s.collection.Add(snapshot3), IsNil)
	c.Assert(s.collection.Add(snapshot4), IsNil)
	c.Assert(s.collection.Add(snapshot5), IsNil)

	c.Check(s.collection.BySnapshotSource(s.snapshot1), DeepEquals, []*Snapshot{snapshot3, snapshot4})
	c.Check(s.collection.BySnapshotSource(s.snapshot2), DeepEquals, []*Snapshot{snapshot3})
	c.Check(s.collection.BySnapshotSource(snapshot5), DeepEquals, []*Snapshot{})
}
|
||||
|
||||
func (s *SnapshotCollectionSuite) TestDrop(c *C) {
|
||||
s.collection.Add(s.snapshot1)
|
||||
s.collection.Add(s.snapshot2)
|
||||
|
||||
snap, _ := s.collection.ByUUID(s.snapshot1.UUID)
|
||||
c.Check(snap, Equals, s.snapshot1)
|
||||
|
||||
err := s.collection.Drop(s.snapshot1)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
_, err = s.collection.ByUUID(s.snapshot1.UUID)
|
||||
c.Check(err, ErrorMatches, "snapshot .* not found")
|
||||
|
||||
collection := NewSnapshotCollection(s.db)
|
||||
|
||||
_, err = collection.ByUUID(s.snapshot1.UUID)
|
||||
c.Check(err, ErrorMatches, "snapshot .* not found")
|
||||
|
||||
c.Check(func() { s.collection.Drop(s.snapshot1) }, Panics, "snapshot not found!")
|
||||
}
|
||||
287
debian/version.go
vendored
287
debian/version.go
vendored
@@ -1,287 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// Using documentation from: http://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-Version
|
||||
|
||||
// CompareVersions compares two package versions
|
||||
func CompareVersions(ver1, ver2 string) int {
|
||||
e1, u1, d1 := parseVersion(ver1)
|
||||
e2, u2, d2 := parseVersion(ver2)
|
||||
|
||||
r := compareVersionPart(e1, e2)
|
||||
if r != 0 {
|
||||
return r
|
||||
}
|
||||
|
||||
r = compareVersionPart(u1, u2)
|
||||
if r != 0 {
|
||||
return r
|
||||
}
|
||||
|
||||
return compareVersionPart(d1, d2)
|
||||
}
|
||||
|
||||
// parseVersion breaks down full version to components (possibly empty).
//
// Layout is [epoch:]upstream[-debian]: the debian revision is whatever
// follows the LAST dash, and the epoch is whatever precedes the FIRST
// colon of what remains (so upstream itself may still contain colons).
func parseVersion(ver string) (epoch, upstream, debian string) {
	// Peel off the debian revision at the last dash, if present.
	if dash := strings.LastIndex(ver, "-"); dash != -1 {
		ver, debian = ver[:dash], ver[dash+1:]
	}

	// The epoch sits before the first colon of the remainder.
	if head, tail, found := strings.Cut(ver, ":"); found {
		epoch, ver = head, tail
	}

	upstream = ver
	return
}
|
||||
|
||||
// compareLexicographic compares in "Debian lexicographic" way, see
// compareVersionPart below for the policy text. In short: a tilde '~'
// sorts before everything (even end-of-string), letters sort before
// non-letters, otherwise plain byte order applies.
func compareLexicographic(s1, s2 string) int {
	i := 0
	l1, l2 := len(s1), len(s2)

	for {
		if i == l1 && i == l2 {
			// both strings exhausted: equal
			break
		}

		if i == l2 {
			// s1 is longer than s2; '~' sorts before end-of-string
			if s1[i] == '~' {
				return -1 // s1 < s2
			}
			return 1 // s1 > s2
		}

		if i == l1 {
			// s2 is longer than s1
			if s2[i] == '~' {
				return 1 // s1 > s2
			}
			return -1 // s1 < s2
		}

		if s1[i] == s2[i] {
			i++
			continue
		}

		// first differing byte: tilde wins, then letters, then byte order
		if s1[i] == '~' {
			return -1
		}

		if s2[i] == '~' {
			return 1
		}

		c1, c2 := unicode.IsLetter(rune(s1[i])), unicode.IsLetter(rune(s2[i]))
		if c1 && !c2 {
			return -1
		}
		if !c1 && c2 {
			return 1
		}

		if s1[i] < s2[i] {
			return -1
		}
		return 1
	}
	return 0
}

// compareVersionPart compares parts of full version
//
// From Debian Policy Manual:
//
// "The strings are compared from left to right.
//
// First the initial part of each string consisting entirely of non-digit characters is
// determined. These two parts (one of which may be empty) are compared lexically. If a
// difference is found it is returned. The lexical comparison is a comparison of ASCII values
// modified so that all the letters sort earlier than all the non-letters and so that a tilde
// sorts before anything, even the end of a part. For example, the following parts are in sorted
// order from earliest to latest: ~~, ~~a, ~, the empty part.
//
// Then the initial part of the remainder of each string which consists entirely of digit
// characters is determined. The numerical values of these two parts are compared, and any difference
// found is returned as the result of the comparison. For these purposes an empty string (which can only occur at
// the end of one or both version strings being compared) counts as zero.
//
// These two steps (comparing and removing initial non-digit strings and initial digit strings) are
// repeated until a difference is found or both strings are exhausted."
func compareVersionPart(part1, part2 string) int {
	i1, i2 := 0, 0
	l1, l2 := len(part1), len(part2)

	for {
		// Step 1: peel off the non-digit prefixes and compare them
		// in Debian-lexical order.
		j1, j2 := i1, i2
		for j1 < l1 && !unicode.IsDigit(rune(part1[j1])) {
			j1++
		}

		for j2 < l2 && !unicode.IsDigit(rune(part2[j2])) {
			j2++
		}

		s1, s2 := part1[i1:j1], part2[i2:j2]
		r := compareLexicographic(s1, s2)
		if r != 0 {
			return r
		}

		i1, i2 = j1, j2

		// Step 2: peel off the digit runs and compare numerically
		// (an empty run counts as zero).
		for j1 < l1 && unicode.IsDigit(rune(part1[j1])) {
			j1++
		}

		for j2 < l2 && unicode.IsDigit(rune(part2[j2])) {
			j2++
		}

		s1, s2 = part1[i1:j1], part2[i2:j2]
		n1, err1 := strconv.Atoi(s1)
		n2, err2 := strconv.Atoi(s2)
		if err1 == nil && err2 == nil {
			// Fast path: both runs fit into int.
			if n1 < n2 {
				return -1
			}
			if n1 > n2 {
				return 1
			}
		} else {
			// At least one run is empty or too long for int. The old
			// code discarded the Atoi error, so overlong digit runs
			// compared via Atoi's clamped result and distinct huge
			// numbers looked equal. Compare as arbitrary-precision
			// decimals instead: after stripping leading zeros, a
			// longer string is a larger number, and equal-length
			// strings compare lexicographically (digits are ASCII-
			// ordered). An empty run trims to "" and thus acts as zero.
			t1 := strings.TrimLeft(s1, "0")
			t2 := strings.TrimLeft(s2, "0")
			switch {
			case len(t1) < len(t2):
				return -1
			case len(t1) > len(t2):
				return 1
			case t1 < t2:
				return -1
			case t1 > t2:
				return 1
			}
		}

		i1, i2 = j1, j2

		if i1 == l1 && i2 == l2 {
			break
		}
	}
	return 0
}
|
||||
|
||||
// Version relations, as produced by ParseDependency and rendered back
// by Dependency.String.
const (
	VersionDontCare = iota // no version constraint on the dependency
	VersionLess // strictly less ("<<")
	VersionLessOrEqual // less or equal ("<=", also legacy "<")
	VersionEqual // exactly equal ("=", or a bare version with no operator)
	VersionGreaterOrEqual // greater or equal (">=", also legacy ">")
	VersionGreater // strictly greater (">>")
)
|
||||
|
||||
// Dependency is a parsed version of Debian dependency to package
type Dependency struct {
	// Pkg is the target package name.
	Pkg string
	// Relation is one of the Version* constants.
	Relation int
	// Version is the comparison operand; empty when Relation is VersionDontCare.
	Version string
	// Architecture is the optional {arch} qualifier ("" when absent).
	Architecture string
}
|
||||
|
||||
// Hash calculates some predefined unique ID of Dependency
|
||||
func (d *Dependency) Hash() string {
|
||||
return fmt.Sprintf("%s:%s:%d:%s", d.Architecture, d.Pkg, d.Relation, d.Version)
|
||||
}
|
||||
|
||||
// String produces human-readable representation
|
||||
func (d *Dependency) String() string {
|
||||
var rel string
|
||||
switch d.Relation {
|
||||
case VersionEqual:
|
||||
rel = "="
|
||||
case VersionGreater:
|
||||
rel = ">>"
|
||||
case VersionLess:
|
||||
rel = "<<"
|
||||
case VersionGreaterOrEqual:
|
||||
rel = ">="
|
||||
case VersionLessOrEqual:
|
||||
rel = "<="
|
||||
case VersionDontCare:
|
||||
return fmt.Sprintf("%s [%s]", d.Pkg, d.Architecture)
|
||||
}
|
||||
return fmt.Sprintf("%s (%s %s) [%s]", d.Pkg, rel, d.Version, d.Architecture)
|
||||
}
|
||||
|
||||
// ParseDependencyVariants parses dependencies in format "pkg (>= 1.35) | other-package"
|
||||
func ParseDependencyVariants(variants string) (l []Dependency, err error) {
|
||||
parts := strings.Split(variants, "|")
|
||||
l = make([]Dependency, len(parts))
|
||||
|
||||
for i, part := range parts {
|
||||
l[i], err = ParseDependency(strings.TrimSpace(part))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ParseDependency parses dependency in format "pkg (>= 1.35) [arch]" into parts
|
||||
func ParseDependency(dep string) (d Dependency, err error) {
|
||||
if strings.HasSuffix(dep, "}") {
|
||||
i := strings.LastIndex(dep, "{")
|
||||
if i == -1 {
|
||||
err = fmt.Errorf("unable to parse dependency: %s", dep)
|
||||
return
|
||||
}
|
||||
d.Architecture = dep[i+1 : len(dep)-1]
|
||||
|
||||
dep = strings.TrimSpace(dep[:i])
|
||||
}
|
||||
|
||||
if !strings.HasSuffix(dep, ")") {
|
||||
d.Pkg = strings.TrimSpace(dep)
|
||||
d.Relation = VersionDontCare
|
||||
return
|
||||
}
|
||||
|
||||
i := strings.Index(dep, "(")
|
||||
if i == -1 {
|
||||
err = fmt.Errorf("unable to parse dependency: %s", dep)
|
||||
return
|
||||
}
|
||||
|
||||
d.Pkg = strings.TrimSpace(dep[0:i])
|
||||
|
||||
rel := ""
|
||||
if dep[i+1] == '>' || dep[i+1] == '<' || dep[i+1] == '=' {
|
||||
rel += dep[i+1 : i+2]
|
||||
if dep[i+2] == '>' || dep[i+2] == '<' || dep[i+2] == '=' {
|
||||
rel += dep[i+2 : i+3]
|
||||
d.Version = strings.TrimSpace(dep[i+3 : len(dep)-1])
|
||||
} else {
|
||||
d.Version = strings.TrimSpace(dep[i+2 : len(dep)-1])
|
||||
}
|
||||
} else {
|
||||
d.Version = strings.TrimSpace(dep[i+1 : len(dep)-1])
|
||||
}
|
||||
|
||||
switch rel {
|
||||
case "<", "<=":
|
||||
d.Relation = VersionLessOrEqual
|
||||
case ">", ">=":
|
||||
d.Relation = VersionGreaterOrEqual
|
||||
case "<<":
|
||||
d.Relation = VersionLess
|
||||
case ">>":
|
||||
d.Relation = VersionGreater
|
||||
case "", "=":
|
||||
d.Relation = VersionEqual
|
||||
default:
|
||||
err = fmt.Errorf("relation unknown %#v in dependency %s", rel, dep)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
219
debian/version_test.go
vendored
219
debian/version_test.go
vendored
@@ -1,219 +0,0 @@
|
||||
package debian
|
||||
|
||||
import (
|
||||
. "launchpad.net/gocheck"
|
||||
)
|
||||
|
||||
// VersionSuite groups the gocheck tests for version parsing and comparison.
type VersionSuite struct {
	// stanza is not referenced by the tests visible here — presumably a
	// leftover or used elsewhere in the suite; verify before removing.
	stanza Stanza
}

// Register the suite with gocheck.
var _ = Suite(&VersionSuite{})
|
||||
|
||||
// TestParseVersion checks the [epoch:]upstream[-debian] split: the
// debian revision is taken after the LAST dash, the epoch before the
// FIRST colon (later colons stay in upstream).
func (s *VersionSuite) TestParseVersion(c *C) {
	e, u, d := parseVersion("1.3.4")
	c.Check([]string{e, u, d}, DeepEquals, []string{"", "1.3.4", ""})

	// only the first colon delimits the epoch
	e, u, d = parseVersion("4:1.3:4")
	c.Check([]string{e, u, d}, DeepEquals, []string{"4", "1.3:4", ""})

	e, u, d = parseVersion("1.3.4-1")
	c.Check([]string{e, u, d}, DeepEquals, []string{"", "1.3.4", "1"})

	// only the last dash delimits the debian revision
	e, u, d = parseVersion("1.3-pre4-1")
	c.Check([]string{e, u, d}, DeepEquals, []string{"", "1.3-pre4", "1"})

	e, u, d = parseVersion("4:1.3-pre4-1")
	c.Check([]string{e, u, d}, DeepEquals, []string{"4", "1.3-pre4", "1"})
}
|
||||
|
||||
// TestCompareLexicographic checks Debian-lexical string ordering:
// tilde before everything (even end-of-string), letters before
// non-letters, otherwise byte order.
func (s *VersionSuite) TestCompareLexicographic(c *C) {
	c.Check(compareLexicographic("", ""), Equals, 0)
	c.Check(compareLexicographic("pre", "pre"), Equals, 0)

	// a proper prefix sorts earlier
	c.Check(compareLexicographic("pr", "pre"), Equals, -1)
	c.Check(compareLexicographic("pre", "pr"), Equals, 1)

	c.Check(compareLexicographic("pra", "prb"), Equals, -1)
	c.Check(compareLexicographic("prb", "pra"), Equals, 1)

	// letters sort before non-letters
	c.Check(compareLexicographic("prx", "pr+"), Equals, -1)
	c.Check(compareLexicographic("pr+", "prx"), Equals, 1)

	// tilde sorts before any letter
	c.Check(compareLexicographic("pr~", "pra"), Equals, -1)
	c.Check(compareLexicographic("pra", "pr~"), Equals, 1)

	// policy example: ~~ < ~~a < ~ < empty
	c.Check(compareLexicographic("~~", "~~a"), Equals, -1)
	c.Check(compareLexicographic("~~a", "~"), Equals, -1)
	c.Check(compareLexicographic("~", ""), Equals, -1)

	c.Check(compareLexicographic("~~a", "~~"), Equals, 1)
	c.Check(compareLexicographic("~", "~~a"), Equals, 1)
	c.Check(compareLexicographic("", "~"), Equals, 1)
}
|
||||
|
||||
// TestCompareVersionPart checks the alternating non-digit/digit
// comparison of a single version component, including the special
// pre-release behaviour of '~'.
func (s *VersionSuite) TestCompareVersionPart(c *C) {
	c.Check(compareVersionPart("", ""), Equals, 0)
	c.Check(compareVersionPart("pre", "pre"), Equals, 0)
	c.Check(compareVersionPart("12", "12"), Equals, 0)
	c.Check(compareVersionPart("1.3.5", "1.3.5"), Equals, 0)
	c.Check(compareVersionPart("1.3.5-pre1", "1.3.5-pre1"), Equals, 0)

	// '~' marks pre-releases: they sort before the base version
	c.Check(compareVersionPart("1.0~beta1~svn1245", "1.0~beta1"), Equals, -1)
	c.Check(compareVersionPart("1.0~beta1", "1.0"), Equals, -1)

	c.Check(compareVersionPart("1.0~beta1", "1.0~beta1~svn1245"), Equals, 1)
	c.Check(compareVersionPart("1.0", "1.0~beta1"), Equals, 1)

	c.Check(compareVersionPart("1.pr", "1.pre"), Equals, -1)
	c.Check(compareVersionPart("1.pre", "1.pr"), Equals, 1)

	c.Check(compareVersionPart("1.pra", "1.prb"), Equals, -1)
	c.Check(compareVersionPart("1.prb", "1.pra"), Equals, 1)

	// letters before non-letters within the non-digit chunks
	c.Check(compareVersionPart("3.prx", "3.pr+"), Equals, -1)
	c.Check(compareVersionPart("3.pr+", "3.prx"), Equals, 1)

	c.Check(compareVersionPart("3.pr~", "3.pra"), Equals, -1)
	c.Check(compareVersionPart("3.pra", "3.pr~"), Equals, 1)

	// policy tilde ordering applied after a digit chunk
	c.Check(compareVersionPart("2~~", "2~~a"), Equals, -1)
	c.Check(compareVersionPart("2~~a", "2~"), Equals, -1)
	c.Check(compareVersionPart("2~", "2"), Equals, -1)

	c.Check(compareVersionPart("2~~a", "2~~"), Equals, 1)
	c.Check(compareVersionPart("2~", "2~~a"), Equals, 1)
	c.Check(compareVersionPart("2", "2~"), Equals, 1)
}
|
||||
|
||||
// TestCompareVersions checks full epoch:upstream-revision comparison:
// epoch dominates, then upstream, then the debian revision.
func (s *VersionSuite) TestCompareVersions(c *C) {
	c.Check(CompareVersions("3:1.0~beta1~svn1245-1", "3:1.0~beta1~svn1245-1"), Equals, 0)

	// epoch wins over everything; a missing epoch counts as zero
	c.Check(CompareVersions("1:1.0~beta1~svn1245-1", "3:1.0~beta1~svn1245-1"), Equals, -1)
	c.Check(CompareVersions("1:1.0~beta1~svn1245-1", "1.0~beta1~svn1245-1"), Equals, 1)
	// revision is the tie-breaker after equal upstream versions
	c.Check(CompareVersions("1.0~beta1~svn1245-1", "1.0~beta1~svn1245-2"), Equals, -1)
	c.Check(CompareVersions("3:1.0~beta1~svn1245-1", "3:1.0~beta1-1"), Equals, -1)

	c.Check(CompareVersions("1.0~beta1~svn1245", "1.0~beta1"), Equals, -1)
	c.Check(CompareVersions("1.0~beta1", "1.0"), Equals, -1)
}
|
||||
|
||||
// TestParseDependency exercises ParseDependency across operator
// spellings, whitespace variants, the "{arch}" qualifier, bare package
// names, and malformed inputs that must produce errors.
func (s *VersionSuite) TestParseDependency(c *C) {
	d, e := ParseDependency("dpkg (>= 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionGreaterOrEqual)
	c.Check(d.Version, Equals, "1.6")
	c.Check(d.Architecture, Equals, "")

	// no whitespace around the clause is accepted too
	d, e = ParseDependency("dpkg(>>1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionGreater)
	c.Check(d.Version, Equals, "1.6")

	// a bare version with no operator means "="
	d, e = ParseDependency("dpkg(1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionEqual)
	c.Check(d.Version, Equals, "1.6")

	d, e = ParseDependency("dpkg ( 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionEqual)
	c.Check(d.Version, Equals, "1.6")

	// legacy single-character operators mean "or equal"
	d, e = ParseDependency("dpkg (> 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionGreaterOrEqual)
	c.Check(d.Version, Equals, "1.6")

	d, e = ParseDependency("dpkg (< 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionLessOrEqual)
	c.Check(d.Version, Equals, "1.6")

	d, e = ParseDependency("dpkg (= 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionEqual)
	c.Check(d.Version, Equals, "1.6")

	d, e = ParseDependency("dpkg (<< 1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionLess)
	c.Check(d.Version, Equals, "1.6")

	d, e = ParseDependency("dpkg(>>1.6)")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionGreater)
	c.Check(d.Version, Equals, "1.6")

	// "{arch}" qualifier after the version clause
	d, e = ParseDependency("dpkg (>>1.6) {i386}")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionGreater)
	c.Check(d.Version, Equals, "1.6")
	c.Check(d.Architecture, Equals, "i386")

	// qualifier without any version clause
	d, e = ParseDependency("dpkg{i386}")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionDontCare)
	c.Check(d.Version, Equals, "")
	c.Check(d.Architecture, Equals, "i386")

	// bare package name, trailing whitespace trimmed
	d, e = ParseDependency("dpkg ")
	c.Check(e, IsNil)
	c.Check(d.Pkg, Equals, "dpkg")
	c.Check(d.Relation, Equals, VersionDontCare)
	c.Check(d.Version, Equals, "")

	// malformed inputs must error out
	d, e = ParseDependency("dpkg(==1.6)")
	c.Check(e, ErrorMatches, "relation unknown.*")

	d, e = ParseDependency("dpkg==1.6)")
	c.Check(e, ErrorMatches, "unable to parse.*")

	d, e = ParseDependency("dpkg i386}")
	c.Check(e, ErrorMatches, "unable to parse.*")

	d, e = ParseDependency("dpkg ) {i386}")
	c.Check(e, ErrorMatches, "unable to parse.*")
}
|
||||
|
||||
// TestParseDependencyVariants checks the "|"-separated alternatives
// syntax: each alternative is parsed independently, and a parse error
// in any alternative fails the whole call.
func (s *VersionSuite) TestParseDependencyVariants(c *C) {
	l, e := ParseDependencyVariants("dpkg (>= 1.6)")
	c.Check(e, IsNil)
	c.Check(l, HasLen, 1)
	c.Check(l[0].Pkg, Equals, "dpkg")
	c.Check(l[0].Relation, Equals, VersionGreaterOrEqual)
	c.Check(l[0].Version, Equals, "1.6")

	// two alternatives, second with no version constraint
	l, e = ParseDependencyVariants("dpkg (>= 1.6) | mailer-agent")
	c.Check(e, IsNil)
	c.Check(l, HasLen, 2)
	c.Check(l[0].Pkg, Equals, "dpkg")
	c.Check(l[0].Relation, Equals, VersionGreaterOrEqual)
	c.Check(l[0].Version, Equals, "1.6")
	c.Check(l[1].Pkg, Equals, "mailer-agent")
	c.Check(l[1].Relation, Equals, VersionDontCare)

	// errors from ParseDependency propagate
	_, e = ParseDependencyVariants("dpkg(==1.6)")
	c.Check(e, ErrorMatches, "relation unknown.*")
}
|
||||
|
||||
// TestDependencyString checks the round-trip rendering of a parsed
// dependency, with and without a version constraint.
func (s *VersionSuite) TestDependencyString(c *C) {
	d, _ := ParseDependency("dpkg(>>1.6)")
	d.Architecture = "i386"
	c.Check(d.String(), Equals, "dpkg (>> 1.6) [i386]")

	// VersionDontCare renders without the "(op version)" clause
	d, _ = ParseDependency("dpkg")
	d.Architecture = "i386"
	c.Check(d.String(), Equals, "dpkg [i386]")
}
|
||||
Reference in New Issue
Block a user