mirror of https://github.com/aptly-dev/aptly.git (synced 2026-01-11 03:11:50 +00:00)
Merge pull request #680 from sliverc/with_installer
Add support to mirror non package installer files
@@ -69,7 +69,7 @@ type PublishedStorage interface {
// Remove removes single file under public path
Remove(path string) error
// LinkFromPool links package file from pool to dist's pool location
LinkFromPool(publishedDirectory, baseName string, sourcePool PackagePool, sourcePath string, sourceChecksums utils.ChecksumInfo, force bool) error
LinkFromPool(publishedDirectory, fileName string, sourcePool PackagePool, sourcePath string, sourceChecksums utils.ChecksumInfo, force bool) error
// Filelist returns list of files under prefix
Filelist(prefix string) ([]string, error)
// RenameFile renames (moves) file
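The renamed second parameter (baseName becomes fileName) reflects that published files are now linked under an explicit file name inside the target directory. A minimal caller sketch follows; the helper name and package are illustrative, only the LinkFromPool signature comes from this change:

```go
package example

import (
	"github.com/aptly-dev/aptly/aptly"
	"github.com/aptly-dev/aptly/utils"
)

// publishOne is a hypothetical helper: it links one pool file into a published
// directory under the given file name, using the renamed fileName parameter.
func publishOne(storage aptly.PublishedStorage, pool aptly.PackagePool,
	publishedDirectory, fileName, sourcePath string, sums utils.ChecksumInfo) error {
	// force=false keeps an already-published file when its checksums match
	return storage.LinkFromPool(publishedDirectory, fileName, pool, sourcePath, sums, false)
}
```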
@@ -130,6 +130,8 @@ type Downloader interface {
DownloadWithChecksum(ctx context.Context, url string, destination string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error
// GetProgress returns Progress object
GetProgress() Progress
// GetLength returns the size of the object at url (via a HEAD request)
GetLength(ctx context.Context, url string) (int64, error)
}

// ChecksumStorage stores checksums in some (persistent) storage
@@ -19,6 +19,7 @@ func aptlyMirrorCreate(cmd *commander.Command, args []string) error {
downloadSources := LookupOption(context.Config().DownloadSourcePackages, context.Flags(), "with-sources")
downloadUdebs := context.Flags().Lookup("with-udebs").Value.Get().(bool)
downloadInstaller := context.Flags().Lookup("with-installer").Value.Get().(bool)

var (
mirrorName, archiveURL, distribution string
@@ -36,7 +37,7 @@ func aptlyMirrorCreate(cmd *commander.Command, args []string) error {
}

repo, err := deb.NewRemoteRepo(mirrorName, archiveURL, distribution, components, context.ArchitecturesList(),
downloadSources, downloadUdebs)
downloadSources, downloadUdebs, downloadInstaller)
if err != nil {
return fmt.Errorf("unable to create mirror: %s", err)
}
@@ -94,6 +95,7 @@ Example:
}

cmd.Flag.Bool("ignore-signatures", false, "disable verification of Release file signatures")
cmd.Flag.Bool("with-installer", false, "download additional not packaged installer files")
cmd.Flag.Bool("with-sources", false, "download source packages in addition to binary packages")
cmd.Flag.Bool("with-udebs", false, "download .udeb packages (Debian installer support)")
cmd.Flag.String("filter", "", "filter packages in mirror")
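For illustration only, a standalone sketch of the three download toggles using the standard library flag package (aptly itself uses the commander-based flag set shown above, not this code):

```go
package main

import (
	"flag"
	"fmt"
)

// standalone sketch, not aptly's CLI: the three mirror toggles available after this change.
func main() {
	withSources := flag.Bool("with-sources", false, "download source packages in addition to binary packages")
	withUdebs := flag.Bool("with-udebs", false, "download .udeb packages (Debian installer support)")
	withInstaller := flag.Bool("with-installer", false, "download additional not packaged installer files")
	flag.Parse()

	fmt.Printf("sources=%v udebs=%v installer=%v\n", *withSources, *withUdebs, *withInstaller)
}
```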
@@ -33,6 +33,8 @@ func aptlyMirrorEdit(cmd *commander.Command, args []string) error {
|
||||
repo.Filter = flag.Value.String()
|
||||
case "filter-with-deps":
|
||||
repo.FilterWithDeps = flag.Value.Get().(bool)
|
||||
case "with-installer":
|
||||
repo.DownloadInstaller = flag.Value.Get().(bool)
|
||||
case "with-sources":
|
||||
repo.DownloadSources = flag.Value.Get().(bool)
|
||||
case "with-udebs":
|
||||
@@ -101,6 +103,7 @@ Example:
|
||||
cmd.Flag.String("filter", "", "filter packages in mirror")
|
||||
cmd.Flag.Bool("filter-with-deps", false, "when filtering, include dependencies of matching packages as well")
|
||||
cmd.Flag.Bool("ignore-signatures", false, "disable verification of Release file signatures")
|
||||
cmd.Flag.Bool("with-installer", false, "download additional not packaged installer files")
|
||||
cmd.Flag.Bool("with-sources", false, "download source packages in addition to binary packages")
|
||||
cmd.Flag.Bool("with-udebs", false, "download .udeb packages (Debian installer support)")
|
||||
cmd.Flag.Var(&keyRingsFlag{}, "keyring", "gpg keyring to use when verifying Release file (could be specified multiple times)")
|
||||
|
||||
@@ -54,7 +54,7 @@ func aptlyMirrorUpdate(cmd *commander.Command, args []string) error {
|
||||
}
|
||||
|
||||
context.Progress().Printf("Downloading & parsing package files...\n")
|
||||
err = repo.DownloadPackageIndexes(context.Progress(), context.Downloader(), context.CollectionFactory(), ignoreMismatch, maxTries)
|
||||
err = repo.DownloadPackageIndexes(context.Progress(), context.Downloader(), verifier, context.CollectionFactory(), ignoreMismatch, maxTries)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to update: %s", err)
|
||||
}
|
||||
|
||||
@@ -79,7 +79,7 @@ func aptlyPackageShow(cmd *commander.Command, args []string) error {
|
||||
result := q.Query(context.CollectionFactory().PackageCollection())
|
||||
|
||||
err = result.ForEach(func(p *deb.Package) error {
|
||||
p.Stanza().WriteTo(w, p.IsSource, false)
|
||||
p.Stanza().WriteTo(w, p.IsSource, false, false)
|
||||
w.Flush()
|
||||
fmt.Printf("\n")
|
||||
|
||||
|
||||
@@ -156,7 +156,7 @@ _aptly()
|
||||
"create")
|
||||
if [[ $numargs -eq 0 ]]; then
|
||||
if [[ "$cur" == -* ]]; then
|
||||
COMPREPLY=($(compgen -W "-filter= -filter-with-deps -force-components -ignore-signatures -keyring= -with-sources -with-udebs" -- ${cur}))
|
||||
COMPREPLY=($(compgen -W "-filter= -filter-with-deps -force-components -ignore-signatures -keyring= -with-installer -with-sources -with-udebs" -- ${cur}))
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
@@ -164,7 +164,7 @@ _aptly()
|
||||
"edit")
|
||||
if [[ $numargs -eq 0 ]]; then
|
||||
if [[ "$cur" == -* ]]; then
|
||||
COMPREPLY=($(compgen -W "-archive-url= -filter= -filter-with-deps -ignore-signatures -keyring= -with-sources -with-udebs" -- ${cur}))
|
||||
COMPREPLY=($(compgen -W "-archive-url= -filter= -filter-with-deps -ignore-signatures -keyring= -with-installer -with-sources -with-udebs" -- ${cur}))
|
||||
else
|
||||
COMPREPLY=($(compgen -W "$(__aptly_mirror_list)" -- ${cur}))
|
||||
fi
|
||||
|
||||
@@ -95,8 +95,8 @@ func (c *Changes) VerifyAndParse(acceptUnsigned, ignoreSignature bool, verifier
|
||||
text = input
|
||||
}
|
||||
|
||||
reader := NewControlFileReader(text)
|
||||
c.Stanza, err = reader.ReadStanza(false)
|
||||
reader := NewControlFileReader(text, false, false)
|
||||
c.Stanza, err = reader.ReadStanza()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -89,8 +89,8 @@ func GetControlFileFromDeb(packageFile string) (Stanza, error) {
|
||||
}
|
||||
|
||||
if tarHeader.Name == "./control" || tarHeader.Name == "control" {
|
||||
reader := NewControlFileReader(untar)
|
||||
stanza, err := reader.ReadStanza(false)
|
||||
reader := NewControlFileReader(untar, false, false)
|
||||
stanza, err := reader.ReadStanza()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -129,8 +129,8 @@ func GetControlFileFromDsc(dscFile string, verifier pgp.Verifier) (Stanza, error
|
||||
text = file
|
||||
}
|
||||
|
||||
reader := NewControlFileReader(text)
|
||||
stanza, err := reader.ReadStanza(false)
|
||||
reader := NewControlFileReader(text, false, false)
|
||||
stanza, err := reader.ReadStanza()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -88,6 +88,9 @@ var (
|
||||
"Directory",
|
||||
"Files",
|
||||
}
|
||||
canonicalOrderInstaller = []string{
|
||||
"",
|
||||
}
|
||||
)
|
||||
|
||||
// Copy returns copy of Stanza
|
||||
@@ -101,6 +104,9 @@ func (s Stanza) Copy() (result Stanza) {
|
||||
|
||||
func isMultilineField(field string, isRelease bool) bool {
|
||||
switch field {
|
||||
// file without a section
|
||||
case "":
|
||||
return true
|
||||
case "Description":
|
||||
return true
|
||||
case "Files":
|
||||
@@ -132,21 +138,26 @@ func writeField(w *bufio.Writer, field, value string, isRelease bool) (err error
|
||||
if !isMultilineField(field, isRelease) {
|
||||
_, err = w.WriteString(field + ": " + value + "\n")
|
||||
} else {
|
||||
if !strings.HasSuffix(value, "\n") {
|
||||
if field != "" && !strings.HasSuffix(value, "\n") {
|
||||
value = value + "\n"
|
||||
}
|
||||
|
||||
if field != "Description" {
|
||||
if field != "Description" && field != "" {
|
||||
value = "\n" + value
|
||||
}
|
||||
_, err = w.WriteString(field + ":" + value)
|
||||
|
||||
if field != "" {
|
||||
_, err = w.WriteString(field + ":" + value)
|
||||
} else {
|
||||
_, err = w.WriteString(value)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// WriteTo saves stanza back to stream, modifying itself on the fly
|
||||
func (s Stanza) WriteTo(w *bufio.Writer, isSource, isRelease bool) error {
|
||||
func (s Stanza) WriteTo(w *bufio.Writer, isSource, isRelease, isInstaller bool) error {
|
||||
canonicalOrder := canonicalOrderBinary
|
||||
if isSource {
|
||||
canonicalOrder = canonicalOrderSource
|
||||
@@ -154,6 +165,9 @@ func (s Stanza) WriteTo(w *bufio.Writer, isSource, isRelease bool) error {
|
||||
if isRelease {
|
||||
canonicalOrder = canonicalOrderRelease
|
||||
}
|
||||
if isInstaller {
|
||||
canonicalOrder = canonicalOrderInstaller
|
||||
}
|
||||
|
||||
for _, field := range canonicalOrder {
|
||||
value, ok := s[field]
|
||||
@@ -166,10 +180,13 @@ func (s Stanza) WriteTo(w *bufio.Writer, isSource, isRelease bool) error {
|
||||
}
|
||||
}
|
||||
|
||||
for field, value := range s {
|
||||
err := writeField(w, field, value, isRelease)
|
||||
if err != nil {
|
||||
return err
|
||||
// no extra fields in installer
|
||||
if !isInstaller {
|
||||
for field, value := range s {
|
||||
err := writeField(w, field, value, isRelease)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
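A minimal round-trip sketch of the writer side, assuming the exported deb.Stanza type and the new four-argument WriteTo from this change; with isInstaller=true only the unnamed field is written, verbatim, with no "Field:" prefix:

```go
package example

import (
	"bufio"
	"bytes"

	"github.com/aptly-dev/aptly/deb"
)

// writeInstallerStanza serializes an installer stanza back to a SHA256SUMS-style body.
func writeInstallerStanza(s deb.Stanza) (string, error) {
	buf := &bytes.Buffer{}
	w := bufio.NewWriter(buf)
	// isSource=false, isRelease=false, isInstaller=true
	if err := s.WriteTo(w, false, false, true); err != nil {
		return "", err
	}
	if err := w.Flush(); err != nil {
		return "", err
	}
	return buf.String(), nil
}
```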
@@ -212,22 +229,28 @@ func canonicalCase(field string) string {
|
||||
|
||||
// ControlFileReader implements reading of control files stanza by stanza
|
||||
type ControlFileReader struct {
|
||||
scanner *bufio.Scanner
|
||||
scanner *bufio.Scanner
|
||||
isRelease bool
|
||||
isInstaller bool
|
||||
}
|
||||
|
||||
// NewControlFileReader creates a ControlFileReader; it wraps the input with buffering
|
||||
func NewControlFileReader(r io.Reader) *ControlFileReader {
|
||||
func NewControlFileReader(r io.Reader, isRelease, isInstaller bool) *ControlFileReader {
|
||||
scnr := bufio.NewScanner(bufio.NewReaderSize(r, 32768))
|
||||
scnr.Buffer(nil, MaxFieldSize)
|
||||
|
||||
return &ControlFileReader{scanner: scnr}
|
||||
return &ControlFileReader{
|
||||
scanner: scnr,
|
||||
isRelease: isRelease,
|
||||
isInstaller: isInstaller,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadStanza reads one stanza from control file
|
||||
func (c *ControlFileReader) ReadStanza(isRelease bool) (Stanza, error) {
|
||||
func (c *ControlFileReader) ReadStanza() (Stanza, error) {
|
||||
stanza := make(Stanza, 32)
|
||||
lastField := ""
|
||||
lastFieldMultiline := false
|
||||
lastFieldMultiline := c.isInstaller
|
||||
|
||||
for c.scanner.Scan() {
|
||||
line := c.scanner.Text()
|
||||
@@ -240,7 +263,7 @@ func (c *ControlFileReader) ReadStanza(isRelease bool) (Stanza, error) {
|
||||
continue
|
||||
}
|
||||
|
||||
if line[0] == ' ' || line[0] == '\t' {
|
||||
if line[0] == ' ' || line[0] == '\t' || c.isInstaller {
|
||||
if lastFieldMultiline {
|
||||
stanza[lastField] += line + "\n"
|
||||
} else {
|
||||
@@ -252,7 +275,7 @@ func (c *ControlFileReader) ReadStanza(isRelease bool) (Stanza, error) {
|
||||
return nil, ErrMalformedStanza
|
||||
}
|
||||
lastField = canonicalCase(parts[0])
|
||||
lastFieldMultiline = isMultilineField(lastField, isRelease)
|
||||
lastFieldMultiline = isMultilineField(lastField, c.isRelease)
|
||||
if lastFieldMultiline {
|
||||
stanza[lastField] = parts[1]
|
||||
if parts[1] != "" {
|
||||
|
||||
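A small sketch of the reader side, assuming the deb package is imported from outside the repo: with isRelease=false and isInstaller=true the whole SHA256SUMS body is collected into one unnamed multiline field (hashes truncated for brevity):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/aptly-dev/aptly/deb"
)

func main() {
	sums := "dab96042... ./udeb.list\n9d8bb140... ./MANIFEST.udebs\n"
	// isRelease=false, isInstaller=true
	r := deb.NewControlFileReader(strings.NewReader(sums), false, true)
	stanza, err := r.ReadStanza()
	if err != nil {
		panic(err)
	}
	fmt.Print(stanza[""]) // the raw checksum lines, one per input line
}
```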
@@ -15,6 +15,10 @@ type ControlFileSuite struct {
|
||||
|
||||
var _ = Suite(&ControlFileSuite{})
|
||||
|
||||
const installerFile = `dab96042d8e25e0f6bbb8d7c5bd78543afb5eb31a4a8b122ece68ab197228028 ./udeb.list
|
||||
9d8bb14044dee520f4706ab197dfff10e9e39ecb3c1a402331712154e8284b2e ./MANIFEST.udebs
|
||||
`
|
||||
|
||||
const controlFile = `Package: bti
|
||||
Binary: bti
|
||||
Version: 032-1
|
||||
@@ -83,15 +87,15 @@ func (s *ControlFileSuite) SetUpTest(c *C) {
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestReadStanza(c *C) {
|
||||
r := NewControlFileReader(s.reader)
|
||||
r := NewControlFileReader(s.reader, false, false)
|
||||
|
||||
stanza1, err := r.ReadStanza(false)
|
||||
stanza1, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
stanza2, err := r.ReadStanza(false)
|
||||
stanza2, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
stanza3, err := r.ReadStanza(false)
|
||||
stanza3, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(stanza3, IsNil)
|
||||
|
||||
@@ -103,27 +107,47 @@ func (s *ControlFileSuite) TestReadStanza(c *C) {
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestReadWriteStanza(c *C) {
|
||||
r := NewControlFileReader(s.reader)
|
||||
stanza, err := r.ReadStanza(false)
|
||||
r := NewControlFileReader(s.reader, false, false)
|
||||
stanza, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
w := bufio.NewWriter(buf)
|
||||
err = stanza.Copy().WriteTo(w, true, false)
|
||||
err = stanza.Copy().WriteTo(w, true, false, false)
|
||||
c.Assert(err, IsNil)
|
||||
err = w.Flush()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
str := buf.String()
|
||||
|
||||
r = NewControlFileReader(buf)
|
||||
stanza2, err := r.ReadStanza(false)
|
||||
r = NewControlFileReader(buf, false, false)
|
||||
stanza2, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Assert(stanza2, DeepEquals, stanza)
|
||||
c.Assert(strings.HasPrefix(str, "Package: "), Equals, true)
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestReadWriteInstallerStanza(c *C) {
|
||||
s.reader = bytes.NewBufferString(installerFile)
|
||||
r := NewControlFileReader(s.reader, false, true)
|
||||
stanza, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
w := bufio.NewWriter(buf)
|
||||
err = stanza.Copy().WriteTo(w, false, false, true)
|
||||
c.Assert(err, IsNil)
|
||||
err = w.Flush()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
r = NewControlFileReader(buf, false, true)
|
||||
stanza2, err := r.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Assert(stanza2, DeepEquals, stanza)
|
||||
}
|
||||
|
||||
func (s *ControlFileSuite) TestCanonicalCase(c *C) {
|
||||
c.Check(canonicalCase("Package"), Equals, "Package")
|
||||
c.Check(canonicalCase("package"), Equals, "Package")
|
||||
@@ -141,8 +165,8 @@ func (s *ControlFileSuite) TestLongFields(c *C) {
|
||||
c.Assert(err, IsNil)
|
||||
defer f.Close()
|
||||
|
||||
r := NewControlFileReader(f)
|
||||
stanza, e := r.ReadStanza(false)
|
||||
r := NewControlFileReader(f, false, false)
|
||||
stanza, e := r.ReadStanza()
|
||||
c.Assert(e, IsNil)
|
||||
c.Assert(len(stanza["Provides"]), Equals, 586929)
|
||||
}
|
||||
@@ -150,9 +174,9 @@ func (s *ControlFileSuite) TestLongFields(c *C) {
|
||||
func (s *ControlFileSuite) BenchmarkReadStanza(c *C) {
|
||||
for i := 0; i < c.N; i++ {
|
||||
reader := bytes.NewBufferString(controlFile)
|
||||
r := NewControlFileReader(reader)
|
||||
r := NewControlFileReader(reader, false, false)
|
||||
for {
|
||||
s, e := r.ReadStanza(false)
|
||||
s, e := r.ReadStanza()
|
||||
if s == nil && e == nil {
|
||||
break
|
||||
}
|
||||
|
||||
@@ -29,7 +29,8 @@ type indexFile struct {
|
||||
discardable bool
|
||||
compressable bool
|
||||
onlyGzip bool
|
||||
signable bool
|
||||
clearSign bool
|
||||
detachedSign bool
|
||||
acquireByHash bool
|
||||
relativePath string
|
||||
tempFilename string
|
||||
@@ -136,34 +137,42 @@ func (file *indexFile) Finalize(signer pgp.Signer) error {
|
||||
}
|
||||
}
|
||||
|
||||
if file.signable && signer != nil {
|
||||
err = signer.DetachedSign(file.tempFilename, file.tempFilename+".gpg")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to detached sign file: %s", err)
|
||||
if signer != nil {
|
||||
if file.detachedSign {
|
||||
err = signer.DetachedSign(file.tempFilename, file.tempFilename+".gpg")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to detached sign file: %s", err)
|
||||
}
|
||||
|
||||
if file.parent.suffix != "" {
|
||||
file.parent.renameMap[filepath.Join(file.parent.basePath, file.relativePath+file.parent.suffix+".gpg")] =
|
||||
filepath.Join(file.parent.basePath, file.relativePath+".gpg")
|
||||
}
|
||||
|
||||
err = file.parent.publishedStorage.PutFile(filepath.Join(file.parent.basePath, file.relativePath+file.parent.suffix+".gpg"),
|
||||
file.tempFilename+".gpg")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to publish file: %s", err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
err = signer.ClearSign(file.tempFilename, filepath.Join(filepath.Dir(file.tempFilename), "In"+filepath.Base(file.tempFilename)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to clearsign file: %s", err)
|
||||
}
|
||||
if file.clearSign {
|
||||
err = signer.ClearSign(file.tempFilename, filepath.Join(filepath.Dir(file.tempFilename), "In"+filepath.Base(file.tempFilename)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to clearsign file: %s", err)
|
||||
}
|
||||
|
||||
if file.parent.suffix != "" {
|
||||
file.parent.renameMap[filepath.Join(file.parent.basePath, file.relativePath+file.parent.suffix+".gpg")] =
|
||||
filepath.Join(file.parent.basePath, file.relativePath+".gpg")
|
||||
file.parent.renameMap[filepath.Join(file.parent.basePath, "In"+file.relativePath+file.parent.suffix)] =
|
||||
filepath.Join(file.parent.basePath, "In"+file.relativePath)
|
||||
}
|
||||
if file.parent.suffix != "" {
|
||||
file.parent.renameMap[filepath.Join(file.parent.basePath, "In"+file.relativePath+file.parent.suffix)] =
|
||||
filepath.Join(file.parent.basePath, "In"+file.relativePath)
|
||||
}
|
||||
|
||||
err = file.parent.publishedStorage.PutFile(filepath.Join(file.parent.basePath, file.relativePath+file.parent.suffix+".gpg"),
|
||||
file.tempFilename+".gpg")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to publish file: %s", err)
|
||||
}
|
||||
|
||||
err = file.parent.publishedStorage.PutFile(filepath.Join(file.parent.basePath, "In"+file.relativePath+file.parent.suffix),
|
||||
filepath.Join(filepath.Dir(file.tempFilename), "In"+filepath.Base(file.tempFilename)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to publish file: %s", err)
|
||||
err = file.parent.publishedStorage.PutFile(filepath.Join(file.parent.basePath, "In"+file.relativePath+file.parent.suffix),
|
||||
filepath.Join(filepath.Dir(file.tempFilename), "In"+filepath.Base(file.tempFilename)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to publish file: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
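The single signable flag is replaced by detachedSign and clearSign; a condensed sketch of the resulting policy (helper name illustrative, signer calls as used in the hunk above):

```go
package example

import (
	"fmt"
	"path/filepath"

	"github.com/aptly-dev/aptly/pgp"
)

// signIndex sketches the split introduced here: detachedSign produces <file>.gpg,
// clearSign produces an "In"-prefixed variant (as for Release/InRelease).
func signIndex(signer pgp.Signer, tempFilename string, detachedSign, clearSign bool) error {
	if signer == nil {
		return nil
	}
	if detachedSign {
		if err := signer.DetachedSign(tempFilename, tempFilename+".gpg"); err != nil {
			return fmt.Errorf("unable to detached sign file: %s", err)
		}
	}
	if clearSign {
		inName := filepath.Join(filepath.Dir(tempFilename), "In"+filepath.Base(tempFilename))
		if err := signer.ClearSign(tempFilename, inName); err != nil {
			return fmt.Errorf("unable to clearsign file: %s", err)
		}
	}
	return nil
}
```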
@@ -233,11 +242,11 @@ func newIndexFiles(publishedStorage aptly.PublishedStorage, basePath, tempDir, s
|
||||
}
|
||||
}
|
||||
|
||||
func (files *indexFiles) PackageIndex(component, arch string, udeb bool) *indexFile {
|
||||
func (files *indexFiles) PackageIndex(component, arch string, udeb, installer bool) *indexFile {
|
||||
if arch == ArchitectureSource {
|
||||
udeb = false
|
||||
}
|
||||
key := fmt.Sprintf("pi-%s-%s-%v", component, arch, udeb)
|
||||
key := fmt.Sprintf("pi-%s-%s-%v-%v", component, arch, udeb, installer)
|
||||
file, ok := files.indexes[key]
|
||||
if !ok {
|
||||
var relativePath string
|
||||
@@ -247,6 +256,8 @@ func (files *indexFiles) PackageIndex(component, arch string, udeb bool) *indexF
|
||||
} else {
|
||||
if udeb {
|
||||
relativePath = filepath.Join(component, "debian-installer", fmt.Sprintf("binary-%s", arch), "Packages")
|
||||
} else if installer {
|
||||
relativePath = filepath.Join(component, fmt.Sprintf("installer-%s", arch), "current", "images", "SHA256SUMS")
|
||||
} else {
|
||||
relativePath = filepath.Join(component, fmt.Sprintf("binary-%s", arch), "Packages")
|
||||
}
|
||||
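A condensed sketch of the relativePath selection above for non-flat repositories (function name illustrative; the three paths mirror the branches in PackageIndex):

```go
package example

import (
	"fmt"
	"path/filepath"
)

// packageIndexPath: udeb and installer indexes live under their own
// per-architecture directories; the installer index is a SHA256SUMS file.
func packageIndexPath(component, arch string, udeb, installer bool) string {
	switch {
	case udeb:
		return filepath.Join(component, "debian-installer", fmt.Sprintf("binary-%s", arch), "Packages")
	case installer:
		return filepath.Join(component, fmt.Sprintf("installer-%s", arch), "current", "images", "SHA256SUMS")
	default:
		return filepath.Join(component, fmt.Sprintf("binary-%s", arch), "Packages")
	}
}
```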
@@ -255,8 +266,9 @@ func (files *indexFiles) PackageIndex(component, arch string, udeb bool) *indexF
|
||||
file = &indexFile{
|
||||
parent: files,
|
||||
discardable: false,
|
||||
compressable: true,
|
||||
signable: false,
|
||||
compressable: !installer,
|
||||
detachedSign: installer,
|
||||
clearSign: false,
|
||||
acquireByHash: files.acquireByHash,
|
||||
relativePath: relativePath,
|
||||
}
|
||||
@@ -290,7 +302,8 @@ func (files *indexFiles) ReleaseIndex(component, arch string, udeb bool) *indexF
|
||||
parent: files,
|
||||
discardable: udeb,
|
||||
compressable: false,
|
||||
signable: false,
|
||||
detachedSign: false,
|
||||
clearSign: false,
|
||||
acquireByHash: files.acquireByHash,
|
||||
relativePath: relativePath,
|
||||
}
|
||||
@@ -321,7 +334,8 @@ func (files *indexFiles) ContentsIndex(component, arch string, udeb bool) *index
|
||||
discardable: true,
|
||||
compressable: true,
|
||||
onlyGzip: true,
|
||||
signable: false,
|
||||
detachedSign: false,
|
||||
clearSign: false,
|
||||
acquireByHash: files.acquireByHash,
|
||||
relativePath: relativePath,
|
||||
}
|
||||
@@ -352,7 +366,8 @@ func (files *indexFiles) LegacyContentsIndex(arch string, udeb bool) *indexFile
|
||||
discardable: true,
|
||||
compressable: true,
|
||||
onlyGzip: true,
|
||||
signable: false,
|
||||
detachedSign: false,
|
||||
clearSign: false,
|
||||
acquireByHash: files.acquireByHash,
|
||||
relativePath: relativePath,
|
||||
}
|
||||
@@ -368,19 +383,20 @@ func (files *indexFiles) ReleaseFile() *indexFile {
|
||||
parent: files,
|
||||
discardable: false,
|
||||
compressable: false,
|
||||
signable: true,
|
||||
detachedSign: true,
|
||||
clearSign: true,
|
||||
relativePath: "Release",
|
||||
}
|
||||
}
|
||||
|
||||
func (files *indexFiles) FinalizeAll(progress aptly.Progress) (err error) {
|
||||
func (files *indexFiles) FinalizeAll(progress aptly.Progress, signer pgp.Signer) (err error) {
|
||||
if progress != nil {
|
||||
progress.InitBar(int64(len(files.indexes)), false)
|
||||
defer progress.ShutdownBar()
|
||||
}
|
||||
|
||||
for _, file := range files.indexes {
|
||||
err = file.Finalize(nil)
|
||||
err = file.Finalize(signer)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package deb
|
||||
|
||||
import (
|
||||
gocontext "context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
@@ -26,6 +27,8 @@ type Package struct {
|
||||
Provides []string
|
||||
// Hash of files section
|
||||
FilesHash uint64
|
||||
// Is this package a dummy installer package
|
||||
IsInstaller bool
|
||||
// Is this source package
|
||||
IsSource bool
|
||||
// Is this udeb package
|
||||
@@ -43,9 +46,10 @@ type Package struct {
|
||||
|
||||
// Package types
|
||||
const (
|
||||
PackageTypeBinary = "deb"
|
||||
PackageTypeUdeb = "udeb"
|
||||
PackageTypeSource = "source"
|
||||
PackageTypeBinary = "deb"
|
||||
PackageTypeUdeb = "udeb"
|
||||
PackageTypeSource = "source"
|
||||
PackageTypeInstaller = "installer"
|
||||
)
|
||||
|
||||
// Special architectures
|
||||
@@ -168,6 +172,41 @@ func NewUdebPackageFromControlFile(input Stanza) *Package {
|
||||
return p
|
||||
}
|
||||
|
||||
// NewInstallerPackageFromControlFile creates a dummy installer Package from a parsed hash sum file
|
||||
func NewInstallerPackageFromControlFile(input Stanza, repo *RemoteRepo, component, architecture string, d aptly.Downloader) (*Package, error) {
|
||||
p := &Package{
|
||||
Name: "installer",
|
||||
Architecture: architecture,
|
||||
IsInstaller: true,
|
||||
V06Plus: true,
|
||||
extra: &Stanza{},
|
||||
deps: &PackageDependencies{},
|
||||
}
|
||||
|
||||
files := make(PackageFiles, 0)
|
||||
files, err := files.ParseSumField(input[""], func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data }, false, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
relPath := filepath.Join("dists", repo.Distribution, component, fmt.Sprintf("%s-%s", p.Name, architecture), "current", "images")
|
||||
for i := range files {
|
||||
files[i].downloadPath = relPath
|
||||
|
||||
url := repo.PackageURL(files[i].DownloadURL()).String()
|
||||
var size int64
|
||||
size, err = d.GetLength(gocontext.TODO(), url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files[i].Checksums.Size = size
|
||||
}
|
||||
|
||||
p.UpdateFiles(files)
|
||||
return p, nil
|
||||
}
|
||||
|
||||
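The dummy installer package is built from the now-exported ParseSumField (see the package files hunk further down). A hedged sketch of that call in isolation:

```go
package example

import (
	"github.com/aptly-dev/aptly/deb"
	"github.com/aptly-dev/aptly/utils"
)

// parseInstallerSums builds PackageFiles from a raw SHA256SUMS body.
// withSize=false because these lines carry no size column; onlyBasePath=false keeps "./name" paths.
func parseInstallerSums(body string) (deb.PackageFiles, error) {
	files := make(deb.PackageFiles, 0)
	return files.ParseSumField(body, func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data }, false, false)
}
```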
// Key returns unique key identifying package
|
||||
func (p *Package) Key(prefix string) []byte {
|
||||
if p.V06Plus {
|
||||
@@ -509,6 +548,12 @@ func (p *Package) Stanza() (result Stanza) {
|
||||
if len(sha512) > 0 {
|
||||
result["Checksums-Sha512"] = strings.Join(sha512, "")
|
||||
}
|
||||
} else if p.IsInstaller {
|
||||
sha256 := []string{}
|
||||
for _, f := range p.Files() {
|
||||
sha256 = append(sha256, fmt.Sprintf("%s %s", f.Checksums.SHA256, f.Filename))
|
||||
}
|
||||
result[""] = strings.Join(sha256, "\n")
|
||||
} else {
|
||||
f := p.Files()[0]
|
||||
result["Filename"] = f.DownloadURL()
|
||||
@@ -563,11 +608,7 @@ func (p *Package) Equals(p2 *Package) bool {
|
||||
|
||||
// LinkFromPool links package file from pool to dist's pool location
|
||||
func (p *Package) LinkFromPool(publishedStorage aptly.PublishedStorage, packagePool aptly.PackagePool,
|
||||
prefix, component string, force bool) error {
|
||||
poolDir, err := p.PoolDirectory()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
prefix, relPath string, force bool) error {
|
||||
|
||||
for i, f := range p.Files() {
|
||||
sourcePoolPath, err := f.GetPoolPath(packagePool)
|
||||
@@ -575,7 +616,6 @@ func (p *Package) LinkFromPool(publishedStorage aptly.PublishedStorage, packageP
|
||||
return err
|
||||
}
|
||||
|
||||
relPath := filepath.Join("pool", component, poolDir)
|
||||
publishedDirectory := filepath.Join(prefix, relPath)
|
||||
|
||||
err = publishedStorage.LinkFromPool(publishedDirectory, f.Filename, packagePool, sourcePoolPath, f.Checksums, force)
|
||||
|
||||
@@ -88,7 +88,8 @@ func (files PackageFiles) Less(i, j int) bool {
|
||||
return files[i].Filename < files[j].Filename
|
||||
}
|
||||
|
||||
func (files PackageFiles) parseSumField(input string, setter func(sum *utils.ChecksumInfo, data string)) (PackageFiles, error) {
|
||||
// ParseSumField populates PackageFiles by parsing given input
|
||||
func (files PackageFiles) ParseSumField(input string, setter func(sum *utils.ChecksumInfo, data string), withSize bool, onlyBasePath bool) (PackageFiles, error) {
|
||||
for _, line := range strings.Split(input, "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
@@ -96,16 +97,23 @@ func (files PackageFiles) parseSumField(input string, setter func(sum *utils.Che
|
||||
}
|
||||
parts := strings.Fields(line)
|
||||
|
||||
if len(parts) < 3 {
|
||||
if withSize && len(parts) < 3 || !withSize && len(parts) < 2 {
|
||||
return nil, fmt.Errorf("unparseable hash sum line: %#v", line)
|
||||
}
|
||||
|
||||
size, err := strconv.ParseInt(parts[1], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to parse size: %s", err)
|
||||
var size int64
|
||||
var err error
|
||||
if withSize {
|
||||
size, err = strconv.ParseInt(parts[1], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to parse size: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
filename := filepath.Base(parts[len(parts)-1])
|
||||
filename := parts[len(parts)-1]
|
||||
if onlyBasePath {
|
||||
filename = filepath.Base(filename)
|
||||
}
|
||||
|
||||
found := false
|
||||
pos := 0
|
||||
@@ -133,22 +141,22 @@ func (files PackageFiles) parseSumField(input string, setter func(sum *utils.Che
|
||||
func (files PackageFiles) ParseSumFields(stanza Stanza) (PackageFiles, error) {
|
||||
var err error
|
||||
|
||||
files, err = files.parseSumField(stanza["Files"], func(sum *utils.ChecksumInfo, data string) { sum.MD5 = data })
|
||||
files, err = files.ParseSumField(stanza["Files"], func(sum *utils.ChecksumInfo, data string) { sum.MD5 = data }, true, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files, err = files.parseSumField(stanza["Checksums-Sha1"], func(sum *utils.ChecksumInfo, data string) { sum.SHA1 = data })
|
||||
files, err = files.ParseSumField(stanza["Checksums-Sha1"], func(sum *utils.ChecksumInfo, data string) { sum.SHA1 = data }, true, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files, err = files.parseSumField(stanza["Checksums-Sha256"], func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data })
|
||||
files, err = files.ParseSumField(stanza["Checksums-Sha256"], func(sum *utils.ChecksumInfo, data string) { sum.SHA256 = data }, true, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
files, err = files.parseSumField(stanza["Checksums-Sha512"], func(sum *utils.ChecksumInfo, data string) { sum.SHA512 = data })
|
||||
files, err = files.ParseSumField(stanza["Checksums-Sha512"], func(sum *utils.ChecksumInfo, data string) { sum.SHA512 = data }, true, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"regexp"
|
||||
|
||||
"github.com/aptly-dev/aptly/files"
|
||||
"github.com/aptly-dev/aptly/http"
|
||||
|
||||
. "gopkg.in/check.v1"
|
||||
)
|
||||
@@ -22,7 +23,7 @@ func (s *PackageSuite) SetUpTest(c *C) {
|
||||
s.stanza = packageStanza.Copy()
|
||||
|
||||
buf := bytes.NewBufferString(sourcePackageMeta)
|
||||
s.sourceStanza, _ = NewControlFileReader(buf).ReadStanza(false)
|
||||
s.sourceStanza, _ = NewControlFileReader(buf, false, false).ReadStanza()
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewFromPara(c *C) {
|
||||
@@ -43,7 +44,7 @@ func (s *PackageSuite) TestNewFromPara(c *C) {
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewUdebFromPara(c *C) {
|
||||
stanza, _ := NewControlFileReader(bytes.NewBufferString(udebPackageMeta)).ReadStanza(false)
|
||||
stanza, _ := NewControlFileReader(bytes.NewBufferString(udebPackageMeta), false, false).ReadStanza()
|
||||
p := NewUdebPackageFromControlFile(stanza)
|
||||
|
||||
c.Check(p.IsSource, Equals, false)
|
||||
@@ -57,6 +58,29 @@ func (s *PackageSuite) TestNewUdebFromPara(c *C) {
|
||||
c.Check(p.deps.Depends, DeepEquals, []string{"libc6-udeb (>= 2.13)"})
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewInstallerFromPara(c *C) {
|
||||
repo, _ := NewRemoteRepo("yandex", "http://example.com/debian", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
downloader := http.NewFakeDownloader()
|
||||
downloader.ExpectResponse("http://example.com/debian/dists/squeeze/main/installer-i386/current/images/MANIFEST.udebs", "MANIFEST.udebs")
|
||||
downloader.ExpectResponse("http://example.com/debian/dists/squeeze/main/installer-i386/current/images/udeb.list", "udeb.list")
|
||||
downloader.ExpectResponse("", "udeb.list")
|
||||
stanza, _ := NewControlFileReader(bytes.NewBufferString(installerPackageMeta), false, true).ReadStanza()
|
||||
p, err := NewInstallerPackageFromControlFile(stanza, repo, "main", "i386", downloader)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(p.IsSource, Equals, false)
|
||||
c.Check(p.IsUdeb, Equals, false)
|
||||
c.Check(p.IsInstaller, Equals, true)
|
||||
c.Check(p.Name, Equals, "installer")
|
||||
c.Check(p.Version, Equals, "")
|
||||
c.Check(p.Architecture, Equals, "i386")
|
||||
c.Check(p.Files(), HasLen, 2)
|
||||
c.Check(p.Files()[0].Filename, Equals, "./MANIFEST.udebs")
|
||||
c.Check(p.Files()[0].Checksums.Size, Equals, int64(14))
|
||||
c.Check(p.Files()[1].Filename, Equals, "./udeb.list")
|
||||
c.Check(p.Files()[1].Checksums.Size, Equals, int64(9))
|
||||
}
|
||||
|
||||
func (s *PackageSuite) TestNewSourceFromPara(c *C) {
|
||||
p, err := NewSourcePackageFromControlFile(s.sourceStanza)
|
||||
|
||||
@@ -156,7 +180,7 @@ func (s *PackageSuite) TestGetField(c *C) {
|
||||
|
||||
p4, _ := NewSourcePackageFromControlFile(s.sourceStanza.Copy())
|
||||
|
||||
stanza5, _ := NewControlFileReader(bytes.NewBufferString(udebPackageMeta)).ReadStanza(false)
|
||||
stanza5, _ := NewControlFileReader(bytes.NewBufferString(udebPackageMeta), false, false).ReadStanza()
|
||||
p5 := NewUdebPackageFromControlFile(stanza5)
|
||||
|
||||
c.Check(p.GetField("$Source"), Equals, "alien-arena")
|
||||
@@ -372,13 +396,13 @@ func (s *PackageSuite) TestLinkFromPool(c *C) {
|
||||
|
||||
p.Files()[0].PoolPath, _ = packagePool.Import(tmpFilepath, p.Files()[0].Filename, &p.Files()[0].Checksums, false, cs)
|
||||
|
||||
err := p.LinkFromPool(publishedStorage, packagePool, "", "non-free", false)
|
||||
err := p.LinkFromPool(publishedStorage, packagePool, "", "pool/non-free/a/alien-arena", false)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(p.Files()[0].Filename, Equals, "alien-arena-common_7.40-2_i386.deb")
|
||||
c.Check(p.Files()[0].downloadPath, Equals, "pool/non-free/a/alien-arena")
|
||||
|
||||
p.IsSource = true
|
||||
err = p.LinkFromPool(publishedStorage, packagePool, "", "non-free", false)
|
||||
err = p.LinkFromPool(publishedStorage, packagePool, "", "pool/non-free/a/alien-arena", false)
|
||||
c.Check(err, IsNil)
|
||||
c.Check(p.Extra()["Directory"], Equals, "pool/non-free/a/alien-arena")
|
||||
}
|
||||
@@ -490,3 +514,6 @@ Size: 29188
|
||||
MD5sum: ae70341c4d96dcded89fa670bcfea31e
|
||||
SHA1: 9532ae4226a85805189a671ee0283f719d48a5ba
|
||||
SHA256: bbb3a2cb07f741c3995b6d4bb08d772d83582b93a0236d4ea7736bc0370fc320`
|
||||
|
||||
const installerPackageMeta = `9d8bb14044dee520f4706ab197dfff10e9e39ecb3c1a402331712154e8284b2e ./MANIFEST.udebs
|
||||
dab96042d8e25e0f6bbb8d7c5bd78543afb5eb31a4a8b122ece68ab197228028 ./udeb.list`
|
||||
|
||||
@@ -569,7 +569,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
|
||||
// For all architectures, pregenerate packages/sources files
|
||||
for _, arch := range p.Architectures {
|
||||
indexes.PackageIndex(component, arch, false)
|
||||
indexes.PackageIndex(component, arch, false, false)
|
||||
}
|
||||
|
||||
if progress != nil {
|
||||
@@ -585,19 +585,26 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
progress.AddBar(1)
|
||||
}
|
||||
|
||||
matches := false
|
||||
for _, arch := range p.Architectures {
|
||||
if pkg.MatchesArchitecture(arch) {
|
||||
matches = true
|
||||
break
|
||||
}
|
||||
}
|
||||
hadUdebs = hadUdebs || pkg.IsUdeb
|
||||
|
||||
if matches {
|
||||
hadUdebs = hadUdebs || pkg.IsUdeb
|
||||
err = pkg.LinkFromPool(publishedStorage, packagePool, p.Prefix, component, forceOverwrite)
|
||||
if err != nil {
|
||||
return err
|
||||
var relPath string
|
||||
if !pkg.IsInstaller {
|
||||
poolDir, err2 := pkg.PoolDirectory()
|
||||
if err2 != nil {
|
||||
return err2
|
||||
}
|
||||
relPath = filepath.Join("pool", component, poolDir)
|
||||
} else {
|
||||
relPath = filepath.Join("dists", p.Distribution, component, fmt.Sprintf("%s-%s", pkg.Name, arch), "current", "images")
|
||||
}
|
||||
|
||||
err = pkg.LinkFromPool(publishedStorage, packagePool, p.Prefix, relPath, forceOverwrite)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
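A condensed sketch of the destination chosen above (names illustrative; mirrors the relPath branches in Publish): regular packages land in the pool, installer files under the per-architecture images directory inside dists.

```go
package example

import (
	"fmt"
	"path/filepath"
)

func publishedRelPath(isInstaller bool, distribution, component, poolDir, pkgName, arch string) string {
	if !isInstaller {
		return filepath.Join("pool", component, poolDir)
	}
	return filepath.Join("dists", distribution, component, fmt.Sprintf("%s-%s", pkgName, arch), "current", "images")
}
```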
@@ -612,7 +619,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
if pkg.MatchesArchitecture(arch) {
|
||||
var bufWriter *bufio.Writer
|
||||
|
||||
if !p.SkipContents {
|
||||
if !p.SkipContents && !pkg.IsInstaller {
|
||||
key := fmt.Sprintf("%s-%v", arch, pkg.IsUdeb)
|
||||
qualifiedName := []byte(pkg.QualifiedName())
|
||||
contents := pkg.Contents(packagePool, progress)
|
||||
@@ -629,12 +636,12 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
}
|
||||
}
|
||||
|
||||
bufWriter, err = indexes.PackageIndex(component, arch, pkg.IsUdeb).BufWriter()
|
||||
bufWriter, err = indexes.PackageIndex(component, arch, pkg.IsUdeb, pkg.IsInstaller).BufWriter()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = pkg.Stanza().WriteTo(bufWriter, pkg.IsSource, false)
|
||||
err = pkg.Stanza().WriteTo(bufWriter, pkg.IsSource, false, pkg.IsInstaller)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -687,7 +694,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
|
||||
// For all architectures, pregenerate .udeb indexes
|
||||
for _, arch := range p.Architectures {
|
||||
indexes.PackageIndex(component, arch, true)
|
||||
indexes.PackageIndex(component, arch, true, false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -710,7 +717,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
return fmt.Errorf("unable to get ReleaseIndex writer: %s", err)
|
||||
}
|
||||
|
||||
err = release.WriteTo(bufWriter, false, true)
|
||||
err = release.WriteTo(bufWriter, false, true, false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create Release file: %s", err)
|
||||
}
|
||||
@@ -742,7 +749,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
progress.Printf("Finalizing metadata files...\n")
|
||||
}
|
||||
|
||||
err = indexes.FinalizeAll(progress)
|
||||
err = indexes.FinalizeAll(progress, signer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -791,7 +798,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
|
||||
return err
|
||||
}
|
||||
|
||||
err = release.WriteTo(bufWriter, false, true)
|
||||
err = release.WriteTo(bufWriter, false, true, false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create Release file: %s", err)
|
||||
}
|
||||
|
||||
@@ -113,7 +113,7 @@ func (s *PublishedRepoSuite) SetUpTest(c *C) {
|
||||
|
||||
s.reflist = NewPackageRefListFromPackageList(s.list)
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
repo.packageRefs = s.reflist
|
||||
s.factory.RemoteRepoCollection().Add(repo)
|
||||
|
||||
@@ -314,8 +314,8 @@ func (s *PublishedRepoSuite) TestPublish(c *C) {
|
||||
rf, err := os.Open(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/Release"))
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
cfr := NewControlFileReader(rf)
|
||||
st, err := cfr.ReadStanza(true)
|
||||
cfr := NewControlFileReader(rf, true, false)
|
||||
st, err := cfr.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(st["Origin"], Equals, "ppa squeeze")
|
||||
@@ -325,24 +325,24 @@ func (s *PublishedRepoSuite) TestPublish(c *C) {
|
||||
pf, err := os.Open(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/main/binary-i386/Packages"))
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
cfr = NewControlFileReader(pf)
|
||||
cfr = NewControlFileReader(pf, false, false)
|
||||
|
||||
for i := 0; i < 3; i++ {
|
||||
st, err = cfr.ReadStanza(false)
|
||||
st, err = cfr.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(st["Filename"], Equals, "pool/main/a/alien-arena/alien-arena-common_7.40-2_i386.deb")
|
||||
}
|
||||
|
||||
st, err = cfr.ReadStanza(false)
|
||||
st, err = cfr.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(st, IsNil)
|
||||
|
||||
drf, err := os.Open(filepath.Join(s.publishedStorage.PublicPath(), "ppa/dists/squeeze/main/binary-i386/Release"))
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
cfr = NewControlFileReader(drf)
|
||||
st, err = cfr.ReadStanza(true)
|
||||
cfr = NewControlFileReader(drf, true, false)
|
||||
st, err = cfr.ReadStanza()
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(st["Archive"], Equals, "squeeze")
|
||||
|
||||
deb/remote.go
@@ -69,6 +69,8 @@ type RemoteRepo struct {
|
||||
DownloadSources bool
|
||||
// Should we download .udebs?
|
||||
DownloadUdebs bool
|
||||
// Should we download installer files?
|
||||
DownloadInstaller bool
|
||||
// "Snapshot" of current list of packages
|
||||
packageRefs *PackageRefList
|
||||
// Parsed archived root
|
||||
@@ -79,16 +81,17 @@ type RemoteRepo struct {
|
||||
|
||||
// NewRemoteRepo creates new instance of Debian remote repository with specified params
|
||||
func NewRemoteRepo(name string, archiveRoot string, distribution string, components []string,
|
||||
architectures []string, downloadSources bool, downloadUdebs bool) (*RemoteRepo, error) {
|
||||
architectures []string, downloadSources bool, downloadUdebs bool, downloadInstaller bool) (*RemoteRepo, error) {
|
||||
result := &RemoteRepo{
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
ArchiveRoot: archiveRoot,
|
||||
Distribution: distribution,
|
||||
Components: components,
|
||||
Architectures: architectures,
|
||||
DownloadSources: downloadSources,
|
||||
DownloadUdebs: downloadUdebs,
|
||||
UUID: uuid.New(),
|
||||
Name: name,
|
||||
ArchiveRoot: archiveRoot,
|
||||
Distribution: distribution,
|
||||
Components: components,
|
||||
Architectures: architectures,
|
||||
DownloadSources: downloadSources,
|
||||
DownloadUdebs: downloadUdebs,
|
||||
DownloadInstaller: downloadInstaller,
|
||||
}
|
||||
|
||||
err := result.prepare()
|
||||
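With the extra boolean argument, creating a mirror that also fetches installer images could look like this; mirror name, URL and distribution are example values only:

```go
package example

import (
	"github.com/aptly-dev/aptly/deb"
)

// the last three booleans are downloadSources, downloadUdebs, downloadInstaller
func newInstallerMirror() (*deb.RemoteRepo, error) {
	return deb.NewRemoteRepo("debian-main", "http://deb.debian.org/debian/", "bookworm",
		[]string{"main"}, []string{"amd64"}, false, false, true)
}
```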
@@ -141,6 +144,9 @@ func (repo *RemoteRepo) String() string {
|
||||
if repo.DownloadUdebs {
|
||||
srcFlag += " [udeb]"
|
||||
}
|
||||
if repo.DownloadInstaller {
|
||||
srcFlag += " [installer]"
|
||||
}
|
||||
distribution := repo.Distribution
|
||||
if distribution == "" {
|
||||
distribution = "./"
|
||||
@@ -244,6 +250,12 @@ func (repo *RemoteRepo) UdebPath(component string, architecture string) string {
|
||||
return fmt.Sprintf("%s/debian-installer/binary-%s/Packages", component, architecture)
|
||||
}
|
||||
|
||||
// InstallerPath returns the path of the installer SHA256SUMS index for the given component and
// architecture
|
||||
func (repo *RemoteRepo) InstallerPath(component string, architecture string) string {
|
||||
return fmt.Sprintf("%s/installer-%s/current/images/SHA256SUMS", component, architecture)
|
||||
}
|
||||
|
||||
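InstallerPath is relative to the dists/<distribution> root, so resolving it to a full URL goes through IndexesRootURL, as DownloadPackageIndexes does below. A small sketch (helper name illustrative):

```go
package example

import (
	"net/url"

	"github.com/aptly-dev/aptly/deb"
)

// installerIndexURL resolves the SHA256SUMS path of a component/architecture pair
// against the repository's index root, mirroring the lookup used when downloading it.
func installerIndexURL(repo *deb.RemoteRepo, component, arch string) string {
	path := repo.InstallerPath(component, arch)
	return repo.IndexesRootURL().ResolveReference(&url.URL{Path: path}).String()
}
```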
// PackageURL returns URL of package file relative to repository root
|
||||
func (repo *RemoteRepo) PackageURL(filename string) *url.URL {
|
||||
@@ -312,8 +324,8 @@ ok:
|
||||
|
||||
defer release.Close()
|
||||
|
||||
sreader := NewControlFileReader(release)
|
||||
stanza, err := sreader.ReadStanza(true)
|
||||
sreader := NewControlFileReader(release, true, false)
|
||||
stanza, err := sreader.ReadStanza()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -410,7 +422,7 @@ ok:
|
||||
}
|
||||
|
||||
// DownloadPackageIndexes downloads & parses package index files
|
||||
func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.Downloader, collectionFactory *CollectionFactory,
|
||||
func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.Downloader, verifier pgp.Verifier, collectionFactory *CollectionFactory,
|
||||
ignoreMismatch bool, maxTries int) error {
|
||||
if repo.packageList != nil {
|
||||
panic("packageList != nil")
|
||||
@@ -421,39 +433,85 @@ func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.
|
||||
packagesPaths := [][]string{}
|
||||
|
||||
if repo.IsFlat() {
|
||||
packagesPaths = append(packagesPaths, []string{repo.FlatBinaryPath(), PackageTypeBinary})
|
||||
packagesPaths = append(packagesPaths, []string{repo.FlatBinaryPath(), PackageTypeBinary, "", ""})
|
||||
if repo.DownloadSources {
|
||||
packagesPaths = append(packagesPaths, []string{repo.FlatSourcesPath(), PackageTypeSource})
|
||||
packagesPaths = append(packagesPaths, []string{repo.FlatSourcesPath(), PackageTypeSource, "", ""})
|
||||
}
|
||||
} else {
|
||||
for _, component := range repo.Components {
|
||||
for _, architecture := range repo.Architectures {
|
||||
packagesPaths = append(packagesPaths, []string{repo.BinaryPath(component, architecture), PackageTypeBinary})
|
||||
packagesPaths = append(packagesPaths, []string{repo.BinaryPath(component, architecture), PackageTypeBinary, component, architecture})
|
||||
if repo.DownloadUdebs {
|
||||
packagesPaths = append(packagesPaths, []string{repo.UdebPath(component, architecture), PackageTypeUdeb})
|
||||
packagesPaths = append(packagesPaths, []string{repo.UdebPath(component, architecture), PackageTypeUdeb, component, architecture})
|
||||
}
|
||||
if repo.DownloadInstaller {
|
||||
packagesPaths = append(packagesPaths, []string{repo.InstallerPath(component, architecture), PackageTypeInstaller, component, architecture})
|
||||
}
|
||||
}
|
||||
if repo.DownloadSources {
|
||||
packagesPaths = append(packagesPaths, []string{repo.SourcesPath(component), PackageTypeSource})
|
||||
packagesPaths = append(packagesPaths, []string{repo.SourcesPath(component), PackageTypeSource, component, "source"})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, info := range packagesPaths {
|
||||
path, kind := info[0], info[1]
|
||||
path, kind, component, architecture := info[0], info[1], info[2], info[3]
|
||||
packagesReader, packagesFile, err := http.DownloadTryCompression(gocontext.TODO(), d, repo.IndexesRootURL(), path, repo.ReleaseFiles, ignoreMismatch, maxTries)
|
||||
|
||||
isInstaller := kind == PackageTypeInstaller
|
||||
if err != nil {
|
||||
return err
|
||||
if _, ok := err.(*http.NoCandidateFoundError); isInstaller && ok {
|
||||
// checking for a gpg file is only needed when checksum matches are required;
// otherwise no candidate has actually been found and we can continue
|
||||
if ignoreMismatch {
|
||||
continue
|
||||
}
|
||||
|
||||
// some repos do not list the installer hash sum file in the Release file but provide a separate gpg file
|
||||
hashsumPath := repo.IndexesRootURL().ResolveReference(&url.URL{Path: path}).String()
|
||||
packagesFile, err = http.DownloadTemp(gocontext.TODO(), d, hashsumPath)
|
||||
if err != nil {
|
||||
if herr, ok := err.(*http.Error); ok && (herr.Code == 404 || herr.Code == 403) {
|
||||
// installer files are not available in all components and architectures
|
||||
// so ignore it if not found
|
||||
continue
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
if verifier != nil {
|
||||
hashsumGpgPath := repo.IndexesRootURL().ResolveReference(&url.URL{Path: path + ".gpg"}).String()
|
||||
var filesig *os.File
|
||||
filesig, err = http.DownloadTemp(gocontext.TODO(), d, hashsumGpgPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = verifier.VerifyDetachedSignature(filesig, packagesFile, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = packagesFile.Seek(0, 0)
|
||||
}
|
||||
|
||||
packagesReader = packagesFile
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
defer packagesFile.Close()
|
||||
|
||||
stat, _ := packagesFile.Stat()
|
||||
progress.InitBar(stat.Size(), true)
|
||||
|
||||
sreader := NewControlFileReader(packagesReader)
|
||||
sreader := NewControlFileReader(packagesReader, false, isInstaller)
|
||||
|
||||
for {
|
||||
stanza, err := sreader.ReadStanza(false)
|
||||
stanza, err := sreader.ReadStanza()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -475,6 +533,11 @@ func (repo *RemoteRepo) DownloadPackageIndexes(progress aptly.Progress, d aptly.
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if kind == PackageTypeInstaller {
|
||||
p, err = NewInstallerPackageFromControlFile(stanza, repo, component, architecture, d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
err = repo.packageList.Add(p)
|
||||
if err != nil {
|
||||
|
||||
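When the SHA256SUMS file is not listed in the Release file, the fallback above verifies its detached .gpg signature instead. A reduced sketch of that check, assuming both files were already downloaded to temporary files:

```go
package example

import (
	"os"

	"github.com/aptly-dev/aptly/pgp"
)

// verifyInstallerSums checks sums against its detached signature and rewinds the
// file so the caller can parse the verified contents from the beginning.
func verifyInstallerSums(verifier pgp.Verifier, sums, sig *os.File) error {
	if verifier == nil {
		return nil // without a verifier the file is used as-is
	}
	if err := verifier.VerifyDetachedSignature(sig, sums, false); err != nil {
		return err
	}
	_, err := sums.Seek(0, 0)
	return err
}
```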
@@ -88,8 +88,8 @@ type RemoteRepoSuite struct {
|
||||
var _ = Suite(&RemoteRepoSuite{})
|
||||
|
||||
func (s *RemoteRepoSuite) SetUpTest(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
s.flat, _ = NewRemoteRepo("exp42", "http://repos.express42.com/virool/precise/", "./", []string{}, []string{}, false, false)
|
||||
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
s.flat, _ = NewRemoteRepo("exp42", "http://repos.express42.com/virool/precise/", "./", []string{}, []string{}, false, false, false)
|
||||
s.downloader = http.NewFakeDownloader().ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/Release", exampleReleaseFile)
|
||||
s.progress = console.NewProgress()
|
||||
s.db, _ = database.NewOpenDB(c.MkDir())
|
||||
@@ -106,7 +106,7 @@ func (s *RemoteRepoSuite) TearDownTest(c *C) {
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestInvalidURL(c *C) {
|
||||
_, err := NewRemoteRepo("s", "http://lolo%2", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
_, err := NewRemoteRepo("s", "http://lolo%2", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
c.Assert(err, ErrorMatches, ".*(hexadecimal escape in host|percent-encoded characters in host|invalid URL escape).*")
|
||||
}
|
||||
|
||||
@@ -116,11 +116,11 @@ func (s *RemoteRepoSuite) TestFlatCreation(c *C) {
|
||||
c.Check(s.flat.Architectures, IsNil)
|
||||
c.Check(s.flat.Components, IsNil)
|
||||
|
||||
flat2, _ := NewRemoteRepo("flat2", "http://pkg.jenkins-ci.org/debian-stable", "binary/", []string{}, []string{}, false, false)
|
||||
flat2, _ := NewRemoteRepo("flat2", "http://pkg.jenkins-ci.org/debian-stable", "binary/", []string{}, []string{}, false, false, false)
|
||||
c.Check(flat2.IsFlat(), Equals, true)
|
||||
c.Check(flat2.Distribution, Equals, "./binary/")
|
||||
|
||||
_, err := NewRemoteRepo("fl", "http://some.repo/", "./", []string{"main"}, []string{}, false, false)
|
||||
_, err := NewRemoteRepo("fl", "http://some.repo/", "./", []string{"main"}, []string{}, false, false, false)
|
||||
c.Check(err, ErrorMatches, "components aren't supported for flat repos")
|
||||
}
|
||||
|
||||
@@ -130,8 +130,9 @@ func (s *RemoteRepoSuite) TestString(c *C) {
|
||||
|
||||
s.repo.DownloadSources = true
|
||||
s.repo.DownloadUdebs = true
|
||||
s.repo.DownloadInstaller = true
|
||||
s.flat.DownloadSources = true
|
||||
c.Check(s.repo.String(), Equals, "[yandex]: http://mirror.yandex.ru/debian/ squeeze [src] [udeb]")
|
||||
c.Check(s.repo.String(), Equals, "[yandex]: http://mirror.yandex.ru/debian/ squeeze [src] [udeb] [installer]")
|
||||
c.Check(s.flat.String(), Equals, "[exp42]: http://repos.express42.com/virool/precise/ ./ [src]")
|
||||
}
|
||||
|
||||
@@ -176,6 +177,10 @@ func (s *RemoteRepoSuite) TestSourcesPath(c *C) {
|
||||
c.Assert(s.repo.SourcesPath("main"), Equals, "main/source/Sources")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestInstallerPath(c *C) {
|
||||
c.Assert(s.repo.InstallerPath("main", "amd64"), Equals, "main/installer-amd64/current/images/SHA256SUMS")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFlatBinaryPath(c *C) {
|
||||
c.Assert(s.flat.FlatBinaryPath(), Equals, "Packages")
|
||||
}
|
||||
@@ -230,13 +235,13 @@ func (s *RemoteRepoSuite) TestFetchNullVerifier2(c *C) {
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchWrongArchitecture(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{"xyz"}, false, false)
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{"xyz"}, false, false, false)
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, ErrorMatches, "architecture xyz not available in repo.*")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestFetchWrongComponent(c *C) {
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"xyz"}, []string{"i386"}, false, false)
|
||||
s.repo, _ = NewRemoteRepo("s", "http://mirror.yandex.ru/debian/", "squeeze", []string{"xyz"}, []string{"i386"}, false, false, false)
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, ErrorMatches, "component xyz not available in repo.*")
|
||||
}
|
||||
@@ -271,7 +276,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -298,7 +303,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -319,7 +324,7 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -333,6 +338,49 @@ func (s *RemoteRepoSuite) TestDownload(c *C) {
|
||||
c.Assert(s.repo.packageRefs, NotNil)
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownloadWithInstaller(c *C) {
|
||||
s.repo.Architectures = []string{"i386"}
|
||||
s.repo.DownloadInstaller = true
|
||||
|
||||
err := s.repo.Fetch(s.downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.bz2", &http.Error{Code: 404})
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/binary-i386/Packages", examplePackagesFile)
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/installer-i386/current/images/SHA256SUMS", exampleInstallerHashSumFile)
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/installer-i386/current/images/MANIFEST", exampleInstallerManifestFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
queue, size, err := s.repo.BuildDownloadQueue(s.packagePool, s.collectionFactory.PackageCollection(), s.cs, false)
|
||||
c.Assert(err, IsNil)
|
||||
c.Check(size, Equals, int64(3)+int64(len(exampleInstallerManifestFile)))
|
||||
c.Check(queue, HasLen, 2)
|
||||
|
||||
q := make([]string, 2)
|
||||
for i := range q {
|
||||
q[i] = queue[i].File.DownloadURL()
|
||||
}
|
||||
sort.Strings(q)
|
||||
c.Check(q[0], Equals, "dists/squeeze/main/installer-i386/current/images/MANIFEST")
|
||||
c.Check(q[1], Equals, "pool/main/a/amanda/amanda-client_3.3.1-3~bpo60+1_amd64.deb")
|
||||
|
||||
s.repo.FinalizeDownload(s.collectionFactory, nil)
|
||||
c.Assert(s.repo.packageRefs, NotNil)
|
||||
|
||||
pkg, err := s.collectionFactory.PackageCollection().ByKey(s.repo.packageRefs.Refs[0])
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
c.Check(pkg.Name, Equals, "amanda-client")
|
||||
|
||||
pkg, err = s.collectionFactory.PackageCollection().ByKey(s.repo.packageRefs.Refs[1])
|
||||
c.Assert(err, IsNil)
|
||||
c.Check(pkg.Name, Equals, "installer")
|
||||
}
|
||||
|
||||
func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
|
||||
s.repo.Architectures = []string{"i386"}
|
||||
s.repo.DownloadSources = true
|
||||
@@ -347,7 +395,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -391,7 +439,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -416,7 +464,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSources(c *C) {
|
||||
s.downloader.ExpectError("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources.gz", &http.Error{Code: 404})
|
||||
s.downloader.ExpectResponse("http://mirror.yandex.ru/debian/dists/squeeze/main/source/Sources", exampleSourcesFile)
|
||||
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, s.collectionFactory, false, 1)
|
||||
err = s.repo.DownloadPackageIndexes(s.progress, s.downloader, nil, s.collectionFactory, false, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(s.downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -440,7 +488,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
|
||||
err := s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -468,7 +516,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
|
||||
err = s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -490,7 +538,7 @@ func (s *RemoteRepoSuite) TestDownloadFlat(c *C) {
|
||||
err = s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -521,7 +569,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
|
||||
err := s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -567,7 +615,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
|
||||
err = s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -593,7 +641,7 @@ func (s *RemoteRepoSuite) TestDownloadWithSourcesFlat(c *C) {
|
||||
err = s.flat.Fetch(downloader, nil)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, s.collectionFactory, true, 1)
|
||||
err = s.flat.DownloadPackageIndexes(s.progress, downloader, nil, s.collectionFactory, true, 1)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(downloader.Empty(), Equals, true)
|
||||
|
||||
@@ -628,7 +676,7 @@ func (s *RemoteRepoCollectionSuite) TestAddByName(c *C) {
|
||||
_, err := s.collection.ByName("yandex")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
c.Assert(s.collection.Add(repo), ErrorMatches, ".*already exists")
|
||||
|
||||
@@ -646,7 +694,7 @@ func (s *RemoteRepoCollectionSuite) TestByUUID(c *C) {
|
||||
_, err := s.collection.ByUUID("some-uuid")
|
||||
c.Assert(err, ErrorMatches, "*.not found")
|
||||
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
c.Assert(s.collection.Add(repo), IsNil)
|
||||
|
||||
r, err := s.collection.ByUUID(repo.UUID)
|
||||
@@ -660,7 +708,7 @@ func (s *RemoteRepoCollectionSuite) TestByUUID(c *C) {
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestUpdateLoadComplete(c *C) {
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
c.Assert(s.collection.Update(repo), IsNil)
|
||||
|
||||
collection := NewRemoteRepoCollection(s.db)
|
||||
@@ -681,7 +729,7 @@ func (s *RemoteRepoCollectionSuite) TestUpdateLoadComplete(c *C) {
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestForEachAndLen(c *C) {
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
s.collection.Add(repo)
|
||||
|
||||
count := 0
|
||||
@@ -703,10 +751,10 @@ func (s *RemoteRepoCollectionSuite) TestForEachAndLen(c *C) {
|
||||
}
|
||||
|
||||
func (s *RemoteRepoCollectionSuite) TestDrop(c *C) {
|
||||
repo1, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
repo1, _ := NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
s.collection.Add(repo1)
|
||||
|
||||
repo2, _ := NewRemoteRepo("tyndex", "http://mirror.yandex.ru/debian/", "wheezy", []string{"main"}, []string{}, false, false)
|
||||
repo2, _ := NewRemoteRepo("tyndex", "http://mirror.yandex.ru/debian/", "wheezy", []string{"main"}, []string{}, false, false, false)
|
||||
s.collection.Add(repo2)
|
||||
|
||||
r1, _ := s.collection.ByUUID(repo1.UUID)
|
||||
@@ -879,4 +927,31 @@ SHA1: 66b27417d37e024c46526c2f6d358a754fc552f3
|
||||
SHA256: 3608bca1e44ea6c4d268eb6db02260269892c0b42b86bbf1e77a6fa16c3c9282
|
||||
`
|
||||
|
||||
const exampleInstallerHashSumFile = `82f69d557f0004d2923fb03e4fb47d18187e37768dbfd0c99756f8a6c68a6d3a ./MANIFEST
|
||||
`
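The exampleInstallerHashSumFile fixture follows the standard SHA256SUMS layout: one line per file, a hex digest followed by a path relative to the images directory, and no size column. A minimal, hypothetical sketch of splitting such a line (the helper name is made up for illustration; the missing size column is also why the new Downloader.GetLength HEAD request appears further down in this diff):

```go
package main

import (
	"fmt"
	"strings"
)

// parseHashSumLine splits one "<sha256>  ./path" entry into its digest and
// path fields. Hypothetical helper for illustration; aptly's own parsing of
// these indexes may differ.
func parseHashSumLine(line string) (digest, path string, err error) {
	fields := strings.Fields(line)
	if len(fields) != 2 {
		return "", "", fmt.Errorf("malformed hashsum line: %q", line)
	}
	return fields[0], strings.TrimPrefix(fields[1], "./"), nil
}

func main() {
	d, p, err := parseHashSumLine("82f69d557f0004d2923fb03e4fb47d18187e37768dbfd0c99756f8a6c68a6d3a ./MANIFEST")
	if err != nil {
		panic(err)
	}
	fmt.Println(d[:8], p) // 82f69d55 MANIFEST
}
```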
|
||||
|
||||
const exampleInstallerManifestFile = `cdrom/debian-cd_info.tar.gz -- isolinux config files for CD
|
||||
cdrom/gtk/debian-cd_info.tar.gz -- isolinux help screens for CD (graphical)
|
||||
cdrom/gtk/initrd.gz -- initrd for use with isolinux to build a CD (graphical)
|
||||
cdrom/gtk/vmlinuz -- kernel for use with isolinux to build a CD (graphical)
|
||||
cdrom/initrd.gz -- initrd for use with isolinux to build a CD
|
||||
cdrom/vmlinuz -- kernel for use with isolinux to build a CD
|
||||
cdrom/xen/debian.cfg -- example Xen configuration
|
||||
cdrom/xen/initrd.gz -- initrd for installing under Xen
|
||||
cdrom/xen/vmlinuz -- kernel image for installing under Xen
|
||||
hd-media/boot.img.gz -- 1 gb image (compressed) for USB memory stick
|
||||
hd-media/gtk/initrd.gz -- for use on USB memory sticks
|
||||
hd-media/gtk/vmlinuz -- for use on USB memory sticks
|
||||
hd-media/initrd.gz -- for use on USB memory sticks
|
||||
hd-media/vmlinuz -- for use on USB memory sticks
|
||||
netboot/debian-installer -- PXE boot directory for tftp server
|
||||
netboot/gtk/debian-installer -- PXE boot directory for tftp server (graphical installer)
|
||||
netboot/gtk/mini.iso -- not so tiny CD image that boots the graphical netboot installer
|
||||
netboot/gtk/netboot.tar.gz -- tarball of PXE boot directory (graphical installer)
|
||||
netboot/mini.iso -- tiny CD image that boots the netboot installer
|
||||
netboot/netboot.tar.gz -- tarball of PXE boot directory
|
||||
netboot/xen/debian.cfg -- example Xen configuration
|
||||
netboot/xen/initrd.gz -- initrd for installing under Xen
|
||||
netboot/xen/vmlinuz -- kernel image for installing under Xen`
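The MANIFEST fixture pairs each image path with a free-form description, separated by " -- ". As far as this diff shows, aptly only downloads and republishes the file, so the parsing below is purely illustrative, assuming that separator:

```go
package main

import (
	"fmt"
	"strings"
)

// splitManifestLine separates a "path -- description" MANIFEST entry.
// Illustrative only; not how aptly itself consumes the file.
func splitManifestLine(line string) (path, description string) {
	parts := strings.SplitN(line, " -- ", 2)
	if len(parts) == 1 {
		return strings.TrimSpace(parts[0]), ""
	}
	return strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1])
}

func main() {
	p, d := splitManifestLine("netboot/mini.iso -- tiny CD image that boots the netboot installer")
	fmt.Printf("%s | %s\n", p, d)
}
```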
|
||||
|
||||
const exampleSourcesFile = sourcePackageMeta
|
||||
|
||||
@@ -18,7 +18,7 @@ var _ = Suite(&SnapshotSuite{})
|
||||
|
||||
func (s *SnapshotSuite) SetUpTest(c *C) {
|
||||
s.SetUpPackages()
|
||||
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
s.repo, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
s.repo.packageRefs = s.reflist
|
||||
}
|
||||
|
||||
@@ -117,11 +117,11 @@ func (s *SnapshotCollectionSuite) SetUpTest(c *C) {
|
||||
s.collection = NewSnapshotCollection(s.db)
|
||||
s.SetUpPackages()
|
||||
|
||||
s.repo1, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false)
|
||||
s.repo1, _ = NewRemoteRepo("yandex", "http://mirror.yandex.ru/debian/", "squeeze", []string{"main"}, []string{}, false, false, false)
|
||||
s.repo1.packageRefs = s.reflist
|
||||
s.snapshot1, _ = NewSnapshotFromRepository("snap1", s.repo1)
|
||||
|
||||
s.repo2, _ = NewRemoteRepo("android", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false, false)
|
||||
s.repo2, _ = NewRemoteRepo("android", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false, false, false)
|
||||
s.repo2.packageRefs = s.reflist
|
||||
s.snapshot2, _ = NewSnapshotFromRepository("snap2", s.repo2)
|
||||
|
||||
@@ -222,7 +222,7 @@ func (s *SnapshotCollectionSuite) TestFindByRemoteRepoSource(c *C) {
|
||||
c.Check(s.collection.ByRemoteRepoSource(s.repo1), DeepEquals, []*Snapshot{s.snapshot1})
|
||||
c.Check(s.collection.ByRemoteRepoSource(s.repo2), DeepEquals, []*Snapshot{s.snapshot2})
|
||||
|
||||
repo3, _ := NewRemoteRepo("other", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false, false)
|
||||
repo3, _ := NewRemoteRepo("other", "http://mirror.yandex.ru/debian/", "lenny", []string{"main"}, []string{}, false, false, false)
|
||||
|
||||
c.Check(s.collection.ByRemoteRepoSource(repo3), DeepEquals, []*Snapshot(nil))
|
||||
}
|
||||
|
||||
@@ -123,10 +123,11 @@ func (storage *PublishedStorage) RemoveDirs(path string, progress aptly.Progress
|
||||
// sourcePath is a relative path to package file in package pool
|
||||
//
|
||||
// LinkFromPool returns relative path for the published file to be included in package index
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, baseName string, sourcePool aptly.PackagePool,
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, fileName string, sourcePool aptly.PackagePool,
|
||||
sourcePath string, sourceChecksums utils.ChecksumInfo, force bool) error {
|
||||
|
||||
poolPath := filepath.Join(storage.rootPath, publishedDirectory)
|
||||
baseName := filepath.Base(fileName)
|
||||
poolPath := filepath.Join(storage.rootPath, publishedDirectory, filepath.Dir(fileName))
|
||||
|
||||
err := os.MkdirAll(poolPath, 0777)
|
||||
if err != nil {
|
||||
|
||||
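The two lines added in the files backend above derive the link target from the full relative fileName rather than from a bare base name, which is what lets nested installer paths such as netboot/boot.img.gz keep their directory part under the published location. A standalone sketch of that mapping, with illustrative values:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Values are illustrative; rootPath stands in for storage.rootPath.
	rootPath := "/srv/aptly/public"
	publishedDirectory := "dists/jessie/non-free/installer-i386/current/images"
	fileName := "netboot/boot.img.gz"

	baseName := filepath.Base(fileName)
	poolPath := filepath.Join(rootPath, publishedDirectory, filepath.Dir(fileName))

	fmt.Println(baseName) // boot.img.gz
	fmt.Println(filepath.Join(poolPath, baseName))
	// /srv/aptly/public/dists/jessie/non-free/installer-i386/current/images/netboot/boot.img.gz
}
```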
@@ -181,39 +181,40 @@ func (s *PublishedStorageSuite) TestRemove(c *C) {
|
||||
|
||||
func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
tests := []struct {
|
||||
prefix string
|
||||
component string
|
||||
sourcePath string
|
||||
poolDirectory string
|
||||
expectedFilename string
|
||||
prefix string
|
||||
sourcePath string
|
||||
publishedDirectory string
|
||||
expectedFilename string
|
||||
}{
|
||||
{ // package name regular
|
||||
prefix: "",
|
||||
component: "main",
|
||||
sourcePath: "mars-invaders_1.03.deb",
|
||||
poolDirectory: "m/mars-invaders",
|
||||
expectedFilename: "pool/main/m/mars-invaders/mars-invaders_1.03.deb",
|
||||
prefix: "",
|
||||
sourcePath: "mars-invaders_1.03.deb",
|
||||
publishedDirectory: "pool/main/m/mars-invaders",
|
||||
expectedFilename: "pool/main/m/mars-invaders/mars-invaders_1.03.deb",
|
||||
},
|
||||
{ // lib-like filename
|
||||
prefix: "",
|
||||
component: "main",
|
||||
sourcePath: "libmars-invaders_1.03.deb",
|
||||
poolDirectory: "libm/libmars-invaders",
|
||||
expectedFilename: "pool/main/libm/libmars-invaders/libmars-invaders_1.03.deb",
|
||||
prefix: "",
|
||||
sourcePath: "libmars-invaders_1.03.deb",
|
||||
publishedDirectory: "pool/main/libm/libmars-invaders",
|
||||
expectedFilename: "pool/main/libm/libmars-invaders/libmars-invaders_1.03.deb",
|
||||
},
|
||||
{ // duplicate link, shouldn't panic
|
||||
prefix: "",
|
||||
component: "main",
|
||||
sourcePath: "mars-invaders_1.03.deb",
|
||||
poolDirectory: "m/mars-invaders",
|
||||
expectedFilename: "pool/main/m/mars-invaders/mars-invaders_1.03.deb",
|
||||
prefix: "",
|
||||
sourcePath: "mars-invaders_1.03.deb",
|
||||
publishedDirectory: "pool/main/m/mars-invaders",
|
||||
expectedFilename: "pool/main/m/mars-invaders/mars-invaders_1.03.deb",
|
||||
},
|
||||
{ // prefix & component
|
||||
prefix: "ppa",
|
||||
component: "contrib",
|
||||
sourcePath: "libmars-invaders_1.04.deb",
|
||||
poolDirectory: "libm/libmars-invaders",
|
||||
expectedFilename: "pool/contrib/libm/libmars-invaders/libmars-invaders_1.04.deb",
|
||||
prefix: "ppa",
|
||||
sourcePath: "libmars-invaders_1.04.deb",
|
||||
publishedDirectory: "pool/contrib/libm/libmars-invaders",
|
||||
expectedFilename: "pool/contrib/libm/libmars-invaders/libmars-invaders_1.04.deb",
|
||||
},
|
||||
{ // installer file
|
||||
prefix: "",
|
||||
sourcePath: "netboot/boot.img.gz",
|
||||
publishedDirectory: "dists/jessie/non-free/installer-i386/current/images",
|
||||
expectedFilename: "dists/jessie/non-free/installer-i386/current/images/netboot/boot.img.gz",
|
||||
},
|
||||
}
|
||||
|
||||
@@ -221,6 +222,7 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
|
||||
for _, t := range tests {
|
||||
tmpPath := filepath.Join(c.MkDir(), t.sourcePath)
|
||||
os.MkdirAll(filepath.Dir(tmpPath), 0777)
|
||||
err := ioutil.WriteFile(tmpPath, []byte("Contents"), 0644)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
@@ -231,7 +233,7 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
// Test using hardlinks
|
||||
err = s.storage.LinkFromPool(filepath.Join(t.prefix, "pool", t.component, t.poolDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
err = s.storage.LinkFromPool(filepath.Join(t.prefix, t.publishedDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
st, err := os.Stat(filepath.Join(s.storage.rootPath, t.prefix, t.expectedFilename))
|
||||
@@ -241,7 +243,7 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Check(int(info.Nlink), Equals, 3)
|
||||
|
||||
// Test using symlinks
|
||||
err = s.storageSymlink.LinkFromPool(filepath.Join(t.prefix, "pool", t.component, t.poolDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
err = s.storageSymlink.LinkFromPool(filepath.Join(t.prefix, t.publishedDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
st, err = os.Lstat(filepath.Join(s.storageSymlink.rootPath, t.prefix, t.expectedFilename))
|
||||
@@ -252,7 +254,7 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Check(int(info.Mode&syscall.S_IFMT), Equals, int(syscall.S_IFLNK))
|
||||
|
||||
// Test using copy with checksum verification
|
||||
err = s.storageCopy.LinkFromPool(filepath.Join(t.prefix, "pool", t.component, t.poolDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
err = s.storageCopy.LinkFromPool(filepath.Join(t.prefix, t.publishedDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
st, err = os.Stat(filepath.Join(s.storageCopy.rootPath, t.prefix, t.expectedFilename))
|
||||
@@ -262,7 +264,7 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Check(int(info.Nlink), Equals, 1)
|
||||
|
||||
// Test using copy with size verification
|
||||
err = s.storageCopySize.LinkFromPool(filepath.Join(t.prefix, "pool", t.component, t.poolDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
err = s.storageCopySize.LinkFromPool(filepath.Join(t.prefix, t.publishedDirectory), t.sourcePath, pool, srcPoolPath, sourceChecksum, false)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
st, err = os.Stat(filepath.Join(s.storageCopySize.rootPath, t.prefix, t.expectedFilename))
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"compress/bzip2"
|
||||
"compress/gzip"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
@@ -90,7 +89,7 @@ func DownloadTryCompression(ctx context.Context, downloader aptly.Downloader, ba
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
err = fmt.Errorf("no candidates for %s found", baseURL.ResolveReference(&url.URL{Path: path}))
|
||||
return nil, nil, &NoCandidateFoundError{URL: baseURL.ResolveReference(&url.URL{Path: path})}
|
||||
}
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
@@ -64,6 +64,29 @@ func (downloader *downloaderImpl) GetProgress() aptly.Progress {
|
||||
return downloader.progress
|
||||
}
|
||||
|
||||
// GetLength returns the length of the object at the given url, determined via a HEAD request
|
||||
func (downloader *downloaderImpl) GetLength(ctx context.Context, url string) (int64, error) {
|
||||
req, err := downloader.newRequest(ctx, "HEAD", url)
|
||||
if err != nil {
|
||||
return -1, err
|
||||
}
|
||||
|
||||
resp, err := downloader.client.Do(req)
|
||||
if err != nil {
|
||||
return -1, errors.Wrap(err, url)
|
||||
}
|
||||
|
||||
if resp.StatusCode < 200 || resp.StatusCode > 299 {
|
||||
return -1, &Error{Code: resp.StatusCode, URL: url}
|
||||
}
|
||||
|
||||
if resp.ContentLength < 0 {
|
||||
return -1, fmt.Errorf("Could not determine length of %s", url)
|
||||
}
|
||||
|
||||
return resp.ContentLength, nil
|
||||
}
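GetLength issues a HEAD request and returns the advertised Content-Length, erroring out when the server does not report one. A sketch of how calling code might use it to pre-compute the total size of a set of installer files before queueing downloads; the lengther interface below is a local stand-in for illustration, not an aptly type:

```go
package example

import (
	"context"
	"fmt"
)

// lengther is a local stand-in for the GetLength part of aptly.Downloader.
type lengther interface {
	GetLength(ctx context.Context, url string) (int64, error)
}

// totalSize sums the advertised Content-Length of every URL, failing on the
// first file whose size the server does not report.
func totalSize(ctx context.Context, d lengther, urls []string) (int64, error) {
	var total int64
	for _, u := range urls {
		size, err := d.GetLength(ctx, u)
		if err != nil {
			return 0, fmt.Errorf("sizing %s: %v", u, err)
		}
		total += size
	}
	return total, nil
}
```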
|
||||
|
||||
// Download starts new download task
|
||||
func (downloader *downloaderImpl) Download(ctx context.Context, url string, destination string) error {
|
||||
return downloader.DownloadWithChecksum(ctx, url, destination, nil, false, 1)
|
||||
@@ -81,15 +104,10 @@ func retryableError(err error) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// DownloadWithChecksum starts new download task with checksum verification
|
||||
func (downloader *downloaderImpl) DownloadWithChecksum(ctx context.Context, url string, destination string,
|
||||
expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
|
||||
|
||||
downloader.progress.Printf("Downloading %s...\n", url)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
func (downloader *downloaderImpl) newRequest(ctx context.Context, method, url string) (*http.Request, error) {
|
||||
req, err := http.NewRequest(method, url, nil)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, url)
|
||||
return nil, errors.Wrap(err, url)
|
||||
}
|
||||
req.Close = true
|
||||
req = req.WithContext(ctx)
|
||||
@@ -100,6 +118,18 @@ func (downloader *downloaderImpl) DownloadWithChecksum(ctx context.Context, url
|
||||
req.URL.RawQuery = ""
|
||||
}
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
// DownloadWithChecksum starts new download task with checksum verification
|
||||
func (downloader *downloaderImpl) DownloadWithChecksum(ctx context.Context, url string, destination string,
|
||||
expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
|
||||
|
||||
if downloader.progress != nil {
|
||||
downloader.progress.Printf("Downloading %s...\n", url)
|
||||
}
|
||||
req, err := downloader.newRequest(ctx, "GET", url)
|
||||
|
||||
var temppath string
|
||||
for maxTries > 0 {
|
||||
temppath, err = downloader.download(req, url, destination, expected, ignoreMismatch)
|
||||
|
||||
@@ -123,3 +123,22 @@ func (s *DownloaderSuite) TestDownloadFileError(c *C) {
|
||||
c.Assert(s.d.Download(s.ctx, s.url+"/test", "/"),
|
||||
ErrorMatches, ".*permission denied")
|
||||
}
|
||||
|
||||
func (s *DownloaderSuite) TestGetLength(c *C) {
|
||||
size, err := s.d.GetLength(s.ctx, s.url+"/test")
|
||||
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(size, Equals, int64(12))
|
||||
}
|
||||
|
||||
func (s *DownloaderSuite) TestGetLength404(c *C) {
|
||||
_, err := s.d.GetLength(s.ctx, s.url+"/doesntexist")
|
||||
|
||||
c.Assert(err, ErrorMatches, "HTTP code 404.*")
|
||||
}
|
||||
|
||||
func (s *DownloaderSuite) TestGetLengthConnectError(c *C) {
|
||||
_, err := s.d.GetLength(s.ctx, "http://nosuch.localhost/")
|
||||
|
||||
c.Assert(err, ErrorMatches, ".*no such host")
|
||||
}
|
||||
|
||||
http/fake.go
@@ -60,8 +60,17 @@ func (f *FakeDownloader) Empty() bool {
|
||||
return len(f.expected) == 0
|
||||
}
|
||||
|
||||
// DownloadWithChecksum performs fake download by matching against first expectation in the queue or any expectation, with checksum verification
|
||||
func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, filename string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
|
||||
// GetLength returns content length of given url
|
||||
func (f *FakeDownloader) GetLength(ctx context.Context, url string) (int64, error) {
|
||||
expectation, err := f.getExpectedRequest(url)
|
||||
if err != nil {
|
||||
return -1, err
|
||||
}
|
||||
|
||||
return int64(len(expectation.Response)), nil
|
||||
}
|
||||
|
||||
func (f *FakeDownloader) getExpectedRequest(url string) (*expectedRequest, error) {
|
||||
var expectation expectedRequest
|
||||
if len(f.expected) > 0 && f.expected[0].URL == url {
|
||||
expectation, f.expected = f.expected[0], f.expected[1:]
|
||||
@@ -69,14 +78,24 @@ func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, f
|
||||
expectation = f.anyExpected[url]
|
||||
delete(f.anyExpected, url)
|
||||
} else {
|
||||
return fmt.Errorf("unexpected request for %s", url)
|
||||
return nil, fmt.Errorf("unexpected request for %s", url)
|
||||
}
|
||||
|
||||
if expectation.Err != nil {
|
||||
return expectation.Err
|
||||
return nil, expectation.Err
|
||||
}
|
||||
|
||||
err := os.MkdirAll(filepath.Dir(filename), 0755)
|
||||
return &expectation, nil
|
||||
}
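With expectation matching factored into getExpectedRequest, GetLength can be driven from tests the same way downloads are: register a response and the fake reports its length. A usage sketch, assuming the package's existing NewFakeDownloader constructor (the import path depends on the checkout):

```go
package main

import (
	"context"
	"fmt"

	// Import path shown for completeness; adjust to the local checkout.
	"github.com/aptly-dev/aptly/http"
)

func main() {
	// Usage sketch; assumes the package's existing NewFakeDownloader constructor.
	d := http.NewFakeDownloader()
	d.ExpectResponse("http://example.com/MANIFEST", "hello")

	size, err := d.GetLength(context.TODO(), "http://example.com/MANIFEST")
	fmt.Println(size, err) // 5 <nil> -- the length of the registered response
	fmt.Println(d.Empty()) // true   -- the expectation has been consumed
}
```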
|
||||
|
||||
// DownloadWithChecksum performs fake download by matching against first expectation in the queue or any expectation, with checksum verification
|
||||
func (f *FakeDownloader) DownloadWithChecksum(ctx context.Context, url string, filename string, expected *utils.ChecksumInfo, ignoreMismatch bool, maxTries int) error {
|
||||
expectation, err := f.getExpectedRequest(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = os.MkdirAll(filepath.Dir(filename), 0755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
http/http.go
@@ -3,6 +3,7 @@ package http
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// Error is download error connected to HTTP code
|
||||
@@ -15,3 +16,13 @@ type Error struct {
|
||||
func (e *Error) Error() string {
|
||||
return fmt.Sprintf("HTTP code %d while fetching %s", e.Code, e.URL)
|
||||
}
|
||||
|
||||
// NoCandidateFoundError indicates that no candidate for the given url could be found
|
||||
type NoCandidateFoundError struct {
|
||||
URL *url.URL
|
||||
}
|
||||
|
||||
// Error message
|
||||
func (e *NoCandidateFoundError) Error() string {
|
||||
return fmt.Sprintf("no candidates for %s found", e.URL)
|
||||
}
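Returning a typed error instead of a formatted string lets callers tell "every compression variant of this index 404ed" apart from genuine download failures. A self-contained sketch of branching on the type, mirroring the struct above; the skip-on-missing policy shown is illustrative, not necessarily aptly's actual behaviour:

```go
package main

import (
	"fmt"
	"net/url"
)

// NoCandidateFoundError mirrors the type added in http/http.go, redeclared
// here only so the sketch is self-contained.
type NoCandidateFoundError struct {
	URL *url.URL
}

func (e *NoCandidateFoundError) Error() string {
	return fmt.Sprintf("no candidates for %s found", e.URL)
}

// handle shows one way a caller could treat a missing index as non-fatal
// while still surfacing every other download failure.
func handle(err error) error {
	if err == nil {
		return nil
	}
	if _, ok := err.(*NoCandidateFoundError); ok {
		fmt.Println("index absent for this component/architecture; skipping")
		return nil
	}
	return err
}

func main() {
	u, _ := url.Parse("http://mirror.example/dists/squeeze/main/source/Sources")
	fmt.Println(handle(&NoCandidateFoundError{URL: u}))
}
```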
|
||||
|
||||
@@ -482,6 +482,10 @@ disable verification of Release file signatures
|
||||
gpg keyring to use when verifying Release file (could be specified multiple times)
|
||||
.
|
||||
.TP
|
||||
\-\fBwith\-installer\fR
|
||||
download additional not packaged installer files
|
||||
.
|
||||
.TP
|
||||
\-\fBwith\-sources\fR
|
||||
download source packages in addition to binary packages
|
||||
.
|
||||
@@ -637,6 +641,10 @@ disable verification of Release file signatures
|
||||
gpg keyring to use when verifying Release file (could be specified multiple times)
|
||||
.
|
||||
.TP
|
||||
\-\fBwith\-installer\fR
|
||||
download additional not packaged installer files
|
||||
.
|
||||
.TP
|
||||
\-\fBwith\-sources\fR
|
||||
download source packages in addition to binary packages
|
||||
.
|
||||
|
||||
@@ -263,10 +263,10 @@ func (storage *PublishedStorage) RemoveDirs(path string, progress aptly.Progress
|
||||
// sourcePath is filepath to package file in package pool
|
||||
//
|
||||
// LinkFromPool returns relative path for the published file to be included in package index
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, baseName string, sourcePool aptly.PackagePool,
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, fileName string, sourcePool aptly.PackagePool,
|
||||
sourcePath string, sourceChecksums utils.ChecksumInfo, force bool) error {
|
||||
|
||||
relPath := filepath.Join(publishedDirectory, baseName)
|
||||
relPath := filepath.Join(publishedDirectory, fileName)
|
||||
poolPath := filepath.Join(storage.prefix, relPath)
|
||||
|
||||
if storage.pathCache == nil {
|
||||
|
||||
@@ -3,6 +3,7 @@ package s3
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
. "gopkg.in/check.v1"
|
||||
@@ -236,10 +237,18 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Assert(err, IsNil)
|
||||
cksum2 := utils.ChecksumInfo{MD5: "e9dfd31cc505d51fc26975250750deab"}
|
||||
|
||||
tmpFile3 := filepath.Join(c.MkDir(), "netboot/boot.img.gz")
|
||||
os.MkdirAll(filepath.Dir(tmpFile3), 0777)
|
||||
err = ioutil.WriteFile(tmpFile3, []byte("Contents"), 0644)
|
||||
c.Assert(err, IsNil)
|
||||
cksum3 := utils.ChecksumInfo{MD5: "c1df1da7a1ce305a3b60af9d5733ac1d"}
|
||||
|
||||
src1, err := pool.Import(tmpFile1, "mars-invaders_1.03.deb", &cksum1, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
src2, err := pool.Import(tmpFile2, "mars-invaders_1.03.deb", &cksum2, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
src3, err := pool.Import(tmpFile3, "netboot/boot.img.gz", &cksum3, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
// first link from pool
|
||||
err = s.storage.LinkFromPool(filepath.Join("", "pool", "main", "m/mars-invaders"), "mars-invaders_1.03.deb", pool, src1, cksum1, false)
|
||||
@@ -279,6 +288,11 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
|
||||
c.Check(s.GetFile(c, "lala/pool/main/m/mars-invaders/mars-invaders_1.03.deb"), DeepEquals, []byte("Contents"))
|
||||
|
||||
// link from pool with nested file name
|
||||
err = s.storage.LinkFromPool("dists/jessie/non-free/installer-i386/current/images", "netboot/boot.img.gz", pool, src3, cksum3, false)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
c.Check(s.GetFile(c, "dists/jessie/non-free/installer-i386/current/images/netboot/boot.img.gz"), DeepEquals, []byte("Contents"))
|
||||
}
|
||||
|
||||
func (s *PublishedStorageSuite) TestSymLink(c *C) {
|
||||
|
||||
@@ -193,10 +193,10 @@ func (storage *PublishedStorage) RemoveDirs(path string, progress aptly.Progress
|
||||
// sourcePath is filepath to package file in package pool
|
||||
//
|
||||
// LinkFromPool returns relative path for the published file to be included in package index
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, baseName string, sourcePool aptly.PackagePool,
|
||||
func (storage *PublishedStorage) LinkFromPool(publishedDirectory, fileName string, sourcePool aptly.PackagePool,
|
||||
sourcePath string, sourceChecksums utils.ChecksumInfo, force bool) error {
|
||||
|
||||
relPath := filepath.Join(publishedDirectory, baseName)
|
||||
relPath := filepath.Join(publishedDirectory, fileName)
|
||||
poolPath := filepath.Join(storage.prefix, relPath)
|
||||
|
||||
var (
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
@@ -157,10 +158,18 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
c.Assert(err, IsNil)
|
||||
cksum2 := utils.ChecksumInfo{MD5: "e9dfd31cc505d51fc26975250750deab"}
|
||||
|
||||
tmpFile3 := filepath.Join(c.MkDir(), "netboot/boot.img.gz")
|
||||
os.MkdirAll(filepath.Dir(tmpFile3), 0777)
|
||||
err = ioutil.WriteFile(tmpFile3, []byte("Contents"), 0644)
|
||||
c.Assert(err, IsNil)
|
||||
cksum3 := utils.ChecksumInfo{MD5: "c1df1da7a1ce305a3b60af9d5733ac1d"}
|
||||
|
||||
src1, err := pool.Import(tmpFile1, "mars-invaders_1.03.deb", &cksum1, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
src2, err := pool.Import(tmpFile2, "mars-invaders_1.03.deb", &cksum2, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
src3, err := pool.Import(tmpFile3, "netboot/boot.img.gz", &cksum3, true, cs)
|
||||
c.Assert(err, IsNil)
|
||||
|
||||
// first link from pool
|
||||
err = s.storage.LinkFromPool(filepath.Join("", "pool", "main", "m/mars-invaders"), "mars-invaders_1.03.deb", pool, src1, cksum1, false)
|
||||
@@ -193,6 +202,14 @@ func (s *PublishedStorageSuite) TestLinkFromPool(c *C) {
|
||||
data, err = s.storage.conn.ObjectGetBytes("test", "pool/main/m/mars-invaders/mars-invaders_1.03.deb")
|
||||
c.Check(err, IsNil)
|
||||
c.Check(data, DeepEquals, []byte("Spam"))
|
||||
|
||||
// link from pool with nested file name
|
||||
err = s.storage.LinkFromPool("dists/jessie/non-free/installer-i386/current/images", "netboot/boot.img.gz", pool, src3, cksum3, false)
|
||||
c.Check(err, IsNil)
|
||||
|
||||
data, err = s.storage.conn.ObjectGetBytes("test", "dists/jessie/non-free/installer-i386/current/images/netboot/boot.img.gz")
|
||||
c.Check(err, IsNil)
|
||||
c.Check(data, DeepEquals, []byte("Contents"))
|
||||
}
|
||||
|
||||
func (s *PublishedStorageSuite) TestSymLink(c *C) {
|
||||
|
||||
system/files/ubuntu-archive-keyring.gpg (new binary file; contents not shown)
@@ -84,6 +84,7 @@ class BaseTest(object):
|
||||
fixtureDBDir = os.path.join(os.environ["HOME"], "aptly-fixture-db")
|
||||
fixturePoolDir = os.path.join(os.environ["HOME"], "aptly-fixture-pool")
|
||||
fixtureGpgKeys = ["debian-archive-keyring.gpg",
|
||||
"ubuntu-archive-keyring.gpg",
|
||||
"launchpad.key",
|
||||
"flat.key",
|
||||
"pagerduty.key",
|
||||
|
||||
@@ -28,6 +28,7 @@ Options:
|
||||
-gpg-provider="": PGP implementation ("gpg" for external gpg or "internal" for Go internal implementation)
|
||||
-ignore-signatures: disable verification of Release file signatures
|
||||
-keyring=: gpg keyring to use when verifying Release file (could be specified multiple times)
|
||||
-with-installer: download additional not packaged installer files
|
||||
-with-sources: download source packages in addition to binary packages
|
||||
-with-udebs: download .udeb packages (Debian installer support)
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ Options:
|
||||
-gpg-provider="": PGP implementation ("gpg" for external gpg or "internal" for Go internal implementation)
|
||||
-ignore-signatures: disable verification of Release file signatures
|
||||
-keyring=: gpg keyring to use when verifying Release file (could be specified multiple times)
|
||||
-with-installer: download additional not packaged installer files
|
||||
-with-sources: download source packages in addition to binary packages
|
||||
-with-udebs: download .udeb packages (Debian installer support)
|
||||
ERROR: unable to parse command
|
||||
|
||||
@@ -20,6 +20,7 @@ Options:
|
||||
-gpg-provider="": PGP implementation ("gpg" for external gpg or "internal" for Go internal implementation)
|
||||
-ignore-signatures: disable verification of Release file signatures
|
||||
-keyring=: gpg keyring to use when verifying Release file (could be specified multiple times)
|
||||
-with-installer: download additional not packaged installer files
|
||||
-with-sources: download source packages in addition to binary packages
|
||||
-with-udebs: download .udeb packages (Debian installer support)
|
||||
ERROR: unable to parse flags
|
||||
|
||||
system/t04_mirror/UpdateMirror23Test_gold (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
|
||||
|
||||
Applying filter...
|
||||
Building download queue...
|
||||
Download queue: 12 items (25.14 MiB)
|
||||
Downloading & parsing package files...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/InRelease...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/Release...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/Release.gpg...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/binary-s390x/Packages.bz2...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/MANIFEST...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/MANIFEST.udebs...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/MD5SUMS...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/SHA256SUMS...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/generic/debian.exec...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/generic/initrd.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/generic/kernel.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/generic/parmfile.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/tape/initrd.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/tape/kernel.debian-nolabel...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/tape/kernel.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/tape/parmfile.debian...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/main/installer-s390x/current/images/udeb.list...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/non-free/binary-s390x/Packages.bz2...
|
||||
Downloading http://mirror.yandex.ru/debian/dists/wheezy/non-free/installer-s390x/current/images/SHA256SUMS...
|
||||
Mirror `wheezy` has been successfully updated.
|
||||
Packages filtered: 35003 -> 1.
|
||||
gpgv: Good signature from "Debian Archive Automatic Signing Key (7.0/wheezy) <ftpmaster@debian.org>"
|
||||
gpgv: Good signature from "Debian Archive Automatic Signing Key (8/jessie) <ftpmaster@debian.org>"
|
||||
gpgv: Good signature from "Wheezy Stable Release Key <debian-release@lists.debian.org>"
|
||||
gpgv: RSA key ID 2B90D010
|
||||
gpgv: RSA key ID 46925553
|
||||
gpgv: RSA key ID 65FFB764
|
||||
system/t04_mirror/UpdateMirror24Test_gold (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
|
||||
|
||||
Applying filter...
|
||||
Building download queue...
|
||||
Download queue: 37 items (166.14 MiB)
|
||||
Downloading & parsing package files...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/InRelease...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/Release...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/Release.gpg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/binary-amd64/Packages.bz2...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/MANIFEST...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/MANIFEST.udebs...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/SHA256SUMS...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/SHA256SUMS.gpg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/cdrom/debian-cd_info.tar.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/cdrom/initrd.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/cdrom/vmlinuz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/cdrom/xen/xm-debian.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/hd-media/boot.img.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/hd-media/initrd.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/hd-media/vmlinuz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/boot.img.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/mini.iso...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/netboot.tar.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/adtxt.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/exithelp.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f1.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f10.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f2.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f3.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f4.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f5.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f6.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f7.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f8.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/f9.txt...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/menu.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/prompt.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/rqtxt.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/splash.png...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/stdmenu.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/syslinux.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/txt.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/boot-screens/vesamenu.c32...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/initrd.gz...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/linux...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/ubuntu-installer/amd64/pxelinux.0...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/netboot/xen/xm-debian.cfg...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/main/installer-amd64/current/images/udeb.list...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/restricted/binary-amd64/Packages.bz2...
|
||||
Downloading http://mirror.yandex.ru/ubuntu/dists/trusty/restricted/installer-amd64/current/images/SHA256SUMS...
|
||||
Mirror `trusty` has been successfully updated.
|
||||
Packages filtered: 8616 -> 1.
|
||||
gpgv: Good signature from "Ubuntu Archive Automatic Signing Key (2012) <ftpmaster@ubuntu.com>"
|
||||
gpgv: Good signature from "Ubuntu Archive Automatic Signing Key (2012) <ftpmaster@ubuntu.com>"
|
||||
gpgv: Good signature from "Ubuntu Archive Automatic Signing Key <ftpmaster@ubuntu.com>"
|
||||
gpgv: Good signature from "Ubuntu Archive Automatic Signing Key <ftpmaster@ubuntu.com>"
|
||||
gpgv: DSA key ID 437D05B5
|
||||
gpgv: RSA key ID C0B21F32
|
||||
gpgv: DSA key ID 437D05B5
|
||||
gpgv: RSA key ID C0B21F32
|
||||
@@ -376,3 +376,35 @@ class UpdateMirror22Test(BaseTest):
|
||||
|
||||
def outputMatchPrepare(_, s):
|
||||
return re.sub(r'Signature made .* using|Packages filtered: .* -> 0.', '', s)
|
||||
|
||||
|
||||
class UpdateMirror23Test(BaseTest):
|
||||
"""
|
||||
update mirrors: update with installer
|
||||
"""
|
||||
longTest = False
|
||||
fixtureGpg = True
|
||||
fixtureCmds = [
|
||||
"aptly -architectures=s390x mirror create -keyring=aptlytest.gpg -filter='installer' -with-installer wheezy http://mirror.yandex.ru/debian/ wheezy main non-free",
|
||||
]
|
||||
runCmd = "aptly mirror update -keyring=aptlytest.gpg wheezy"
|
||||
outputMatchPrepare = filterOutSignature
|
||||
|
||||
def output_processor(self, output):
|
||||
return "\n".join(sorted(output.split("\n")))
|
||||
|
||||
|
||||
class UpdateMirror24Test(BaseTest):
|
||||
"""
|
||||
update mirrors: update with installer with separate gpg file
|
||||
"""
|
||||
longTest = False
|
||||
fixtureGpg = True
|
||||
fixtureCmds = [
|
||||
"aptly -architectures=amd64 mirror create -keyring=aptlytest.gpg -filter='installer' -with-installer trusty http://mirror.yandex.ru/ubuntu/ trusty main restricted",
|
||||
]
|
||||
runCmd = "aptly mirror update -keyring=aptlytest.gpg trusty"
|
||||
outputMatchPrepare = filterOutSignature
|
||||
|
||||
def output_processor(self, output):
|
||||
return "\n".join(sorted(output.split("\n")))
|
||||
|
||||
system/t06_publish/PublishSnapshot38Test_gold (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
Loading packages...
|
||||
Generating metadata files and linking package files...
|
||||
Finalizing metadata files...
|
||||
Signing file 'main_installer-s390x_current_images_SHA256SUMS' with gpg, please enter your passphrase when prompted:
|
||||
Signing file 'Release' with gpg, please enter your passphrase when prompted:
|
||||
Clearsigning file 'Release' with gpg, please enter your passphrase when prompted:
|
||||
|
||||
Snapshot wheezy has been successfully published.
|
||||
Please setup your webserver to serve directory '${HOME}/.aptly/public' with autoindexing.
|
||||
Now you can add following line to apt sources:
|
||||
deb http://your-server/ wheezy main
|
||||
Don't forget to add your GPG key to apt with apt-key.
|
||||
|
||||
You can also use `aptly serve` to publish your repositories over HTTP quickly.
|
||||
system/t06_publish/PublishSnapshot38Test_installer_s390x (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
a29b0c17f98afb5ebc5a65bd03411e430dd372223d931b2c2441604fd6942472 ./MD5SUMS
|
||||
a5a8e9029fc31df4a892644524a28c7d31f12a012d72cf1ac5672178b982ecac ./udeb.list
|
||||
d592bca155b709066bcd48070358e6a2dd4f401c09f594b37b15caaf9fc45d80 ./tape/parmfile.debian
|
||||
5e4d45f22a87e36d21685043f85106a6380b3ae7fbdccff70097ccbf6757a287 ./tape/kernel.debian-nolabel
|
||||
57afafa42e61248d96e9d26efb2db075a17f484c51445c6420a32220b05c1fb2 ./tape/initrd.debian
|
||||
0ca15998b2156af0a2ecffc771f9a418b4918956049fac4250c01d163b143006 ./tape/kernel.debian
|
||||
1a907f670fc825f70827608ec32bb8a194971d0e1126d1caf33de87fc27d0d08 ./MANIFEST.udebs
|
||||
62ba2aaafac85c163c3cbf25ee52724a59b12d805382a0122080b6a86e229b0a ./MANIFEST
|
||||
d592bca155b709066bcd48070358e6a2dd4f401c09f594b37b15caaf9fc45d80 ./generic/parmfile.debian
|
||||
48d2cbebbc8582f546232c1acf68b1b73125731441fda528f8d71795a60e14a4 ./generic/debian.exec
|
||||
5194c4cfc1d527ce6aa45ced5a7f5102a3c85851ac8409c32b53c1004d0717da ./generic/kernel.debian
|
||||
57afafa42e61248d96e9d26efb2db075a17f484c51445c6420a32220b05c1fb2 ./generic/initrd.debian
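The gold file above is the SHA256SUMS index published for the s390x installer images. For reference, checking a downloaded image against one of its entries is a plain sha256 comparison; a self-contained sketch with an illustrative local path:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// fileSHA256 hashes a local file and returns its lowercase hex digest.
func fileSHA256(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	// Digest taken from the ./MANIFEST entry above; the local path is illustrative.
	expected := "62ba2aaafac85c163c3cbf25ee52724a59b12d805382a0122080b6a86e229b0a"
	got, err := fileSHA256("images/MANIFEST")
	if err != nil {
		fmt.Println("read error:", err)
		return
	}
	fmt.Println(got == expected)
}
```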
|
||||
@@ -1033,3 +1033,26 @@ class PublishSnapshot37Test(BaseTest):
|
||||
]
|
||||
runCmd = "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec wheezy"
|
||||
gold_processor = BaseTest.expand_environ
|
||||
|
||||
|
||||
class PublishSnapshot38Test(BaseTest):
|
||||
"""
|
||||
publish snapshot: mirror with installer
|
||||
"""
|
||||
fixtureGpg = True
|
||||
fixtureCmds = [
|
||||
"aptly -architectures=s390x mirror create -keyring=aptlytest.gpg -filter='installer' -with-installer wheezy http://mirror.yandex.ru/debian/ wheezy main",
|
||||
"aptly mirror update -keyring=aptlytest.gpg wheezy",
|
||||
"aptly snapshot create wheezy from mirror wheezy",
|
||||
]
|
||||
runCmd = "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec wheezy"
|
||||
gold_processor = BaseTest.expand_environ
|
||||
|
||||
def check(self):
|
||||
super(PublishSnapshot38Test, self).check()
|
||||
self.check_exists('public/dists/wheezy/main/installer-s390x/current/images/SHA256SUMS')
|
||||
self.check_exists('public/dists/wheezy/main/installer-s390x/current/images/SHA256SUMS.gpg')
|
||||
self.check_exists('public/dists/wheezy/main/installer-s390x/current/images/generic/debian.exec')
|
||||
self.check_exists('public/dists/wheezy/main/installer-s390x/current/images/MANIFEST')
|
||||
|
||||
self.check_file_contents('public/dists/wheezy/main/installer-s390x/current/images/SHA256SUMS', "installer_s390x", match_prepare=sorted_processor)
|
||||
|
||||