Expose repo include through API

This commit is contained in:
Oliver Sauder
2018-06-18 11:28:41 +02:00
parent 90e446ec16
commit f1882cfe2c
6 changed files with 264 additions and 147 deletions
+75
View File
@@ -8,6 +8,7 @@ import (
"github.com/aptly-dev/aptly/aptly" "github.com/aptly-dev/aptly/aptly"
"github.com/aptly-dev/aptly/database" "github.com/aptly-dev/aptly/database"
"github.com/aptly-dev/aptly/deb" "github.com/aptly-dev/aptly/deb"
"github.com/aptly-dev/aptly/query"
"github.com/aptly-dev/aptly/utils" "github.com/aptly-dev/aptly/utils"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
) )
@@ -367,3 +368,77 @@ func apiReposPackageFromDir(c *gin.Context) {
"FailedFiles": failedFiles, "FailedFiles": failedFiles,
}) })
} }
// POST /repos/:name/include/:dir/:file
// apiReposIncludePackageFromFile includes a single changes file from an
// upload directory. The directory handler already honours the optional
// :file route parameter, so this endpoint simply delegates to it.
func apiReposIncludePackageFromFile(c *gin.Context) {
	apiReposIncludePackageFromDir(c)
}
// POST /repos/:name/include/:dir
// apiReposIncludePackageFromDir imports all changes files found in an upload
// directory (or a single file when the optional :file parameter is set) into
// local repositories selected by the :name repo template.
//
// Query flags (enabled when set to "1"):
//   forceReplace    - replace conflicting packages already in the repo
//   noRemoveFiles   - keep uploaded files on disk after a successful import
//   acceptUnsigned  - accept unsigned changes files
//   ignoreSignature - skip signature verification entirely
func apiReposIncludePackageFromDir(c *gin.Context) {
	forceReplace := c.Request.URL.Query().Get("forceReplace") == "1"
	noRemoveFiles := c.Request.URL.Query().Get("noRemoveFiles") == "1"
	acceptUnsigned := c.Request.URL.Query().Get("acceptUnsigned") == "1"
	ignoreSignature := c.Request.URL.Query().Get("ignoreSignature") == "1"

	// :name is used as a repo template, not a literal repo name: it is parsed
	// by deb.ImportChangesFiles and executed against each changes file's
	// stanza, so one request can route packages to different repos.
	repoTemplateString := c.Params.ByName("name")

	if !verifyDir(c) {
		return
	}

	fileParam := c.Params.ByName("file")
	if fileParam != "" && !verifyPath(fileParam) {
		c.AbortWithError(400, fmt.Errorf("wrong file"))
		return
	}

	var (
		err                       error
		verifier                  = context.GetVerifier()
		sources, changesFiles     []string
		failedFiles, failedFiles2 []string
		// recording reporter captures warnings and added/removed lines so
		// they can be returned to the client in the JSON response
		reporter = &aptly.RecordingResultReporter{
			Warnings:     []string{},
			AddedLines:   []string{},
			RemovedLines: []string{},
		}
	)

	// scan either the whole upload directory or just the named file
	if fileParam == "" {
		sources = []string{filepath.Join(context.UploadPath(), c.Params.ByName("dir"))}
	} else {
		sources = []string{filepath.Join(context.UploadPath(), c.Params.ByName("dir"), c.Params.ByName("file"))}
	}

	// hold the local repo collection lock for the whole import
	localRepoCollection := context.CollectionFactory().LocalRepoCollection()
	localRepoCollection.Lock()
	defer localRepoCollection.Unlock()

	changesFiles, failedFiles = deb.CollectChangesFiles(sources, reporter)
	// nil uploaders: the API applies no global uploaders policy; per-repo
	// uploaders (repo.Uploaders) are still enforced inside ImportChangesFiles
	_, failedFiles2, err = deb.ImportChangesFiles(
		changesFiles, reporter, acceptUnsigned, ignoreSignature, forceReplace, noRemoveFiles, verifier,
		repoTemplateString, context.Progress(), localRepoCollection, context.CollectionFactory().PackageCollection(),
		context.PackagePool(), context.CollectionFactory().ChecksumCollection(), nil, query.Parse)
	// merge per-file failures even when a fatal error follows
	failedFiles = append(failedFiles, failedFiles2...)

	if err != nil {
		c.AbortWithError(500, fmt.Errorf("unable to import changes files: %s", err))
		return
	}

	if !noRemoveFiles {
		// attempt to remove dir, if it fails, that's fine: probably it's not empty
		os.Remove(filepath.Join(context.UploadPath(), c.Params.ByName("dir")))
	}

	// report an empty array rather than null to JSON clients
	if failedFiles == nil {
		failedFiles = []string{}
	}

	c.JSON(200, gin.H{
		"Report":      reporter,
		"FailedFiles": failedFiles,
	})
}
+3
View File
@@ -71,6 +71,9 @@ func Router(c *ctx.AptlyContext) http.Handler {
root.POST("/repos/:name/file/:dir/:file", apiReposPackageFromFile) root.POST("/repos/:name/file/:dir/:file", apiReposPackageFromFile)
root.POST("/repos/:name/file/:dir", apiReposPackageFromDir) root.POST("/repos/:name/file/:dir", apiReposPackageFromDir)
root.POST("/repos/:name/include/:dir/:file", apiReposIncludePackageFromFile)
root.POST("/repos/:name/include/:dir", apiReposIncludePackageFromDir)
root.POST("/repos/:name/snapshots", apiSnapshotsCreateFromRepository) root.POST("/repos/:name/snapshots", apiSnapshotsCreateFromRepository)
} }
+8 -145
View File
@@ -1,16 +1,11 @@
package cmd package cmd
import ( import (
"bytes"
"fmt" "fmt"
"os"
"path/filepath"
"text/template"
"github.com/aptly-dev/aptly/aptly" "github.com/aptly-dev/aptly/aptly"
"github.com/aptly-dev/aptly/deb" "github.com/aptly-dev/aptly/deb"
"github.com/aptly-dev/aptly/query" "github.com/aptly-dev/aptly/query"
"github.com/aptly-dev/aptly/utils"
"github.com/smira/commander" "github.com/smira/commander"
"github.com/smira/flag" "github.com/smira/flag"
) )
@@ -35,11 +30,7 @@ func aptlyRepoInclude(cmd *commander.Command, args []string) error {
acceptUnsigned := context.Flags().Lookup("accept-unsigned").Value.Get().(bool) acceptUnsigned := context.Flags().Lookup("accept-unsigned").Value.Get().(bool)
ignoreSignatures := context.Flags().Lookup("ignore-signatures").Value.Get().(bool) ignoreSignatures := context.Flags().Lookup("ignore-signatures").Value.Get().(bool)
noRemoveFiles := context.Flags().Lookup("no-remove-files").Value.Get().(bool) noRemoveFiles := context.Flags().Lookup("no-remove-files").Value.Get().(bool)
repoTemplateString := context.Flags().Lookup("repo").Value.Get().(string)
repoTemplate, err := template.New("repo").Parse(context.Flags().Lookup("repo").Value.Get().(string))
if err != nil {
return fmt.Errorf("error parsing -repo template: %s", err)
}
uploaders := (*deb.Uploaders)(nil) uploaders := (*deb.Uploaders)(nil)
uploadersFile := context.Flags().Lookup("uploaders-file").Value.Get().(string) uploadersFile := context.Flags().Lookup("uploaders-file").Value.Get().(string)
@@ -59,143 +50,15 @@ func aptlyRepoInclude(cmd *commander.Command, args []string) error {
reporter := &aptly.ConsoleResultReporter{Progress: context.Progress()} reporter := &aptly.ConsoleResultReporter{Progress: context.Progress()}
var changesFiles, failedFiles, processedFiles []string var changesFiles, failedFiles, failedFiles2 []string
changesFiles, failedFiles = deb.CollectChangesFiles(args, reporter) changesFiles, failedFiles = deb.CollectChangesFiles(args, reporter)
_, failedFiles2, err = deb.ImportChangesFiles(
for _, path := range changesFiles { changesFiles, reporter, acceptUnsigned, ignoreSignatures, forceReplace, noRemoveFiles, verifier, repoTemplateString,
var changes *deb.Changes context.Progress(), context.CollectionFactory().LocalRepoCollection(), context.CollectionFactory().PackageCollection(),
context.PackagePool(), context.CollectionFactory().ChecksumCollection(),
changes, err = deb.NewChanges(path) uploaders, query.Parse)
if err != nil { failedFiles = append(failedFiles, failedFiles2...)
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", path, err)
continue
}
err = changes.VerifyAndParse(acceptUnsigned, ignoreSignatures, verifier)
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
err = changes.Prepare()
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
repoName := &bytes.Buffer{}
err = repoTemplate.Execute(repoName, changes.Stanza)
if err != nil {
return fmt.Errorf("error applying template to repo: %s", err)
}
context.Progress().Printf("Loading repository %s for changes file %s...\n", repoName.String(), changes.ChangesName)
var repo *deb.LocalRepo
repo, err = context.CollectionFactory().LocalRepoCollection().ByName(repoName.String())
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
currentUploaders := uploaders
if repo.Uploaders != nil {
currentUploaders = repo.Uploaders
for i := range currentUploaders.Rules {
currentUploaders.Rules[i].CompiledCondition, err = query.Parse(currentUploaders.Rules[i].Condition)
if err != nil {
return fmt.Errorf("error parsing query %s: %s", currentUploaders.Rules[i].Condition, err)
}
}
}
if currentUploaders != nil {
if err = currentUploaders.IsAllowed(changes); err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("changes file skipped due to uploaders config: %s, keys %#v: %s",
changes.ChangesName, changes.SignatureKeys, err)
changes.Cleanup()
continue
}
}
err = context.CollectionFactory().LocalRepoCollection().LoadComplete(repo)
if err != nil {
return fmt.Errorf("unable to load repo: %s", err)
}
var list *deb.PackageList
list, err = deb.NewPackageListFromRefList(repo.RefList(), context.CollectionFactory().PackageCollection(), context.Progress())
if err != nil {
return fmt.Errorf("unable to load packages: %s", err)
}
packageFiles, otherFiles, _ := deb.CollectPackageFiles([]string{changes.TempDir}, reporter)
var restriction deb.PackageQuery
restriction, err = changes.PackageQuery()
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
var processedFiles2, failedFiles2 []string
processedFiles2, failedFiles2, err = deb.ImportPackageFiles(list, packageFiles, forceReplace, verifier, context.PackagePool(),
context.CollectionFactory().PackageCollection(), reporter, restriction, context.CollectionFactory().ChecksumCollection())
if err != nil {
return fmt.Errorf("unable to import package files: %s", err)
}
repo.UpdateRefList(deb.NewPackageRefListFromPackageList(list))
err = context.CollectionFactory().LocalRepoCollection().Update(repo)
if err != nil {
return fmt.Errorf("unable to save: %s", err)
}
err = changes.Cleanup()
if err != nil {
return err
}
for _, file := range failedFiles2 {
failedFiles = append(failedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
}
for _, file := range processedFiles2 {
processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
}
for _, file := range otherFiles {
processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
}
processedFiles = append(processedFiles, path)
}
if !noRemoveFiles {
processedFiles = utils.StrSliceDeduplicate(processedFiles)
for _, file := range processedFiles {
err = os.Remove(file)
if err != nil {
return fmt.Errorf("unable to remove file: %s", err)
}
}
}
if len(failedFiles) > 0 { if len(failedFiles) > 0 {
context.Progress().ColoredPrintf("@y[!]@| @!Some files were skipped due to errors:@|") context.Progress().ColoredPrintf("@y[!]@| @!Some files were skipped due to errors:@|")
+144 -2
View File
@@ -1,6 +1,7 @@
package deb package deb
import ( import (
"bytes"
"fmt" "fmt"
"io" "io"
"io/ioutil" "io/ioutil"
@@ -8,6 +9,7 @@ import (
"path/filepath" "path/filepath"
"sort" "sort"
"strings" "strings"
"text/template"
"github.com/aptly-dev/aptly/aptly" "github.com/aptly-dev/aptly/aptly"
"github.com/aptly-dev/aptly/pgp" "github.com/aptly-dev/aptly/pgp"
@@ -164,7 +166,7 @@ func (c *Changes) Cleanup() error {
} }
// PackageQuery returns query that every package should match to be included // PackageQuery returns query that every package should match to be included
func (c *Changes) PackageQuery() (PackageQuery, error) { func (c *Changes) PackageQuery() PackageQuery {
var archQuery PackageQuery = &FieldQuery{Field: "$Architecture", Relation: VersionEqual, Value: ""} var archQuery PackageQuery = &FieldQuery{Field: "$Architecture", Relation: VersionEqual, Value: ""}
for _, arch := range c.Architectures { for _, arch := range c.Architectures {
archQuery = &OrQuery{L: &FieldQuery{Field: "$Architecture", Relation: VersionEqual, Value: arch}, R: archQuery} archQuery = &OrQuery{L: &FieldQuery{Field: "$Architecture", Relation: VersionEqual, Value: arch}, R: archQuery}
@@ -215,7 +217,7 @@ func (c *Changes) PackageQuery() (PackageQuery, error) {
nameQuery = &OrQuery{L: sourceQuery, R: binaryQuery} nameQuery = &OrQuery{L: sourceQuery, R: binaryQuery}
} }
return &AndQuery{L: archQuery, R: nameQuery}, nil return &AndQuery{L: archQuery, R: nameQuery}
} }
// GetField implements PackageLike interface // GetField implements PackageLike interface
@@ -288,3 +290,143 @@ func CollectChangesFiles(locations []string, reporter aptly.ResultReporter) (cha
return return
} }
// ImportChangesFiles imports the packages referenced by changes files into
// local repositories.
//
// For each changes file it: parses and (optionally) verifies the signature,
// unpacks referenced files into a temporary directory, resolves the target
// repo by executing repoTemplateString against the changes stanza, enforces
// the uploaders policy (per-repo uploaders override the supplied one),
// imports the matching package files and updates the repo's ref list. Unless
// noRemoveFiles is set, successfully processed files are removed from disk.
//
// Per-file problems are reported via reporter and collected in failedFiles;
// fatal problems (bad template, repo load/save failure, ...) abort the whole
// run with a non-nil err. progress may be nil.
func ImportChangesFiles(changesFiles []string, reporter aptly.ResultReporter, acceptUnsigned, ignoreSignatures, forceReplace, noRemoveFiles bool,
	verifier pgp.Verifier, repoTemplateString string, progress aptly.Progress, localRepoCollection *LocalRepoCollection, packageCollection *PackageCollection,
	pool aptly.PackagePool, checksumStorage aptly.ChecksumStorage, uploaders *Uploaders, parseQuery parseQuery) (processedFiles []string, failedFiles []string, err error) {

	var repoTemplate *template.Template
	repoTemplate, err = template.New("repo").Parse(repoTemplateString)
	if err != nil {
		return nil, nil, fmt.Errorf("error parsing -repo template: %s", err)
	}

	for _, path := range changesFiles {
		var changes *Changes

		changes, err = NewChanges(path)
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", path, err)
			continue
		}

		err = changes.VerifyAndParse(acceptUnsigned, ignoreSignatures, verifier)
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		err = changes.Prepare()
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		// resolve the destination repo name from the changes stanza
		repoName := &bytes.Buffer{}
		err = repoTemplate.Execute(repoName, changes.Stanza)
		if err != nil {
			// best effort: don't leave the extracted temp dir behind on a fatal error
			changes.Cleanup()
			return nil, nil, fmt.Errorf("error applying template to repo: %s", err)
		}

		if progress != nil {
			progress.Printf("Loading repository %s for changes file %s...\n", repoName.String(), changes.ChangesName)
		}

		var repo *LocalRepo
		repo, err = localRepoCollection.ByName(repoName.String())
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		// per-repo uploaders config takes precedence over the supplied one
		currentUploaders := uploaders
		if repo.Uploaders != nil {
			currentUploaders = repo.Uploaders
			for i := range currentUploaders.Rules {
				currentUploaders.Rules[i].CompiledCondition, err = parseQuery(currentUploaders.Rules[i].Condition)
				if err != nil {
					changes.Cleanup()
					return nil, nil, fmt.Errorf("error parsing query %s: %s", currentUploaders.Rules[i].Condition, err)
				}
			}
		}

		if currentUploaders != nil {
			if err = currentUploaders.IsAllowed(changes); err != nil {
				failedFiles = append(failedFiles, path)
				reporter.Warning("changes file skipped due to uploaders config: %s, keys %#v: %s",
					changes.ChangesName, changes.SignatureKeys, err)
				changes.Cleanup()
				continue
			}
		}

		err = localRepoCollection.LoadComplete(repo)
		if err != nil {
			changes.Cleanup()
			return nil, nil, fmt.Errorf("unable to load repo: %s", err)
		}

		var list *PackageList
		list, err = NewPackageListFromRefList(repo.RefList(), packageCollection, progress)
		if err != nil {
			changes.Cleanup()
			return nil, nil, fmt.Errorf("unable to load packages: %s", err)
		}

		packageFiles, otherFiles, _ := CollectPackageFiles([]string{changes.TempDir}, reporter)

		// only import packages the changes file actually references
		restriction := changes.PackageQuery()
		var processedFiles2, failedFiles2 []string

		processedFiles2, failedFiles2, err = ImportPackageFiles(list, packageFiles, forceReplace, verifier, pool,
			packageCollection, reporter, restriction, checksumStorage)
		if err != nil {
			changes.Cleanup()
			return nil, nil, fmt.Errorf("unable to import package files: %s", err)
		}

		repo.UpdateRefList(NewPackageRefListFromPackageList(list))

		err = localRepoCollection.Update(repo)
		if err != nil {
			changes.Cleanup()
			return nil, nil, fmt.Errorf("unable to save: %s", err)
		}

		err = changes.Cleanup()
		if err != nil {
			return nil, nil, err
		}

		// map temp-dir results back to their original upload-directory paths
		for _, file := range failedFiles2 {
			failedFiles = append(failedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
		}

		for _, file := range processedFiles2 {
			processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
		}

		for _, file := range otherFiles {
			processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
		}

		processedFiles = append(processedFiles, path)
	}

	if !noRemoveFiles {
		processedFiles = utils.StrSliceDeduplicate(processedFiles)

		for _, file := range processedFiles {
			err = os.Remove(file)
			if err != nil {
				return nil, nil, fmt.Errorf("unable to remove file: %s", err)
			}
		}
	}

	return processedFiles, failedFiles, nil
}
+2
View File
@@ -26,6 +26,8 @@ const (
SourceRemoteRepo = "repo" SourceRemoteRepo = "repo"
) )
// parseQuery is the signature of a package query parser (e.g. query.Parse),
// injected as a dependency so this package does not import the query package.
type parseQuery func(string) (PackageQuery, error)
// GetControlFileFromDeb reads control file from deb package // GetControlFileFromDeb reads control file from deb package
func GetControlFileFromDeb(packageFile string) (Stanza, error) { func GetControlFileFromDeb(packageFile string) (Stanza, error) {
file, err := os.Open(packageFile) file, err := os.Open(packageFile)
+32
View File
@@ -176,6 +176,38 @@ class ReposAPITestAddFile(APITest):
self.check_not_exists("upload/" + d) self.check_not_exists("upload/" + d)
class ReposAPITestInclude(APITest):
    """
    POST /api/repos/:name/include/:dir, GET /api/repos/:name/packages
    """

    def check(self):
        upload_dir = self.random_name()
        repo_name = self.random_name()

        # create an empty local repo to include the changes set into
        self.check_equal(
            self.post("/api/repos", json={"Name": repo_name, "Comment": "fun repo"}).status_code, 201)

        # upload a complete source+binary changes set
        response = self.upload("/api/files/" + upload_dir,
                               "hardlink_0.2.1_amd64.changes",
                               "hardlink_0.2.1.dsc", "hardlink_0.2.1.tar.gz",
                               "hardlink_0.2.1_amd64.deb", directory='changes')
        self.check_equal(response.status_code, 200)

        # include it, skipping signature verification
        response = self.post("/api/repos/" + repo_name + "/include/" + upload_dir,
                             params={"ignoreSignature": 1})
        self.check_equal(response.status_code, 200)
        self.check_equal(response.json(), {
            u'FailedFiles': [],
            u'Report': {
                u'Added': [u'hardlink_0.2.1_source added', 'hardlink_0.2.1_amd64 added'],
                u'Removed': [],
                u'Warnings': []}})

        # both the source and the binary package must end up in the repo
        self.check_equal(
            sorted(self.get("/api/repos/" + repo_name + "/packages").json()),
            [u'Pamd64 hardlink 0.2.1 daf8fcecbf8210ad', u'Psource hardlink 0.2.1 8f72df429d7166e5']
        )

        # upload directory is cleaned up after a successful include
        self.check_not_exists("upload/" + upload_dir)
class ReposAPITestShowQuery(APITest): class ReposAPITestShowQuery(APITest):
""" """
GET /api/repos/:name/packages?q=query GET /api/repos/:name/packages?q=query