Merge pull request #557 from smira/linters-1

Enable `gosimple` and `ineffassign` linters
This commit is contained in:
Andrey Smirnov
2017-04-28 00:29:20 +03:00
committed by GitHub
16 changed files with 33 additions and 49 deletions
+1 -4
View File
@@ -55,11 +55,8 @@ func printReferencesTo(p *deb.Package) (err error) {
} }
return nil return nil
}) })
if err != nil {
return err
}
return nil return err
} }
func aptlyPackageShow(cmd *commander.Command, args []string) error { func aptlyPackageShow(cmd *commander.Command, args []string) error {
+1 -1
View File
@@ -273,7 +273,7 @@ func (context *AptlyContext) ReOpenDatabase() error {
for try := 0; try < MaxTries; try++ { for try := 0; try < MaxTries; try++ {
err := context.database.ReOpen() err := context.database.ReOpen()
if err == nil || strings.Index(err.Error(), "resource temporarily unavailable") == -1 { if err == nil || !strings.Contains(err.Error(), "resource temporarily unavailable") {
return err return err
} }
context._progress().Printf("Unable to reopen database, sleeping %s\n", Delay) context._progress().Printf("Unable to reopen database, sleeping %s\n", Delay)
+2 -7
View File
@@ -103,11 +103,7 @@ func (c *Changes) VerifyAndParse(acceptUnsigned, ignoreSignature bool, verifier
c.Architectures = strings.Fields(c.Stanza["Architecture"]) c.Architectures = strings.Fields(c.Stanza["Architecture"])
c.Files, err = c.Files.ParseSumFields(c.Stanza) c.Files, err = c.Files.ParseSumFields(c.Stanza)
if err != nil { return err
return err
}
return nil
} }
// Prepare creates temporary directory, copies file there and verifies checksums // Prepare creates temporary directory, copies file there and verifies checksums
@@ -181,8 +177,7 @@ func (c *Changes) PackageQuery() (PackageQuery, error) {
if len(c.Binary) > 0 { if len(c.Binary) > 0 {
binaryQuery = &FieldQuery{Field: "Name", Relation: VersionEqual, Value: c.Binary[0]} binaryQuery = &FieldQuery{Field: "Name", Relation: VersionEqual, Value: c.Binary[0]}
// matching debug ddeb packages, they're not present in the Binary field // matching debug ddeb packages, they're not present in the Binary field
var ddebQuery PackageQuery var ddebQuery PackageQuery = &FieldQuery{Field: "Name", Relation: VersionEqual, Value: fmt.Sprintf("%s-dbgsym", c.Binary[0])}
ddebQuery = &FieldQuery{Field: "Name", Relation: VersionEqual, Value: fmt.Sprintf("%s-dbgsym", c.Binary[0])}
for _, binary := range c.Binary[1:] { for _, binary := range c.Binary[1:] {
binaryQuery = &OrQuery{ binaryQuery = &OrQuery{
+1 -3
View File
@@ -175,9 +175,7 @@ func GetContentsFromDeb(file aptly.ReadSeekerCloser, packageFile string) ([]stri
continue continue
} }
if strings.HasPrefix(tarHeader.Name, "./") { tarHeader.Name = strings.TrimPrefix(tarHeader.Name[2:], "./")
tarHeader.Name = tarHeader.Name[2:]
}
results = append(results, tarHeader.Name) results = append(results, tarHeader.Name)
} }
} }
+1 -1
View File
@@ -344,7 +344,7 @@ func (p *Package) GetDependencies(options int) (dependencies []string) {
if source == "" { if source == "" {
source = p.Name source = p.Name
} }
if strings.Index(source, ")") != -1 { if strings.Contains(source, ")") {
dependencies = append(dependencies, fmt.Sprintf("%s {source}", source)) dependencies = append(dependencies, fmt.Sprintf("%s {source}", source))
} else { } else {
dependencies = append(dependencies, fmt.Sprintf("%s (= %s) {source}", source, p.Version)) dependencies = append(dependencies, fmt.Sprintf("%s (= %s) {source}", source, p.Version))
+1 -1
View File
@@ -299,7 +299,7 @@ func (s *PackageSuite) TestMatchesDependency(c *C) {
// ~ // ~
c.Check( c.Check(
p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionRegexp, Version: "7\\.40-.*", p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionRegexp, Version: "7\\.40-.*",
Regexp: regexp.MustCompile("7\\.40-.*")}), Equals, true) Regexp: regexp.MustCompile(`7\.40-.*`)}), Equals, true)
c.Check( c.Check(
p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionRegexp, Version: "7\\.40-.*", p.MatchesDependency(Dependency{Pkg: "alien-arena-common", Architecture: "i386", Relation: VersionRegexp, Version: "7\\.40-.*",
Regexp: regexp.MustCompile("40")}), Equals, true) Regexp: regexp.MustCompile("40")}), Equals, true)
+4 -15
View File
@@ -226,12 +226,7 @@ func NewPublishedRepo(storage, prefix, distribution string, architectures []stri
// clean & verify prefix // clean & verify prefix
prefix = filepath.Clean(prefix) prefix = filepath.Clean(prefix)
if strings.HasPrefix(prefix, "/") { prefix = strings.TrimPrefix(strings.TrimSuffix(prefix, "/"), "/")
prefix = prefix[1:]
}
if strings.HasSuffix(prefix, "/") {
prefix = prefix[:len(prefix)-1]
}
prefix = filepath.Clean(prefix) prefix = filepath.Clean(prefix)
for _, part := range strings.Split(prefix, "/") { for _, part := range strings.Split(prefix, "/") {
@@ -252,7 +247,7 @@ func NewPublishedRepo(storage, prefix, distribution string, architectures []stri
} }
} }
if strings.Index(distribution, "/") != -1 { if strings.Contains(distribution, "/") {
return nil, fmt.Errorf("invalid distribution %s, '/' is not allowed", distribution) return nil, fmt.Errorf("invalid distribution %s, '/' is not allowed", distribution)
} }
@@ -474,8 +469,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
return err return err
} }
defer func() { defer func() {
var e error e := tempDB.Close()
e = tempDB.Close()
if e != nil && progress != nil { if e != nil && progress != nil {
progress.Printf("failed to close temp DB: %s", err) progress.Printf("failed to close temp DB: %s", err)
} }
@@ -730,12 +724,7 @@ func (p *PublishedRepo) Publish(packagePool aptly.PackagePool, publishedStorageP
return err return err
} }
err = indexes.RenameFiles() return indexes.RenameFiles()
if err != nil {
return err
}
return nil
} }
// RemoveFiles removes files that were created by Publish // RemoveFiles removes files that were created by Publish
+1
View File
@@ -505,6 +505,7 @@ func (s *PublishedRepoCollectionSuite) TestAddByStoragePrefixDistribution(c *C)
c.Assert(r.String(), Equals, s.repo1.String()) c.Assert(r.String(), Equals, s.repo1.String())
r, err = s.collection.ByStoragePrefixDistribution("files:other", "ppa", "precise") r, err = s.collection.ByStoragePrefixDistribution("files:other", "ppa", "precise")
c.Assert(err, IsNil)
c.Check(r.String(), Equals, s.repo5.String()) c.Check(r.String(), Equals, s.repo5.String())
} }
+1 -1
View File
@@ -204,7 +204,7 @@ func (q *FieldQuery) Fast(list PackageCatalog) bool {
// String interface // String interface
func (q *FieldQuery) String() string { func (q *FieldQuery) String() string {
escape := func(val string) string { escape := func(val string) string {
if strings.IndexAny(val, "()|,!{} \t\n") != -1 { if strings.ContainsAny(val, "()|,!{} \t\n") {
return "'" + strings.Replace(strings.Replace(val, "\\", "\\\\", -1), "'", "\\'", -1) + "'" return "'" + strings.Replace(strings.Replace(val, "\\", "\\\\", -1), "'", "\\'", -1) + "'"
} }
return val return val
+1 -1
View File
@@ -92,7 +92,7 @@ func (l *PackageRefList) Has(p *Package) bool {
key := p.Key("") key := p.Key("")
i := sort.Search(len(l.Refs), func(j int) bool { return bytes.Compare(l.Refs[j], key) >= 0 }) i := sort.Search(len(l.Refs), func(j int) bool { return bytes.Compare(l.Refs[j], key) >= 0 })
return i < len(l.Refs) && bytes.Compare(l.Refs[i], key) == 0 return i < len(l.Refs) && bytes.Equal(l.Refs[i], key)
} }
// Strings builds list of strings with package keys // Strings builds list of strings with package keys
+1 -3
View File
@@ -331,9 +331,7 @@ ok:
if strings.Contains(repo.Distribution, "/") { if strings.Contains(repo.Distribution, "/") {
distributionLast := path.Base(repo.Distribution) + "/" distributionLast := path.Base(repo.Distribution) + "/"
for i := range components { for i := range components {
if strings.HasPrefix(components[i], distributionLast) { components[i] = strings.TrimPrefix(components[i], distributionLast)
components[i] = components[i][len(distributionLast):]
}
} }
} }
if len(repo.Components) == 0 { if len(repo.Components) == 0 {
+11 -1
View File
@@ -1,4 +1,14 @@
{ {
"DisableAll": true, "DisableAll": true,
"Enable": ["vet", "golint", "gofmt", "deadcode", "goimports", "misspell"] "Enable": [
"vet",
"golint",
"gofmt",
"deadcode",
"goimports",
"misspell",
"gosimple",
"ineffassign"
],
"Deadline": "5m"
} }
+1 -1
View File
@@ -49,7 +49,7 @@ func (s *SyntaxSuite) TestParsing(c *C) {
c.Assert(err, IsNil) c.Assert(err, IsNil)
c.Check(q, DeepEquals, &deb.DependencyQuery{Dep: deb.Dependency{Pkg: "package", Relation: deb.VersionRegexp, Version: "5\\.3.*~dev", c.Check(q, DeepEquals, &deb.DependencyQuery{Dep: deb.Dependency{Pkg: "package", Relation: deb.VersionRegexp, Version: "5\\.3.*~dev",
Regexp: regexp.MustCompile("5\\.3.*~dev")}}) Regexp: regexp.MustCompile(`5\.3.*~dev`)}})
l, _ = lex("query", "alien-data_1.3.4~dev_i386") l, _ = lex("query", "alien-data_1.3.4~dev_i386")
q, err = parse(l) q, err = parse(l)
+3 -3
View File
@@ -165,7 +165,7 @@ func (storage *PublishedStorage) putFile(path string, source io.ReadSeeker) erro
return err return err
} }
if storage.plusWorkaround && strings.Index(path, "+") != -1 { if storage.plusWorkaround && strings.Contains(path, "+") {
_, err = source.Seek(0, 0) _, err = source.Seek(0, 0)
if err != nil { if err != nil {
return err return err
@@ -187,7 +187,7 @@ func (storage *PublishedStorage) Remove(path string) error {
return errors.Wrap(err, fmt.Sprintf("error deleting %s from %s", path, storage)) return errors.Wrap(err, fmt.Sprintf("error deleting %s from %s", path, storage))
} }
if storage.plusWorkaround && strings.Index(path, "+") != -1 { if storage.plusWorkaround && strings.Contains(path, "+") {
// try to remove workaround version, but don't care about result // try to remove workaround version, but don't care about result
_ = storage.Remove(strings.Replace(path, "+", " ", -1)) _ = storage.Remove(strings.Replace(path, "+", " ", -1))
} }
@@ -332,7 +332,7 @@ func (storage *PublishedStorage) internalFilelist(prefix string, hidePlusWorkaro
err = storage.s3.ListObjectsPages(params, func(contents *s3.ListObjectsOutput, lastPage bool) bool { err = storage.s3.ListObjectsPages(params, func(contents *s3.ListObjectsOutput, lastPage bool) bool {
for _, key := range contents.Contents { for _, key := range contents.Contents {
if storage.plusWorkaround && hidePlusWorkaround && strings.Index(*key.Key, " ") != -1 { if storage.plusWorkaround && hidePlusWorkaround && strings.Contains(*key.Key, " ") {
// if we use plusWorkaround, we want to hide those duplicates // if we use plusWorkaround, we want to hide those duplicates
/// from listing /// from listing
continue continue
+1 -1
View File
@@ -646,7 +646,7 @@ func (objr objectResource) put(a *action) interface{} {
fatalError(400, "TODO", "read error") fatalError(400, "TODO", "read error")
} }
gotHash := sum.Sum(nil) gotHash := sum.Sum(nil)
if expectHash != nil && bytes.Compare(gotHash, expectHash) != 0 { if expectHash != nil && !bytes.Equal(gotHash, expectHash) {
fatalError(400, "BadDigest", "The Content-MD5 you specified did not match what we received") fatalError(400, "BadDigest", "The Content-MD5 you specified did not match what we received")
} }
if a.req.ContentLength >= 0 && int64(len(data)) != a.req.ContentLength { if a.req.ContentLength >= 0 && int64(len(data)) != a.req.ContentLength {
+2 -6
View File
@@ -324,16 +324,12 @@ func (g *GpgVerifier) VerifyDetachedSignature(signature, cleartext io.Reader) er
func (g *GpgVerifier) IsClearSigned(clearsigned io.Reader) (bool, error) { func (g *GpgVerifier) IsClearSigned(clearsigned io.Reader) (bool, error) {
scanner := bufio.NewScanner(clearsigned) scanner := bufio.NewScanner(clearsigned)
for scanner.Scan() { for scanner.Scan() {
if strings.Index(scanner.Text(), "BEGIN PGP SIGN") != -1 { if strings.Contains(scanner.Text(), "BEGIN PGP SIGN") {
return true, nil return true, nil
} }
} }
if err := scanner.Err(); err != nil { return false, scanner.Err()
return false, err
}
return false, nil
} }
// VerifyClearsigned verifies clearsigned file using gpgv // VerifyClearsigned verifies clearsigned file using gpgv