chore(deps): update tools to latest versions (#3144)
1 parent cff9d49 commit dad2537
Showing 27 changed files with 41 additions and 45 deletions.
4 changes: 2 additions & 2 deletions .binny.yaml
@@ -26,7 +26,7 @@ tools:
# used for linting
- name: golangci-lint
version:
want: v1.60.1
want: v1.60.3
method: github-release
with:
repo: golangci/golangci-lint
@@ -111,7 +111,7 @@ tools:
# used for triggering a release
- name: gh
version:
want: v2.54.0
want: v2.55.0
method: github-release
with:
repo: cli/cli
4 changes: 2 additions & 2 deletions .golangci.yaml
@@ -12,10 +12,10 @@ linters:
enable:
- asciicheck
- bodyclose
- copyloopvar
- dogsled
- dupl
- errcheck
- exportloopref
- funlen
- gocognit
- goconst
@@ -30,6 +30,7 @@ linters:
- ineffassign
- misspell
- nakedret
- nolintlint
- revive
- staticcheck
- stylecheck
@@ -80,7 +81,6 @@ run:
# - lll # without a way to specify per-line exception cases, this is not usable
# - maligned # this is an excellent linter, but tricky to optimize and we are not sensitive to memory layout optimizations
# - nestif
# - nolintlint # as of go1.19 this conflicts with the behavior of gofmt, which is a deal-breaker (lint-fix will still fail when running lint)
# - prealloc # following this rule isn't consistently a good idea, as it sometimes forces unnecessary allocations that result in less idiomatic code
# - rowserrcheck # not in a repo with sql, so this is not useful
# - scopelint # deprecated
3 changes: 1 addition & 2 deletions cmd/syft/internal/commands/attest.go
@@ -93,14 +93,13 @@ func defaultAttestOutputOptions() options.Output {
string(spdxtagvalue.ID),
},
Outputs: []string{syftjson.ID.String()},
OutputFile: options.OutputFile{ // nolint:staticcheck
OutputFile: options.OutputFile{ //nolint:staticcheck
Enabled: false, // explicitly not allowed
},
Format: options.DefaultFormat(),
}
}

//nolint:funlen
func runAttest(ctx context.Context, id clio.Identification, opts *attestOptions, userInput string) error {
// TODO: what other validation here besides binary name?
if !commandExists(cosignBinName) {
1 change: 0 additions & 1 deletion cmd/syft/internal/commands/convert.go
@@ -28,7 +28,6 @@ type ConvertOptions struct {
options.UpdateCheck `yaml:",inline" mapstructure:",squash"`
}

//nolint:dupl
func Convert(app clio.Application) *cobra.Command {
id := app.ID()

7 changes: 3 additions & 4 deletions cmd/syft/internal/commands/scan.go
@@ -80,7 +80,6 @@ func defaultScanOptions() *scanOptions {
}
}

//nolint:dupl
func Scan(app clio.Application) *cobra.Command {
id := app.ID()

@@ -396,13 +395,13 @@ func getExplanation(expErr task.ErrInvalidExpression) string {

if errors.Is(err, task.ErrNamesNotAllowed) {
if expErr.Operation == task.SubSelectOperation {
return "However, " + err.Error() + ".\nIt seems like you are intending to add a cataloger in addition to the default set." // nolint:goconst
return "However, " + err.Error() + ".\nIt seems like you are intending to add a cataloger in addition to the default set."
}
return "However, " + err.Error() + "." // nolint:goconst
return "However, " + err.Error() + "."
}

if errors.Is(err, task.ErrTagsNotAllowed) {
return "However, " + err.Error() + ".\nAdding groups of catalogers may result in surprising behavior (create inaccurate SBOMs)." // nolint:goconst
return "However, " + err.Error() + ".\nAdding groups of catalogers may result in surprising behavior (create inaccurate SBOMs)."
}

if errors.Is(err, task.ErrAllNotAllowed) {
16 changes: 10 additions & 6 deletions internal/file/zip_read_closer.go
@@ -6,6 +6,7 @@ import (
"errors"
"fmt"
"io"
"math"
"os"
)

@@ -52,9 +53,14 @@ func OpenZip(filepath string) (*ZipReadCloser, error) {
return nil, fmt.Errorf("unable to seek to beginning of archive: %w", err)
}

size := fi.Size() - int64(offset)
if offset > math.MaxInt64 {
return nil, fmt.Errorf("archive start offset too large: %v", offset)
}
offset64 := int64(offset) //nolint:gosec // lint bug, checked above: https://github.com/securego/gosec/issues/1187

size := fi.Size() - offset64

r, err := zip.NewReader(io.NewSectionReader(f, int64(offset), size), size)
r, err := zip.NewReader(io.NewSectionReader(f, offset64, size), size)
if err != nil {
return nil, fmt.Errorf("unable to open ZipReadCloser @ %q: %w", filepath, err)
}
@@ -95,8 +101,6 @@ type directoryEnd struct {
}

// note: this is derived from readDirectoryEnd within the archive/zip package
//
//nolint:gocognit
func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, err error) {
// look for directoryEndSignature in the last 1k, then in the last 65k
var buf []byte
@@ -150,7 +154,7 @@ func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, e
startOfArchive = uint64(directoryEndOffset) - d.directorySize - d.directoryOffset

// Make sure directoryOffset points to somewhere in our file.
if o := int64(d.directoryOffset); o < 0 || o >= size {
if d.directoryOffset >= uint64(size) {
return 0, zip.ErrFormat
}
return startOfArchive, nil
@@ -179,7 +183,7 @@ func findDirectory64End(r io.ReaderAt, directoryEndOffset int64) (int64, error)
if b.uint32() != 1 { // total number of disks
return -1, nil // the file is not a valid zip64-file
}
return int64(p), nil
return int64(p), nil //nolint:gosec
}

// readDirectory64End reads the zip64 directory end and updates the
2 changes: 0 additions & 2 deletions internal/task/package_task_factory.go
@@ -81,8 +81,6 @@ func (f PackageTaskFactories) Tasks(cfg CatalogingFactoryConfig) ([]Task, error)
}

// NewPackageTask creates a Task function for a generic pkg.Cataloger, honoring the common configuration options.
//
//nolint:funlen
func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string) Task {
fn := func(ctx context.Context, resolver file.Resolver, sbom sbomsync.Builder) error {
catalogerName := c.Name()
2 changes: 1 addition & 1 deletion syft/file/cataloger/executable/elf.go
@@ -175,7 +175,7 @@ func hasElfDynTag(f *elf.File, tag elf.DynTag) bool {
t = elf.DynTag(f.ByteOrder.Uint32(d[0:4]))
d = d[8:]
case elf.ELFCLASS64:
t = elf.DynTag(f.ByteOrder.Uint64(d[0:8]))
t = elf.DynTag(f.ByteOrder.Uint64(d[0:8])) //nolint:gosec
d = d[16:]
}
if t == tag {
2 changes: 1 addition & 1 deletion syft/format/internal/cyclonedxutil/helpers/component.go
@@ -124,7 +124,7 @@ func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typeN
if metadataType == nil {
return nil
}
metaPtrTyp := reflect.PtrTo(metadataType)
metaPtrTyp := reflect.PointerTo(metadataType)
metaPtr := Decode(metaPtrTyp, vals, "syft:metadata", CycloneDXFields)

// Map all explicit metadata properties
1 change: 0 additions & 1 deletion syft/format/internal/cyclonedxutil/helpers/licenses.go
@@ -72,7 +72,6 @@ func decodeLicenses(c *cyclonedx.Component) []pkg.License {
return licenses
}

// nolint:funlen
func separateLicenses(p pkg.Package) (spdx, other cyclonedx.Licenses, expressions []string) {
ex := make([]string, 0)
spdxc := cyclonedx.Licenses{}
@@ -165,7 +165,7 @@ func Decode(typ reflect.Type, values map[string]string, prefix string, fn FieldN

isSlice := false
if typ.Kind() == reflect.Slice {
typ = reflect.PtrTo(typ)
typ = reflect.PointerTo(typ)
isSlice = true
}

@@ -34,7 +34,7 @@ const (
//
// Available options are: <omit>, NOASSERTION, Person: <person>, Organization: <org>
// return values are: <type>, <value>
func Originator(p pkg.Package) (typ string, author string) { // nolint: funlen
func Originator(p pkg.Package) (typ string, author string) { //nolint: funlen
if !hasMetadata(p) {
return typ, author
}
2 changes: 1 addition & 1 deletion syft/format/syftjson/to_syft_model.go
@@ -146,7 +146,7 @@ func safeFileModeConvert(val int) (fs.FileMode, error) {
if err != nil {
return 0, err
}
return os.FileMode(mode), nil
return os.FileMode(mode), nil //nolint:gosec
}

func toSyftLicenses(m []model.License) (p []pkg.License) {
3 changes: 2 additions & 1 deletion syft/internal/fileresolver/container_image_all_layers.go
@@ -120,7 +120,7 @@ func (r *ContainerImageAllLayers) FilesByPath(paths ...string) ([]file.Location,
}

// FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image.
// nolint:gocognit
//
//nolint:gocognit
func (r *ContainerImageAllLayers) FilesByGlob(patterns ...string) ([]file.Location, error) {
uniqueFileIDs := stereoscopeFile.NewFileReferenceSet()
uniqueLocations := make([]file.Location, 0)
3 changes: 2 additions & 1 deletion syft/internal/fileresolver/container_image_squash.go
@@ -79,7 +79,8 @@ func (r *ContainerImageSquash) FilesByPath(paths ...string) ([]file.Location, er
}

// FilesByGlob returns all file.References that match the given path glob pattern within the squashed representation of the image.
// nolint:gocognit
//
//nolint:gocognit
func (r *ContainerImageSquash) FilesByGlob(patterns ...string) ([]file.Location, error) {
uniqueFileIDs := stereoscopeFile.NewFileReferenceSet()
uniqueLocations := make([]file.Location, 0)
2 changes: 1 addition & 1 deletion syft/pkg/cataloger/alpine/parse_apk_db.go
@@ -34,7 +34,7 @@ type parsedData struct {
// parseApkDB parses packages from a given APK "installed" flat-file DB. For more
// information on specific fields, see https://wiki.alpinelinux.org/wiki/Apk_spec.
//
//nolint:funlen,gocognit
//nolint:funlen
func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
scanner := bufio.NewScanner(reader)

1 change: 0 additions & 1 deletion syft/pkg/cataloger/binary/classifier.go
@@ -196,7 +196,6 @@ func matchExcluding(matcher EvidenceMatcher, contentPatternsToExclude ...string)
}
}

//nolint:gocognit
func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher EvidenceMatcher) EvidenceMatcher {
pat := regexp.MustCompile(sharedLibraryPattern)
return func(classifier Classifier, context matcherContext) (packages []pkg.Package, _ error) {
@@ -157,7 +157,7 @@ func copyBinariesFromDockerImage(config config.BinaryFromImage, destination stri

defer func() {
cmd := exec.Command("docker", "rm", containerName)
cmd.Run() // nolint:errcheck
cmd.Run() //nolint:errcheck
}()

for i, destinationPath := range config.AllStorePathsForImage(image, destination) {
@@ -182,7 +182,7 @@ func copyBinaryFromContainer(containerName, containerPath, destinationPath, fing
return err
}

cmd := exec.Command("docker", "cp", fmt.Sprintf("%s:%s", containerName, containerPath), destinationPath) // nolint:gosec
cmd := exec.Command("docker", "cp", fmt.Sprintf("%s:%s", containerName, containerPath), destinationPath) //nolint:gosec
// reason for gosec exception: this is for processing test fixtures only, not used in production
if err := cmd.Run(); err != nil {
return err
2 changes: 0 additions & 2 deletions syft/pkg/cataloger/debian/package.go
@@ -169,7 +169,6 @@ func getAdditionalFileListing(resolver file.Resolver, dbLocation file.Location,
return files, locations
}

//nolint:dupl
func fetchMd5Contents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgDBEntry) (io.ReadCloser, *file.Location) {
var md5Reader io.ReadCloser
var err error
@@ -213,7 +212,6 @@ func fetchMd5Contents(resolver file.Resolver, dbLocation file.Location, m pkg.Dp
return md5Reader, &l
}

//nolint:dupl
func fetchConffileContents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgDBEntry) (io.ReadCloser, *file.Location) {
var reader io.ReadCloser
var err error
2 changes: 1 addition & 1 deletion syft/pkg/cataloger/debian/parse_dpkg_db.go
@@ -230,7 +230,7 @@ func handleNewKeyValue(line string) (key string, val interface{}, err error) {
if err != nil {
return "", nil, fmt.Errorf("bad installed-size value=%q: %w", val, err)
}
return key, int(s), nil
return key, int(s), nil //nolint:gosec
default:
return key, val, nil
}
1 change: 0 additions & 1 deletion syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go
@@ -264,7 +264,6 @@ func (p *CatalogTester) TestCataloger(t *testing.T, cataloger pkg.Cataloger) {
}
}

// nolint:funlen
func (p *CatalogTester) assertPkgs(t *testing.T, pkgs []pkg.Package, relationships []artifact.Relationship) {
t.Helper()

6 changes: 3 additions & 3 deletions syft/pkg/cataloger/java/graalvm_native_image_cataloger.go
@@ -268,7 +268,7 @@ func newPE(filename string, r io.ReaderAt) (nativeImage, error) {
}
exportSymbolsOffset := uint64(exportSymbolsDataDirectory.VirtualAddress)
exports := make([]byte, exportSymbolsDataDirectory.Size)
_, err = r.ReadAt(exports, int64(exportSymbolsOffset))
_, err = r.ReadAt(exports, int64(exportSymbolsOffset)) //nolint:gosec
if err != nil {
return fileError(filename, fmt.Errorf("could not read the exported symbols data directory: %w", err))
}
@@ -412,7 +412,7 @@ func (ni nativeImagePE) fetchExportAttribute(i int) (uint32, error) {
func (ni nativeImagePE) fetchExportFunctionPointer(functionsBase uint32, i uint32) (uint32, error) {
var pointer uint32

n := uint32(len(ni.exports))
n := uint32(len(ni.exports)) //nolint:gosec
sz := uint32(unsafe.Sizeof(ni.t.functionPointer))
j := functionsBase + i*sz
if j+sz >= n {
@@ -457,7 +457,7 @@ func (ni nativeImagePE) fetchSbomSymbols(content *exportContentPE) {
sbomBytes := []byte(nativeImageSbomSymbol + "\x00")
sbomLengthBytes := []byte(nativeImageSbomLengthSymbol + "\x00")
svmVersionInfoBytes := []byte(nativeImageSbomVersionSymbol + "\x00")
n := uint32(len(ni.exports))
n := uint32(len(ni.exports)) //nolint:gosec

// Find SBOM, SBOM Length, and SVM Version Symbol
for i := uint32(0); i < content.numberOfNames; i++ {
2 changes: 1 addition & 1 deletion syft/pkg/cataloger/java/maven_resolver.go
@@ -342,7 +342,7 @@ func (r *mavenResolver) findPomInRemoteRepository(ctx context.Context, groupID,
Timeout: r.remoteRequestTimeout,
}

resp, err := client.Do(req) //nolint:bodyclose
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("unable to get pom from Maven repository %v: %w", requestURL, err)
}
4 changes: 2 additions & 2 deletions syft/pkg/cataloger/php/parse_pecl_serialized.go
@@ -60,10 +60,10 @@ func readStruct(metadata any, fields ...string) string {
if len(fields) > 0 {
value, ok := metadata.(map[any]any)
if !ok {
log.Tracef("unable to read '%s' from: %v", fields[0], metadata)
log.Tracef("unable to read '%s' from: %v", fields[0], metadata) //nolint:gosec
return ""
}
return readStruct(value[fields[0]], fields[1:]...)
return readStruct(value[fields[0]], fields[1:]...) //nolint:gosec
}
value, ok := metadata.(string)
if !ok {
4 changes: 2 additions & 2 deletions syft/pkg/cataloger/redhat/parse_rpm_archive.go
@@ -88,12 +88,12 @@ func mapFiles(files []rpmutils.FileInfo, digestAlgorithm string) []pkg.RpmFileRe
}
out = append(out, pkg.RpmFileRecord{
Path: f.Name(),
Mode: pkg.RpmFileMode(f.Mode()),
Mode: pkg.RpmFileMode(f.Mode()), //nolint:gosec
Size: int(f.Size()),
Digest: digest,
UserName: f.UserName(),
GroupName: f.GroupName(),
Flags: rpmdb.FileFlags(f.Flags()).String(),
Flags: rpmdb.FileFlags(f.Flags()).String(), //nolint:gosec
})
}
return out
3 changes: 2 additions & 1 deletion syft/pkg/cataloger/redhat/parse_rpm_db.go
@@ -17,7 +17,8 @@ import (
)

// parseRpmDb parses an "Packages" RPM DB and returns the Packages listed within it.
// nolint:funlen
//
//nolint:funlen
func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) {
f, err := os.CreateTemp("", "rpmdb")
if err != nil {
1 change: 0 additions & 1 deletion syft/pkg/cataloger/rust/package.go
@@ -29,7 +29,6 @@ func newPackagesFromAudit(location file.Location, versionInfo rustaudit.VersionI
var pkgs []pkg.Package

for _, dep := range versionInfo.Packages {
dep := dep
p := newPackageFromAudit(&dep, location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation))
if pkg.IsValid(&p) && dep.Kind == rustaudit.Runtime {
pkgs = append(pkgs, p)