From ccbee94b876240284c25c8931c6233fc71a5b7fb Mon Sep 17 00:00:00 2001
From: Keith Zantow
Date: Mon, 7 Oct 2024 16:11:37 -0400
Subject: [PATCH] feat: report unknowns in sbom (#2998)

Signed-off-by: Keith Zantow
Signed-off-by: Alex Goodman
Co-authored-by: Alex Goodman
---
 cmd/syft/internal/options/catalog.go | 12 +
 cmd/syft/internal/options/unknowns.go | 31 +
 internal/constants.go | 2 +-
 internal/file/zip_file_manifest.go | 6 +-
 internal/file/zip_read_closer.go | 8 +-
 internal/task/executor.go | 34 +-
 internal/task/file_tasks.go | 21 +-
 internal/task/package_task_factory.go | 5 +-
 internal/task/unknowns_tasks.go | 114 +
 internal/unknown/coordinate_error.go | 201 ++
 internal/unknown/coordinate_error_test.go | 236 ++
 schema/json/schema-16.0.18.json | 2727 +++++++++++++++++
 schema/json/schema-latest.json | 8 +-
 syft/cataloging/unknowns.go | 15 +
 syft/create_sbom_config.go | 25 +
 syft/create_sbom_config_test.go | 15 +
 syft/file/cataloger/executable/cataloger.go | 37 +-
 syft/file/cataloger/executable/elf.go | 12 +-
 syft/file/cataloger/executable/elf_test.go | 2 +
 syft/file/cataloger/filecontent/cataloger.go | 13 +-
 syft/file/cataloger/filedigest/cataloger.go | 8 +-
 syft/file/cataloger/filemetadata/cataloger.go | 6 +-
 syft/file/coordinates.go | 4 +
 syft/format/syftjson/decoder_test.go | 1 +
 syft/format/syftjson/model/file.go | 1 +
 syft/format/syftjson/to_format_model.go | 6 +
 syft/format/syftjson/to_syft_model.go | 7 +
 syft/format/syftjson/to_syft_model_test.go | 2 +
 syft/pkg/cataloger/alpine/cataloger_test.go | 8 +
 syft/pkg/cataloger/alpine/parse_apk_db.go | 6 +-
 .../corrupt/lib/apk/db/installed | 43 +
 syft/pkg/cataloger/arch/cataloger_test.go | 8 +
 syft/pkg/cataloger/arch/parse_alpm_db.go | 64 +-
 .../var/lib/pacman/local/corrupt-0.2.1-3/desc | 34 +
 .../cataloger/binary/classifier_cataloger.go | 14 +-
 .../binary/classifier_cataloger_test.go | 2 +-
 .../cataloger/binary/elf_package_cataloger.go | 11 +-
 .../binary/elf_package_cataloger_test.go | 5 +
 .../elf-test-fixtures/Dockerfile | 4 +
 .../elfbinwithcorrupt/elfsrc/hello_world.cpp | 6 +
 .../elfbinwithcorrupt/elfsrc/hello_world.h | 8 +
 .../elfbinwithcorrupt/elfsrc/makefile | 48 +
 .../elfbinwithcorrupt/elfsrc/testbin.cpp | 8 +
 .../elfbinwithcorrupt/makefile | 18 +
 syft/pkg/cataloger/cpp/parse_conanfile.go | 3 +-
 syft/pkg/cataloger/cpp/parse_conanlock.go | 3 +-
 .../pkg/cataloger/cpp/parse_conanlock_test.go | 7 +
 .../cpp/test-fixtures/corrupt/conan.lock | 10 +
 syft/pkg/cataloger/dart/parse_pubspec_lock.go | 3 +-
 .../cataloger/dart/parse_pubspec_lock_test.go | 7 +
 .../dart/test-fixtures/corrupt/pubspec.lock | 7 +
 syft/pkg/cataloger/debian/parse_dpkg_db.go | 3 +-
 .../cataloger/debian/parse_dpkg_db_test.go | 13 +-
 .../var/lib/dpkg/status.d/corrupt | 6 +
 .../pkg/cataloger/dotnet/parse_dotnet_deps.go | 3 +-
 .../dotnet/parse_dotnet_deps_test.go | 7 +
 .../parse_dotnet_portable_executable.go | 18 +-
 .../parse_dotnet_portable_executable_test.go | 7 +
 syft/pkg/cataloger/elixir/parse_mix_lock.go | 11 +-
 syft/pkg/cataloger/erlang/parse_otp_app.go | 3 +-
 .../cataloger/erlang/parse_otp_app_test.go | 7 +
 syft/pkg/cataloger/erlang/parse_rebar_lock.go | 3 +-
 .../cataloger/erlang/parse_rebar_lock_test.go | 7 +
 .../erlang/test-fixtures/corrupt/rabbitmq.app | 9 +
 .../erlang/test-fixtures/corrupt/rebar.lock | 11 +
 syft/pkg/cataloger/generic/cataloger.go | 13 +-
 syft/pkg/cataloger/generic/cataloger_test.go | 25 +
 .../gentoo/parse_portage_contents.go | 2 +-
 syft/pkg/cataloger/githubactions/package.go | 9 +-
 .../githubactions/parse_composite_action.go | 18 +-
 .../parse_composite_action_test.go | 7 +
 .../cataloger/githubactions/parse_workflow.go | 35 +-
 .../githubactions/parse_workflow_test.go | 14 +
 .../corrupt/composite-action.yaml | 13 +
 .../corrupt/workflow-multi-job.yaml | 16 +
 syft/pkg/cataloger/golang/parse_go_binary.go | 6 +-
 .../pkg/cataloger/golang/parse_go_mod_test.go | 8 +
 syft/pkg/cataloger/golang/scan_binary.go | 23 +-
 .../golang/test-fixtures/corrupt/go.mod | 11 +
 .../golang/test-fixtures/corrupt/go.sum | 4 +
 .../pkg/cataloger/haskell/parse_stack_lock.go | 5 +-
 .../haskell/parse_stack_lock_test.go | 7 +
 .../pkg/cataloger/haskell/parse_stack_yaml.go | 5 +-
 .../haskell/parse_stack_yaml_test.go | 7 +
 .../haskell/test-fixtures/corrupt/stack.yaml | 6 +
 .../test-fixtures/corrupt/stack.yaml.lock | 8 +
 .../internal/pkgtest/test_generic_parser.go | 15 +-
 syft/pkg/cataloger/java/archive_parser.go | 25 +-
 .../pkg/cataloger/java/archive_parser_test.go | 18 +-
 syft/pkg/cataloger/java/maven_resolver.go | 2 +-
 syft/pkg/cataloger/java/parse_pom_xml.go | 5 +-
 syft/pkg/cataloger/java/parse_pom_xml_test.go | 8 +
 .../java/tar_wrapped_archive_parser_test.go | 9 +
 .../java/test-fixtures/corrupt/example.jar | 1 +
 .../java/test-fixtures/corrupt/example.tar | 1 +
 .../java/test-fixtures/corrupt/pom.xml | 1 +
 .../javascript/parse_package_json.go | 12 +-
 .../javascript/parse_package_json_test.go | 20 +-
 .../javascript/parse_package_lock.go | 3 +-
 .../javascript/parse_package_lock_test.go | 8 +
 .../cataloger/javascript/parse_pnpm_lock.go | 3 +-
 .../javascript/parse_pnpm_lock_test.go | 7 +
 .../cataloger/javascript/parse_yarn_lock.go | 3 +-
 .../test-fixtures/corrupt/package-lock.json | 4 +
 .../test-fixtures/corrupt/package.json | 5 +
 .../test-fixtures/corrupt/pnpm-lock.yaml | 7 +
 syft/pkg/cataloger/kernel/cataloger.go | 7 +-
 syft/pkg/cataloger/lua/parse_rockspec.go | 3 +-
 syft/pkg/cataloger/lua/parse_rockspec_test.go | 7 +
 .../corrupt/bad-1.23.0-0.rockspec | 5 +
 syft/pkg/cataloger/php/parse_composer_lock.go | 3 +-
 .../cataloger/php/parse_composer_lock_test.go | 7 +
 .../pkg/cataloger/php/parse_installed_json.go | 3 +-
 .../php/parse_installed_json_test.go | 7 +
 .../php/parse_pecl_serialized_test.go | 7 +
 .../python/parse_pipfile_lock_test.go | 7 +
 .../pkg/cataloger/python/parse_poetry_lock.go | 3 +-
 .../python/parse_poetry_lock_test.go | 7 +
 .../cataloger/python/parse_requirements.go | 6 +-
 .../python/parse_requirements_test.go | 8 +
 .../redhat/parse_rpm_archive_test.go | 7 +
 syft/pkg/cataloger/redhat/parse_rpm_db.go | 20 +-
 .../pkg/cataloger/redhat/parse_rpm_db_test.go | 7 +
 .../cataloger/redhat/parse_rpm_manifest.go | 3 +-
 syft/pkg/cataloger/ruby/parse_gemfile_lock.go | 3 +-
 syft/pkg/cataloger/rust/cataloger_test.go | 7 +
 syft/pkg/cataloger/rust/parse_audit_binary.go | 16 +-
 syft/pkg/cataloger/rust/parse_cargo_lock.go | 3 +-
 .../cataloger/rust/parse_cargo_lock_test.go | 7 +
 syft/pkg/cataloger/sbom/cataloger_test.go | 7 +
 .../cataloger/swift/parse_package_resolved.go | 5 +-
 .../swift/parse_package_resolved_test.go | 7 +
 .../pkg/cataloger/swift/parse_podfile_lock.go | 3 +-
 .../swift/parse_podfile_lock_test.go | 7 +
 syft/sbom/sbom.go | 4 +
 .../test-fixtures/image-unknowns/Dockerfile | 3 +
 test/cli/test-fixtures/image-unknowns/exe | 0
 .../image-unknowns/executable-script | 2 +
 .../image-unknowns/package-lock.json | 3 +
 .../image-unknowns/unextracted.tar.gz | 0
 .../image-unknowns/unextracted.zip | 0
 .../image-unknowns/unknown-readable.jar | Bin 0 -> 209 bytes
 .../image-unknowns/unknown-unreadable.jar | 0
 test/cli/unknowns_test.go | 39 +
 test/cli/utils_test.go | 27 +-
 145 files changed, 4420 insertions(+), 233 deletions(-)
 create mode 100644 cmd/syft/internal/options/unknowns.go
 create mode 100644 internal/task/unknowns_tasks.go
 create mode 100644 internal/unknown/coordinate_error.go
 create mode 100644 internal/unknown/coordinate_error_test.go
 create mode 100644 schema/json/schema-16.0.18.json
 create mode 100644 syft/cataloging/unknowns.go
 create mode 100644 syft/pkg/cataloger/alpine/test-fixtures/corrupt/lib/apk/db/installed
 create mode 100644 syft/pkg/cataloger/arch/test-fixtures/installed/var/lib/pacman/local/corrupt-0.2.1-3/desc
 create mode 100644 syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.cpp
 create mode 100644 syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.h
 create mode 100644 syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/makefile
 create mode 100644 syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/testbin.cpp
 create mode 100644 syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/makefile
 create mode 100644 syft/pkg/cataloger/cpp/test-fixtures/corrupt/conan.lock
 create mode 100644 syft/pkg/cataloger/dart/test-fixtures/corrupt/pubspec.lock
 create mode 100644 syft/pkg/cataloger/debian/test-fixtures/var/lib/dpkg/status.d/corrupt
 create mode 100644 syft/pkg/cataloger/erlang/test-fixtures/corrupt/rabbitmq.app
 create mode 100644 syft/pkg/cataloger/erlang/test-fixtures/corrupt/rebar.lock
 create mode 100644 syft/pkg/cataloger/githubactions/test-fixtures/corrupt/composite-action.yaml
 create mode 100644 syft/pkg/cataloger/githubactions/test-fixtures/corrupt/workflow-multi-job.yaml
 create mode 100644 syft/pkg/cataloger/golang/test-fixtures/corrupt/go.mod
 create mode 100644 syft/pkg/cataloger/golang/test-fixtures/corrupt/go.sum
 create mode 100644 syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml
 create mode 100644 syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml.lock
 create mode 100644 syft/pkg/cataloger/java/test-fixtures/corrupt/example.jar
 create mode 100644 syft/pkg/cataloger/java/test-fixtures/corrupt/example.tar
 create mode 100644 syft/pkg/cataloger/java/test-fixtures/corrupt/pom.xml
 create mode 100644 syft/pkg/cataloger/javascript/test-fixtures/corrupt/package-lock.json
 create mode 100644 syft/pkg/cataloger/javascript/test-fixtures/corrupt/package.json
 create mode 100644 syft/pkg/cataloger/javascript/test-fixtures/corrupt/pnpm-lock.yaml
 create mode 100644 syft/pkg/cataloger/lua/test-fixtures/corrupt/bad-1.23.0-0.rockspec
 create mode 100644 test/cli/test-fixtures/image-unknowns/Dockerfile
 create mode 100644 test/cli/test-fixtures/image-unknowns/exe
 create mode 100755 test/cli/test-fixtures/image-unknowns/executable-script
 create mode 100644 test/cli/test-fixtures/image-unknowns/package-lock.json
 create mode 100644 test/cli/test-fixtures/image-unknowns/unextracted.tar.gz
 create mode 100644 test/cli/test-fixtures/image-unknowns/unextracted.zip
 create mode 100644 test/cli/test-fixtures/image-unknowns/unknown-readable.jar
 create mode 100644 test/cli/test-fixtures/image-unknowns/unknown-unreadable.jar
 create mode 100644 test/cli/unknowns_test.go

diff --git a/cmd/syft/internal/options/catalog.go b/cmd/syft/internal/options/catalog.go
index e692f546c39..c7e0e4f6d25 100644
--- a/cmd/syft/internal/options/catalog.go
+++ b/cmd/syft/internal/options/catalog.go
@@ -53,6 +53,9 @@ type Catalog struct {
 	Platform string `yaml:"platform" json:"platform"
mapstructure:"platform"` Source sourceConfig `yaml:"source" json:"source" mapstructure:"source"` Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"` + + // configuration for inclusion of unknown information within elements + Unknowns unknownsConfig `yaml:"unknowns" mapstructure:"unknowns"` } var _ interface { @@ -71,6 +74,7 @@ func DefaultCatalog() Catalog { Java: defaultJavaConfig(), File: defaultFileConfig(), Relationships: defaultRelationshipsConfig(), + Unknowns: defaultUnknowns(), Source: defaultSourceConfig(), Parallelism: 1, } @@ -82,6 +86,7 @@ func (cfg Catalog) ToSBOMConfig(id clio.Identification) *syft.CreateSBOMConfig { WithParallelism(cfg.Parallelism). WithRelationshipsConfig(cfg.ToRelationshipsConfig()). WithComplianceConfig(cfg.ToComplianceConfig()). + WithUnknownsConfig(cfg.ToUnknownsConfig()). WithSearchConfig(cfg.ToSearchConfig()). WithPackagesConfig(cfg.ToPackagesConfig()). WithFilesConfig(cfg.ToFilesConfig()). @@ -114,6 +119,13 @@ func (cfg Catalog) ToComplianceConfig() cataloging.ComplianceConfig { } } +func (cfg Catalog) ToUnknownsConfig() cataloging.UnknownsConfig { + return cataloging.UnknownsConfig{ + IncludeExecutablesWithoutPackages: cfg.Unknowns.ExecutablesWithoutPackages, + IncludeUnexpandedArchives: cfg.Unknowns.UnexpandedArchives, + } +} + func (cfg Catalog) ToFilesConfig() filecataloging.Config { hashers, err := intFile.Hashers(cfg.File.Metadata.Digests...) if err != nil { diff --git a/cmd/syft/internal/options/unknowns.go b/cmd/syft/internal/options/unknowns.go new file mode 100644 index 00000000000..e26694902fa --- /dev/null +++ b/cmd/syft/internal/options/unknowns.go @@ -0,0 +1,31 @@ +package options + +import ( + "github.com/anchore/clio" + "github.com/anchore/syft/syft/cataloging" +) + +type unknownsConfig struct { + RemoveWhenPackagesDefined bool `json:"remove-when-packages-defined" yaml:"remove-when-packages-defined" mapstructure:"remove-when-packages-defined"` + ExecutablesWithoutPackages bool `json:"executables-without-packages" yaml:"executables-without-packages" mapstructure:"executables-without-packages"` + UnexpandedArchives bool `json:"unexpanded-archives" yaml:"unexpanded-archives" mapstructure:"unexpanded-archives"` +} + +var _ interface { + clio.FieldDescriber +} = (*unknownsConfig)(nil) + +func (o *unknownsConfig) DescribeFields(descriptions clio.FieldDescriptionSet) { + descriptions.Add(&o.RemoveWhenPackagesDefined, `remove unknown errors on files with discovered packages`) + descriptions.Add(&o.ExecutablesWithoutPackages, `include executables without any identified packages`) + descriptions.Add(&o.UnexpandedArchives, `include archives which were not expanded and searched`) +} + +func defaultUnknowns() unknownsConfig { + def := cataloging.DefaultUnknownsConfig() + return unknownsConfig{ + RemoveWhenPackagesDefined: def.RemoveWhenPackagesDefined, + ExecutablesWithoutPackages: def.IncludeExecutablesWithoutPackages, + UnexpandedArchives: def.IncludeUnexpandedArchives, + } +} diff --git a/internal/constants.go b/internal/constants.go index e21291830b4..4dd27cf7953 100644 --- a/internal/constants.go +++ b/internal/constants.go @@ -3,5 +3,5 @@ package internal const ( // JSONSchemaVersion is the current schema version output by the JSON encoder // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment. 
- JSONSchemaVersion = "16.0.17" + JSONSchemaVersion = "16.0.18" ) diff --git a/internal/file/zip_file_manifest.go b/internal/file/zip_file_manifest.go index 6306ca6b07d..ac61e6ae98e 100644 --- a/internal/file/zip_file_manifest.go +++ b/internal/file/zip_file_manifest.go @@ -1,7 +1,6 @@ package file import ( - "fmt" "os" "sort" "strings" @@ -19,12 +18,13 @@ func NewZipFileManifest(archivePath string) (ZipFileManifest, error) { zipReader, err := OpenZip(archivePath) manifest := make(ZipFileManifest) if err != nil { - return manifest, fmt.Errorf("unable to open zip archive (%s): %w", archivePath, err) + log.Debugf("unable to open zip archive (%s): %v", archivePath, err) + return manifest, err } defer func() { err = zipReader.Close() if err != nil { - log.Warnf("unable to close zip archive (%s): %+v", archivePath, err) + log.Debugf("unable to close zip archive (%s): %+v", archivePath, err) } }() diff --git a/internal/file/zip_read_closer.go b/internal/file/zip_read_closer.go index 4c0f523e741..fd45f52a12b 100644 --- a/internal/file/zip_read_closer.go +++ b/internal/file/zip_read_closer.go @@ -8,6 +8,8 @@ import ( "io" "math" "os" + + "github.com/anchore/syft/internal/log" ) // directoryEndLen, readByf, directoryEnd, and findSignatureInBlock were copied from the golang stdlib, specifically: @@ -46,7 +48,8 @@ func OpenZip(filepath string) (*ZipReadCloser, error) { // need to find the start of the archive and keep track of this offset. offset, err := findArchiveStartOffset(f, fi.Size()) if err != nil { - return nil, fmt.Errorf("cannot find beginning of zip archive=%q : %w", filepath, err) + log.Debugf("cannot find beginning of zip archive=%q : %v", filepath, err) + return nil, err } if _, err := f.Seek(0, io.SeekStart); err != nil { @@ -62,7 +65,8 @@ func OpenZip(filepath string) (*ZipReadCloser, error) { r, err := zip.NewReader(io.NewSectionReader(f, offset64, size), size) if err != nil { - return nil, fmt.Errorf("unable to open ZipReadCloser @ %q: %w", filepath, err) + log.Debugf("unable to open ZipReadCloser @ %q: %v", filepath, err) + return nil, err } return &ZipReadCloser{ diff --git a/internal/task/executor.go b/internal/task/executor.go index 899796424be..0d8754b9d0b 100644 --- a/internal/task/executor.go +++ b/internal/task/executor.go @@ -11,8 +11,10 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/sbomsync" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/sbom" ) type Executor struct { @@ -35,6 +37,12 @@ func NewTaskExecutor(tasks []Task, numWorkers int) *Executor { } func (p *Executor) Execute(ctx context.Context, resolver file.Resolver, s sbomsync.Builder, prog *monitor.CatalogerTaskProgress) error { + var lock sync.Mutex + withLock := func(fn func()) { + lock.Lock() + defer lock.Unlock() + fn() + } var errs error wg := &sync.WaitGroup{} for i := 0; i < p.numWorkers; i++ { @@ -48,9 +56,16 @@ func (p *Executor) Execute(ctx context.Context, resolver file.Resolver, s sbomsy return } - if err := runTaskSafely(ctx, tsk, resolver, s); err != nil { - errs = multierror.Append(errs, fmt.Errorf("failed to run task: %w", err)) - prog.SetError(err) + err := runTaskSafely(ctx, tsk, resolver, s) + unknowns, err := unknown.ExtractCoordinateErrors(err) + if len(unknowns) > 0 { + appendUnknowns(s, tsk.Name(), unknowns) + } + if err != nil { + withLock(func() { + errs = multierror.Append(errs, fmt.Errorf("failed to run task: %w", err)) + prog.SetError(err) + 
}) } prog.Increment() } @@ -62,6 +77,19 @@ func (p *Executor) Execute(ctx context.Context, resolver file.Resolver, s sbomsy return errs } +func appendUnknowns(builder sbomsync.Builder, taskName string, unknowns []unknown.CoordinateError) { + if accessor, ok := builder.(sbomsync.Accessor); ok { + accessor.WriteToSBOM(func(sb *sbom.SBOM) { + for _, u := range unknowns { + if sb.Artifacts.Unknowns == nil { + sb.Artifacts.Unknowns = map[file.Coordinates][]string{} + } + sb.Artifacts.Unknowns[u.Coordinates] = append(sb.Artifacts.Unknowns[u.Coordinates], formatUnknown(u.Reason.Error(), taskName)) + } + }) + } +} + func runTaskSafely(ctx context.Context, t Task, resolver file.Resolver, s sbomsync.Builder) (err error) { // handle individual cataloger panics defer func() { diff --git a/internal/task/file_tasks.go b/internal/task/file_tasks.go index 5369db32720..5b6f7bbd4dd 100644 --- a/internal/task/file_tasks.go +++ b/internal/task/file_tasks.go @@ -3,7 +3,6 @@ package task import ( "context" "crypto" - "fmt" "github.com/anchore/syft/internal/sbomsync" "github.com/anchore/syft/syft/artifact" @@ -32,15 +31,12 @@ func NewFileDigestCatalogerTask(selection file.Selection, hashers ...crypto.Hash } result, err := digestsCataloger.Catalog(ctx, resolver, coordinates...) - if err != nil { - return fmt.Errorf("unable to catalog file digests: %w", err) - } accessor.WriteToSBOM(func(sbom *sbom.SBOM) { sbom.Artifacts.FileDigests = result }) - return nil + return err } return NewTask("file-digest-cataloger", fn) @@ -62,15 +58,12 @@ func NewFileMetadataCatalogerTask(selection file.Selection) Task { } result, err := metadataCataloger.Catalog(ctx, resolver, coordinates...) - if err != nil { - return err - } accessor.WriteToSBOM(func(sbom *sbom.SBOM) { sbom.Artifacts.FileMetadata = result }) - return nil + return err } return NewTask("file-metadata-cataloger", fn) @@ -87,15 +80,12 @@ func NewFileContentCatalogerTask(cfg filecontent.Config) Task { accessor := builder.(sbomsync.Accessor) result, err := cat.Catalog(ctx, resolver) - if err != nil { - return err - } accessor.WriteToSBOM(func(sbom *sbom.SBOM) { sbom.Artifacts.FileContents = result }) - return nil + return err } return NewTask("file-content-cataloger", fn) @@ -112,15 +102,12 @@ func NewExecutableCatalogerTask(selection file.Selection, cfg executable.Config) accessor := builder.(sbomsync.Accessor) result, err := cat.Catalog(resolver) - if err != nil { - return err - } accessor.WriteToSBOM(func(sbom *sbom.SBOM) { sbom.Artifacts.Executables = result }) - return nil + return err } return NewTask("file-executable-cataloger", fn) diff --git a/internal/task/package_task_factory.go b/internal/task/package_task_factory.go index 4a86aed1237..5a65a29f040 100644 --- a/internal/task/package_task_factory.go +++ b/internal/task/package_task_factory.go @@ -103,9 +103,6 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string t := bus.StartCatalogerTask(info, -1, "") pkgs, relationships, err := c.Catalog(ctx, resolver) - if err != nil { - return fmt.Errorf("unable to catalog packages with %q: %w", catalogerName, err) - } log.WithFields("cataloger", catalogerName).Debugf("discovered %d packages", len(pkgs)) @@ -120,7 +117,7 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string t.SetCompleted() log.WithFields("name", catalogerName).Trace("package cataloger completed") - return nil + return err } tags = append(tags, pkgcataloging.PackageTag) diff --git a/internal/task/unknowns_tasks.go b/internal/task/unknowns_tasks.go 
new file mode 100644 index 00000000000..243ba950288 --- /dev/null +++ b/internal/task/unknowns_tasks.go @@ -0,0 +1,114 @@ +package task + +import ( + "context" + "strings" + + "github.com/mholt/archiver/v3" + + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/sbomsync" + "github.com/anchore/syft/syft/cataloging" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/pkg" + "github.com/anchore/syft/syft/sbom" +) + +const unknownsLabelerTaskName = "unknowns-labeler" + +func NewUnknownsLabelerTask(cfg cataloging.UnknownsConfig) Task { + return NewTask(unknownsLabelerTaskName, unknownsLabelerTask{cfg}.processUnknowns) +} + +type unknownsLabelerTask struct { + cataloging.UnknownsConfig +} + +// processUnknowns removes unknown entries that have valid packages reported for the locations +func (c unknownsLabelerTask) processUnknowns(_ context.Context, resolver file.Resolver, builder sbomsync.Builder) error { + accessor := builder.(sbomsync.Accessor) + accessor.WriteToSBOM(func(s *sbom.SBOM) { + c.finalize(resolver, s) + }) + return nil +} + +func (c unknownsLabelerTask) finalize(resolver file.Resolver, s *sbom.SBOM) { + hasPackageReference := coordinateReferenceLookup(resolver, s) + + if c.RemoveWhenPackagesDefined { + for coords := range s.Artifacts.Unknowns { + if !hasPackageReference(coords) { + continue + } + delete(s.Artifacts.Unknowns, coords) + } + } + + if s.Artifacts.Unknowns == nil { + s.Artifacts.Unknowns = map[file.Coordinates][]string{} + } + + if c.IncludeExecutablesWithoutPackages { + for coords := range s.Artifacts.Executables { + if !hasPackageReference(coords) { + s.Artifacts.Unknowns[coords] = append(s.Artifacts.Unknowns[coords], formatUnknown("no package identified in executable file", unknownsLabelerTaskName)) + } + } + } + + if c.IncludeUnexpandedArchives { + for coords := range s.Artifacts.FileMetadata { + unarchiver, notArchiveErr := archiver.ByExtension(coords.RealPath) + if unarchiver != nil && notArchiveErr == nil && !hasPackageReference(coords) { + s.Artifacts.Unknowns[coords] = append(s.Artifacts.Unknowns[coords], "archive not cataloged") + } + } + } +} + +func formatUnknown(err string, task ...string) string { + return strings.Join(task, "/") + ": " + err +} + +func coordinateReferenceLookup(resolver file.Resolver, s *sbom.SBOM) func(coords file.Coordinates) bool { + allPackageCoords := file.NewCoordinateSet() + + // include all directly included locations that result in packages + for p := range s.Artifacts.Packages.Enumerate() { + allPackageCoords.Add(p.Locations.CoordinateSet().ToSlice()...) + } + + // include owned files, for example specified by package managers. 
+ // relationships for these owned files may be disabled, but we always want to include them + for p := range s.Artifacts.Packages.Enumerate() { + if f, ok := p.Metadata.(pkg.FileOwner); ok { + for _, ownedFilePath := range f.OwnedFiles() { + // resolve these owned files, as they may have symlinks + // but coordinates we will test against are always absolute paths + locations, err := resolver.FilesByPath(ownedFilePath) + if err != nil { + log.Debugf("unable to resolve owned file '%s': %v", ownedFilePath, err) + } + for _, loc := range locations { + allPackageCoords.Add(loc.Coordinates) + } + } + } + } + + // include relationships + for _, r := range s.Relationships { + _, fromPkgOk := r.From.(pkg.Package) + fromFile, fromFileOk := r.From.(file.Coordinates) + _, toPkgOk := r.To.(pkg.Package) + toFile, toFileOk := r.To.(file.Coordinates) + if fromPkgOk && toFileOk { + allPackageCoords.Add(toFile) + } else if fromFileOk && toPkgOk { + allPackageCoords.Add(fromFile) + } + } + + return allPackageCoords.Contains +} diff --git a/internal/unknown/coordinate_error.go b/internal/unknown/coordinate_error.go new file mode 100644 index 00000000000..46bc53e11b1 --- /dev/null +++ b/internal/unknown/coordinate_error.go @@ -0,0 +1,201 @@ +package unknown + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" +) + +type hasCoordinates interface { + GetCoordinates() file.Coordinates +} + +type CoordinateError struct { + Coordinates file.Coordinates + Reason error +} + +var _ error = (*CoordinateError)(nil) + +func (u *CoordinateError) Error() string { + if u.Coordinates.FileSystemID == "" { + return fmt.Sprintf("%s: %v", u.Coordinates.RealPath, u.Reason) + } + return fmt.Sprintf("%s (%s): %v", u.Coordinates.RealPath, u.Coordinates.FileSystemID, u.Reason) +} + +// New returns a new CoordinateError unless the reason is a CoordinateError itself, in which case +// reason will be returned directly or if reason is nil, nil will be returned +func New(coords hasCoordinates, reason error) *CoordinateError { + if reason == nil { + return nil + } + coordinates := coords.GetCoordinates() + reasonCoordinateError := &CoordinateError{} + if errors.As(reason, &reasonCoordinateError) { + // if the reason is already a coordinate error, it is potentially for a different location, + // so we do not want to surface this location having an error + return reasonCoordinateError + } + return &CoordinateError{ + Coordinates: coordinates, + Reason: reason, + } +} + +// Newf returns a new CoordinateError with a reason of an error created from given format and args +func Newf(coords hasCoordinates, format string, args ...any) *CoordinateError { + return New(coords, fmt.Errorf(format, args...)) +} + +// Append returns an error joined to the first error/set of errors, with a new CoordinateError appended to the end +func Append(errs error, coords hasCoordinates, reason error) error { + return Join(errs, New(coords, reason)) +} + +// Appendf returns an error joined to the first error/set of errors, with a new CoordinateError appended to the end, +// created from the given reason and args +func Appendf(errs error, coords hasCoordinates, format string, args ...any) error { + return Append(errs, coords, fmt.Errorf(format, args...)) +} + +// Join joins the provided sets of errors together in a flattened manner, taking into account nested errors created +// from other sources, including errors.Join, multierror.Append, and unknown.Join +func Join(errs ...error) error { + var out 
[]error + for _, err := range errs { + // append errors, de-duplicated + for _, e := range flatten(err) { + if containsErr(out, e) { + continue + } + out = append(out, e) + } + } + if len(out) == 1 { + return out[0] + } + if len(out) == 0 { + return nil + } + return errors.Join(out...) +} + +// Joinf joins the provided sets of errors together in a flattened manner, taking into account nested errors created +// from other sources, including errors.Join, multierror.Append, and unknown.Join and appending a new error, +// created from the format and args provided -- the error is NOT a CoordinateError +func Joinf(errs error, format string, args ...any) error { + return Join(errs, fmt.Errorf(format, args...)) +} + +// IfEmptyf returns a new Errorf-formatted error, only when the provided slice is empty or nil when +// the slice has entries +func IfEmptyf[T any](emptyTest []T, format string, args ...any) error { + if len(emptyTest) == 0 { + return fmt.Errorf(format, args...) + } + return nil +} + +// ExtractCoordinateErrors extracts all coordinate errors returned, and any _additional_ errors in the graph +// are encapsulated in the second, error return parameter +func ExtractCoordinateErrors(err error) (coordinateErrors []CoordinateError, remainingErrors error) { + remainingErrors = visitErrors(err, func(e error) error { + if coordinateError, _ := e.(*CoordinateError); coordinateError != nil { + coordinateErrors = append(coordinateErrors, *coordinateError) + return nil + } + return e + }) + return coordinateErrors, remainingErrors +} + +func flatten(errs ...error) []error { + var out []error + for _, err := range errs { + if err == nil { + continue + } + // turn all errors nested under a coordinate error to individual coordinate errors + if e, ok := err.(*CoordinateError); ok { + if e == nil { + continue + } + for _, r := range flatten(e.Reason) { + out = append(out, New(e.Coordinates, r)) + } + } else + // from multierror.Append + if e, ok := err.(interface{ WrappedErrors() []error }); ok { + if e == nil { + continue + } + out = append(out, flatten(e.WrappedErrors()...)...) + } else + // from errors.Join + if e, ok := err.(interface{ Unwrap() []error }); ok { + if e == nil { + continue + } + out = append(out, flatten(e.Unwrap()...)...) + } else { + out = append(out, err) + } + } + return out +} + +// containsErr returns true if a duplicate error is found +func containsErr(out []error, err error) bool { + defer func() { + if err := recover(); err != nil { + log.Tracef("error comparing errors: %v", err) + } + }() + for _, e := range out { + if e == err { + return true + } + } + return false +} + +// visitErrors visits every wrapped error. the returned error replaces the provided error, null errors are omitted from +// the object graph +func visitErrors(err error, fn func(error) error) error { + // unwrap errors from errors.Join + if errs, ok := err.(interface{ Unwrap() []error }); ok { + var out []error + for _, e := range errs.Unwrap() { + out = append(out, visitErrors(e, fn)) + } + // errors.Join omits nil errors and will return nil if all passed errors are nil + return errors.Join(out...) + } + // unwrap errors from multierror.Append -- these also implement Unwrap() error, so check this first + if errs, ok := err.(interface{ WrappedErrors() []error }); ok { + var out []error + for _, e := range errs.WrappedErrors() { + out = append(out, visitErrors(e, fn)) + } + // errors.Join omits nil errors and will return nil if all passed errors are nil + return errors.Join(out...) 
+ } + // unwrap singly wrapped errors + if e, ok := err.(interface{ Unwrap() error }); ok { + wrapped := e.Unwrap() + got := visitErrors(wrapped, fn) + if got == nil { + return nil + } + if wrapped.Error() != got.Error() { + prefix := strings.TrimSuffix(err.Error(), wrapped.Error()) + return fmt.Errorf("%s%w", prefix, got) + } + return err + } + return fn(err) +} diff --git a/internal/unknown/coordinate_error_test.go b/internal/unknown/coordinate_error_test.go new file mode 100644 index 00000000000..27c53a5ff6d --- /dev/null +++ b/internal/unknown/coordinate_error_test.go @@ -0,0 +1,236 @@ +package unknown + +import ( + "errors" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/anchore/syft/syft/file" +) + +func Test_visitErrors(t *testing.T) { + tests := []struct { + name string + in error + transform func(error) error + expected string + }{ + { + name: "return", + in: fmt.Errorf("err1"), + transform: func(e error) error { + return e + }, + expected: "err1", + }, + { + name: "omit", + in: fmt.Errorf("err1"), + transform: func(_ error) error { + return nil + }, + expected: "", + }, + { + name: "wrapped return", + in: fmt.Errorf("wrapped: %w", fmt.Errorf("err1")), + transform: func(e error) error { + return e + }, + expected: "wrapped: err1", + }, + { + name: "wrapped omit", + in: fmt.Errorf("wrapped: %w", fmt.Errorf("err1")), + transform: func(e error) error { + if e.Error() == "err1" { + return nil + } + return e + }, + expected: "", + }, + { + name: "joined return", + in: errors.Join(fmt.Errorf("err1"), fmt.Errorf("err2")), + transform: func(e error) error { + return e + }, + expected: "err1\nerr2", + }, + { + name: "joined omit", + in: errors.Join(fmt.Errorf("err1"), fmt.Errorf("err2")), + transform: func(_ error) error { + return nil + }, + expected: "", + }, + { + name: "joined omit first", + in: errors.Join(fmt.Errorf("err1"), fmt.Errorf("err2")), + transform: func(e error) error { + if e.Error() == "err1" { + return nil + } + return e + }, + expected: "err2", + }, + { + name: "joined wrapped return", + in: errors.Join(fmt.Errorf("wrapped: %w", fmt.Errorf("err1")), fmt.Errorf("err2")), + transform: func(e error) error { + return e + }, + expected: "wrapped: err1\nerr2", + }, + { + name: "joined wrapped omit first", + in: errors.Join(fmt.Errorf("wrapped: %w", fmt.Errorf("err1")), fmt.Errorf("err2")), + transform: func(e error) error { + if e.Error() == "err1" { + return nil + } + return e + }, + expected: "err2", + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + gotErr := visitErrors(test.in, test.transform) + got := fmt.Sprintf("%v", gotErr) + require.Equal(t, test.expected, got) + }) + } +} + +func Test_Join(t *testing.T) { + err1 := fmt.Errorf("err1") + err2 := fmt.Errorf("err2") + + tests := []struct { + name string `` + in []error + expected string + }{ + { + name: "basic", + in: []error{fmt.Errorf("err")}, + expected: "err", + }, + { + name: "wrapped", + in: []error{fmt.Errorf("outer: %w", fmt.Errorf("err"))}, + expected: "outer: err", + }, + { + name: "wrapped joined", + in: []error{errors.Join(fmt.Errorf("outer: %w", fmt.Errorf("err1")), fmt.Errorf("err2"))}, + expected: "outer: err1\nerr2", + }, + { + name: "duplicates", + in: []error{err1, err1, err2}, + expected: "err1\nerr2", + }, + { + name: "nested duplicates", + in: []error{errors.Join(err1, err2), err1, err2}, + expected: "err1\nerr2", + }, + { + name: "nested duplicates coords", + in: []error{New(file.NewLocation("l1"), 
errors.Join(fmt.Errorf("err1"), fmt.Errorf("err2"))), fmt.Errorf("err1"), fmt.Errorf("err2")}, + expected: "l1: err1\nl1: err2\nerr1\nerr2", + }, + { + name: "all nil", + in: []error{nil, nil, nil}, + expected: "", + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got := Join(test.in...) + if test.expected == "" { + require.Nil(t, got) + return + } + require.NotNil(t, got) + require.Equal(t, test.expected, got.Error()) + }) + } +} + +func Test_flatten(t *testing.T) { + coords := file.Coordinates{ + RealPath: "real/path", + } + e1 := fmt.Errorf("e1") + e2 := fmt.Errorf("e2") + c1 := New(coords, fmt.Errorf("c1")) + c2 := New(coords, fmt.Errorf("c2")) + tests := []struct { + name string `` + in error + expected string + }{ + { + name: "basic", + in: errors.Join(e1, e2), + expected: "e1//e2", + }, + { + name: "coords", + in: New(coords, e1), + expected: "real/path: e1", + }, + { + name: "coords with joined children", + in: New(coords, errors.Join(e1, e2)), + expected: "real/path: e1//real/path: e2", + }, + { + name: "very nested", + in: errors.Join(errors.Join(errors.Join(errors.Join(e1, c1), e2), c2), e2), + expected: "e1//real/path: c1//e2//real/path: c2//e2", + }, + } + toString := func(errs ...error) string { + var parts []string + for _, e := range errs { + parts = append(parts, e.Error()) + } + return strings.Join(parts, "//") + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got := flatten(test.in) + require.NotNil(t, got) + require.Equal(t, test.expected, toString(got...)) + }) + } +} + +func Test_Append(t *testing.T) { + e1 := New(file.NewLocation("l1"), fmt.Errorf("e1")) + e2 := Append(e1, file.NewLocation("l2"), fmt.Errorf("e2")) + e3 := Appendf(e2, file.NewLocation("l3"), "%s", "e3") + require.Equal(t, "l1: e1\nl2: e2\nl3: e3", e3.Error()) + + e1 = New(file.NewLocation("l1"), nil) + require.Nil(t, e1) + e2 = Append(e1, file.NewLocation("l2"), fmt.Errorf("e2")) + e3 = Appendf(e2, file.NewLocation("l3"), "%s", "e3") + require.Equal(t, "l2: e2\nl3: e3", e3.Error()) + + e1 = New(file.NewLocation("l1"), fmt.Errorf("e1")) + e2 = Append(e1, file.NewLocation("l2"), nil) + e3 = Appendf(e2, file.NewLocation("l3"), "%s", "e3") + require.Equal(t, "l1: e1\nl3: e3", e3.Error()) +} diff --git a/schema/json/schema-16.0.18.json b/schema/json/schema-16.0.18.json new file mode 100644 index 00000000000..936582e2d22 --- /dev/null +++ b/schema/json/schema-16.0.18.json @@ -0,0 +1,2727 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "anchore.io/schema/syft/json/16.0.18/document", + "$ref": "#/$defs/Document", + "$defs": { + "AlpmDbEntry": { + "properties": { + "basepackage": { + "type": "string" + }, + "package": { + "type": "string" + }, + "version": { + "type": "string" + }, + "description": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "packager": { + "type": "string" + }, + "url": { + "type": "string" + }, + "validation": { + "type": "string" + }, + "reason": { + "type": "integer" + }, + "files": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array" + }, + "backup": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "basepackage", + "package", + "version", + "description", + "architecture", + "size", + "packager", + 
"url", + "validation", + "reason", + "files", + "backup" + ] + }, + "AlpmFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "type": { + "type": "string" + }, + "uid": { + "type": "string" + }, + "gid": { + "type": "string" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "size": { + "type": "string" + }, + "link": { + "type": "string" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + } + }, + "type": "object" + }, + "ApkDbEntry": { + "properties": { + "package": { + "type": "string" + }, + "originPackage": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "version": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "url": { + "type": "string" + }, + "description": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "installedSize": { + "type": "integer" + }, + "pullDependencies": { + "items": { + "type": "string" + }, + "type": "array" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "pullChecksum": { + "type": "string" + }, + "gitCommitOfApkPort": { + "type": "string" + }, + "files": { + "items": { + "$ref": "#/$defs/ApkFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "package", + "originPackage", + "maintainer", + "version", + "architecture", + "url", + "description", + "size", + "installedSize", + "pullDependencies", + "provides", + "pullChecksum", + "gitCommitOfApkPort", + "files" + ] + }, + "ApkFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "ownerUid": { + "type": "string" + }, + "ownerGid": { + "type": "string" + }, + "permissions": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "BinarySignature": { + "properties": { + "matches": { + "items": { + "$ref": "#/$defs/ClassifierMatch" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "matches" + ] + }, + "CConanFileEntry": { + "properties": { + "ref": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "CConanInfoEntry": { + "properties": { + "ref": { + "type": "string" + }, + "package_id": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "CConanLockEntry": { + "properties": { + "ref": { + "type": "string" + }, + "package_id": { + "type": "string" + }, + "prev": { + "type": "string" + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array" + }, + "build_requires": { + "items": { + "type": "string" + }, + "type": "array" + }, + "py_requires": { + "items": { + "type": "string" + }, + "type": "array" + }, + "options": { + "$ref": "#/$defs/KeyValues" + }, + "path": { + "type": "string" + }, + "context": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "CConanLockV2Entry": { + "properties": { + "ref": { + "type": "string" + }, + "packageID": { + "type": "string" + }, + "username": { + "type": "string" + }, + "channel": { + "type": "string" + }, + "recipeRevision": { + "type": "string" + }, + "packageRevision": { + "type": "string" + }, + "timestamp": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "CPE": { + "properties": { + "cpe": { + "type": "string" + }, + "source": { + "type": "string" + } + }, + "type": "object", + "required": [ + "cpe" + ] + }, + "ClassifierMatch": { + "properties": { + "classifier": { + "type": "string" + }, + "location": { + "$ref": 
"#/$defs/Location" + } + }, + "type": "object", + "required": [ + "classifier", + "location" + ] + }, + "CocoaPodfileLockEntry": { + "properties": { + "checksum": { + "type": "string" + } + }, + "type": "object", + "required": [ + "checksum" + ] + }, + "Coordinates": { + "properties": { + "path": { + "type": "string" + }, + "layerID": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "DartPubspecLockEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "hosted_url": { + "type": "string" + }, + "vcs_url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Descriptor": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "configuration": true + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Digest": { + "properties": { + "algorithm": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ] + }, + "Document": { + "properties": { + "artifacts": { + "items": { + "$ref": "#/$defs/Package" + }, + "type": "array" + }, + "artifactRelationships": { + "items": { + "$ref": "#/$defs/Relationship" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/File" + }, + "type": "array" + }, + "source": { + "$ref": "#/$defs/Source" + }, + "distro": { + "$ref": "#/$defs/LinuxRelease" + }, + "descriptor": { + "$ref": "#/$defs/Descriptor" + }, + "schema": { + "$ref": "#/$defs/Schema" + } + }, + "type": "object", + "required": [ + "artifacts", + "artifactRelationships", + "source", + "distro", + "descriptor", + "schema" + ] + }, + "DotnetDepsEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "path": { + "type": "string" + }, + "sha512": { + "type": "string" + }, + "hashPath": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "path", + "sha512", + "hashPath" + ] + }, + "DotnetPortableExecutableEntry": { + "properties": { + "assemblyVersion": { + "type": "string" + }, + "legalCopyright": { + "type": "string" + }, + "comments": { + "type": "string" + }, + "internalName": { + "type": "string" + }, + "companyName": { + "type": "string" + }, + "productName": { + "type": "string" + }, + "productVersion": { + "type": "string" + } + }, + "type": "object", + "required": [ + "assemblyVersion", + "legalCopyright", + "companyName", + "productName", + "productVersion" + ] + }, + "DpkgDbEntry": { + "properties": { + "package": { + "type": "string" + }, + "source": { + "type": "string" + }, + "version": { + "type": "string" + }, + "sourceVersion": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "installedSize": { + "type": "integer" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array" + }, + "preDepends": { + "items": { + "type": "string" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/DpkgFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "package", + "source", + "version", + "sourceVersion", + "architecture", + "maintainer", + "installedSize", + "files" + ] + }, + "DpkgFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + }, + "isConfigFile": { + "type": "boolean" + } 
+ }, + "type": "object", + "required": [ + "path", + "isConfigFile" + ] + }, + "ELFSecurityFeatures": { + "properties": { + "symbolTableStripped": { + "type": "boolean" + }, + "stackCanary": { + "type": "boolean" + }, + "nx": { + "type": "boolean" + }, + "relRO": { + "type": "string" + }, + "pie": { + "type": "boolean" + }, + "dso": { + "type": "boolean" + }, + "safeStack": { + "type": "boolean" + }, + "cfi": { + "type": "boolean" + }, + "fortify": { + "type": "boolean" + } + }, + "type": "object", + "required": [ + "symbolTableStripped", + "nx", + "relRO", + "pie", + "dso" + ] + }, + "ElfBinaryPackageNoteJsonPayload": { + "properties": { + "type": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "osCPE": { + "type": "string" + }, + "os": { + "type": "string" + }, + "osVersion": { + "type": "string" + }, + "system": { + "type": "string" + }, + "vendor": { + "type": "string" + }, + "sourceRepo": { + "type": "string" + }, + "commit": { + "type": "string" + } + }, + "type": "object" + }, + "ElixirMixLockEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "pkgHash": { + "type": "string" + }, + "pkgHashExt": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ] + }, + "ErlangRebarLockEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "pkgHash": { + "type": "string" + }, + "pkgHashExt": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ] + }, + "Executable": { + "properties": { + "format": { + "type": "string" + }, + "hasExports": { + "type": "boolean" + }, + "hasEntrypoint": { + "type": "boolean" + }, + "importedLibraries": { + "items": { + "type": "string" + }, + "type": "array" + }, + "elfSecurityFeatures": { + "$ref": "#/$defs/ELFSecurityFeatures" + } + }, + "type": "object", + "required": [ + "format", + "hasExports", + "hasEntrypoint", + "importedLibraries" + ] + }, + "File": { + "properties": { + "id": { + "type": "string" + }, + "location": { + "$ref": "#/$defs/Coordinates" + }, + "metadata": { + "$ref": "#/$defs/FileMetadataEntry" + }, + "contents": { + "type": "string" + }, + "digests": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + }, + "licenses": { + "items": { + "$ref": "#/$defs/FileLicense" + }, + "type": "array" + }, + "executable": { + "$ref": "#/$defs/Executable" + }, + "unknowns": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "id", + "location" + ] + }, + "FileLicense": { + "properties": { + "value": { + "type": "string" + }, + "spdxExpression": { + "type": "string" + }, + "type": { + "type": "string" + }, + "evidence": { + "$ref": "#/$defs/FileLicenseEvidence" + } + }, + "type": "object", + "required": [ + "value", + "spdxExpression", + "type" + ] + }, + "FileLicenseEvidence": { + "properties": { + "confidence": { + "type": "integer" + }, + "offset": { + "type": "integer" + }, + "extent": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "confidence", + "offset", + "extent" + ] + }, + "FileMetadataEntry": { + "properties": { + "mode": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "linkDestination": { + "type": "string" + }, + "userID": { + "type": "integer" + }, + "groupID": { + "type": "integer" + }, + "mimeType": { + "type": "string" + }, + "size": { + "type": "integer" + } + }, + "type": "object", 
+ "required": [ + "mode", + "type", + "userID", + "groupID", + "mimeType", + "size" + ] + }, + "GoModuleBuildinfoEntry": { + "properties": { + "goBuildSettings": { + "$ref": "#/$defs/KeyValues" + }, + "goCompiledVersion": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "h1Digest": { + "type": "string" + }, + "mainModule": { + "type": "string" + }, + "goCryptoSettings": { + "items": { + "type": "string" + }, + "type": "array" + }, + "goExperiments": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "goCompiledVersion", + "architecture" + ] + }, + "GoModuleEntry": { + "properties": { + "h1Digest": { + "type": "string" + } + }, + "type": "object" + }, + "HaskellHackageStackEntry": { + "properties": { + "pkgHash": { + "type": "string" + } + }, + "type": "object" + }, + "HaskellHackageStackLockEntry": { + "properties": { + "pkgHash": { + "type": "string" + }, + "snapshotURL": { + "type": "string" + } + }, + "type": "object" + }, + "IDLikes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "JavaArchive": { + "properties": { + "virtualPath": { + "type": "string" + }, + "manifest": { + "$ref": "#/$defs/JavaManifest" + }, + "pomProperties": { + "$ref": "#/$defs/JavaPomProperties" + }, + "pomProject": { + "$ref": "#/$defs/JavaPomProject" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "virtualPath" + ] + }, + "JavaJvmInstallation": { + "properties": { + "release": { + "$ref": "#/$defs/JavaVMRelease" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "release", + "files" + ] + }, + "JavaManifest": { + "properties": { + "main": { + "$ref": "#/$defs/KeyValues" + }, + "sections": { + "items": { + "$ref": "#/$defs/KeyValues" + }, + "type": "array" + } + }, + "type": "object" + }, + "JavaPomParent": { + "properties": { + "groupId": { + "type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "type": "object", + "required": [ + "groupId", + "artifactId", + "version" + ] + }, + "JavaPomProject": { + "properties": { + "path": { + "type": "string" + }, + "parent": { + "$ref": "#/$defs/JavaPomParent" + }, + "groupId": { + "type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path", + "groupId", + "artifactId", + "version", + "name" + ] + }, + "JavaPomProperties": { + "properties": { + "path": { + "type": "string" + }, + "name": { + "type": "string" + }, + "groupId": { + "type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + }, + "scope": { + "type": "string" + }, + "extraFields": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "path", + "name", + "groupId", + "artifactId", + "version" + ] + }, + "JavaVMRelease": { + "properties": { + "implementor": { + "type": "string" + }, + "implementorVersion": { + "type": "string" + }, + "javaRuntimeVersion": { + "type": "string" + }, + "javaVersion": { + "type": "string" + }, + "javaVersionDate": { + "type": "string" + }, + "libc": { + "type": "string" + }, + "modules": { + "items": { + "type": "string" + }, + "type": "array" + }, + "osArch": { + "type": 
"string" + }, + "osName": { + "type": "string" + }, + "osVersion": { + "type": "string" + }, + "source": { + "type": "string" + }, + "buildSource": { + "type": "string" + }, + "buildSourceRepo": { + "type": "string" + }, + "sourceRepo": { + "type": "string" + }, + "fullVersion": { + "type": "string" + }, + "semanticVersion": { + "type": "string" + }, + "buildInfo": { + "type": "string" + }, + "jvmVariant": { + "type": "string" + }, + "jvmVersion": { + "type": "string" + }, + "imageType": { + "type": "string" + }, + "buildType": { + "type": "string" + } + }, + "type": "object" + }, + "JavascriptNpmPackage": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "author": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "description": { + "type": "string" + }, + "url": { + "type": "string" + }, + "private": { + "type": "boolean" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "homepage", + "description", + "url", + "private" + ] + }, + "JavascriptNpmPackageLockEntry": { + "properties": { + "resolved": { + "type": "string" + }, + "integrity": { + "type": "string" + } + }, + "type": "object", + "required": [ + "resolved", + "integrity" + ] + }, + "JavascriptYarnLockEntry": { + "properties": { + "resolved": { + "type": "string" + }, + "integrity": { + "type": "string" + } + }, + "type": "object", + "required": [ + "resolved", + "integrity" + ] + }, + "KeyValue": { + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "key", + "value" + ] + }, + "KeyValues": { + "items": { + "$ref": "#/$defs/KeyValue" + }, + "type": "array" + }, + "License": { + "properties": { + "value": { + "type": "string" + }, + "spdxExpression": { + "type": "string" + }, + "type": { + "type": "string" + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array" + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "value", + "spdxExpression", + "type", + "urls", + "locations" + ] + }, + "LinuxKernelArchive": { + "properties": { + "name": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "version": { + "type": "string" + }, + "extendedVersion": { + "type": "string" + }, + "buildTime": { + "type": "string" + }, + "author": { + "type": "string" + }, + "format": { + "type": "string" + }, + "rwRootFS": { + "type": "boolean" + }, + "swapDevice": { + "type": "integer" + }, + "rootDevice": { + "type": "integer" + }, + "videoMode": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "architecture", + "version" + ] + }, + "LinuxKernelModule": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "sourceVersion": { + "type": "string" + }, + "path": { + "type": "string" + }, + "description": { + "type": "string" + }, + "author": { + "type": "string" + }, + "license": { + "type": "string" + }, + "kernelVersion": { + "type": "string" + }, + "versionMagic": { + "type": "string" + }, + "parameters": { + "patternProperties": { + ".*": { + "$ref": "#/$defs/LinuxKernelModuleParameter" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "LinuxKernelModuleParameter": { + "properties": { + "type": { + "type": "string" + }, + "description": { + "type": "string" + } + }, + "type": "object" + }, + "LinuxRelease": { + "properties": { + "prettyName": { + "type": "string" + }, + "name": { + 
"type": "string" + }, + "id": { + "type": "string" + }, + "idLike": { + "$ref": "#/$defs/IDLikes" + }, + "version": { + "type": "string" + }, + "versionID": { + "type": "string" + }, + "versionCodename": { + "type": "string" + }, + "buildID": { + "type": "string" + }, + "imageID": { + "type": "string" + }, + "imageVersion": { + "type": "string" + }, + "variant": { + "type": "string" + }, + "variantID": { + "type": "string" + }, + "homeURL": { + "type": "string" + }, + "supportURL": { + "type": "string" + }, + "bugReportURL": { + "type": "string" + }, + "privacyPolicyURL": { + "type": "string" + }, + "cpeName": { + "type": "string" + }, + "supportEnd": { + "type": "string" + } + }, + "type": "object" + }, + "Location": { + "properties": { + "path": { + "type": "string" + }, + "layerID": { + "type": "string" + }, + "accessPath": { + "type": "string" + }, + "annotations": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "path", + "accessPath" + ] + }, + "LuarocksPackage": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "license": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "description": { + "type": "string" + }, + "url": { + "type": "string" + }, + "dependencies": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "name", + "version", + "license", + "homepage", + "description", + "url", + "dependencies" + ] + }, + "MicrosoftKbPatch": { + "properties": { + "product_id": { + "type": "string" + }, + "kb": { + "type": "string" + } + }, + "type": "object", + "required": [ + "product_id", + "kb" + ] + }, + "NixStoreEntry": { + "properties": { + "outputHash": { + "type": "string" + }, + "output": { + "type": "string" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "outputHash", + "files" + ] + }, + "OpamPackage": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "licenses": { + "items": { + "type": "string" + }, + "type": "array" + }, + "url": { + "type": "string" + }, + "checksum": { + "items": { + "type": "string" + }, + "type": "array" + }, + "homepage": { + "type": "string" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "licenses", + "url", + "checksum", + "homepage", + "dependencies" + ] + }, + "Package": { + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "type": { + "type": "string" + }, + "foundBy": { + "type": "string" + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array" + }, + "licenses": { + "$ref": "#/$defs/licenses" + }, + "language": { + "type": "string" + }, + "cpes": { + "$ref": "#/$defs/cpes" + }, + "purl": { + "type": "string" + }, + "metadataType": { + "type": "string" + }, + "metadata": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/AlpmDbEntry" + }, + { + "$ref": "#/$defs/ApkDbEntry" + }, + { + "$ref": "#/$defs/BinarySignature" + }, + { + "$ref": "#/$defs/CConanFileEntry" + }, + { + "$ref": "#/$defs/CConanInfoEntry" + }, + { + "$ref": "#/$defs/CConanLockEntry" + }, + { + "$ref": "#/$defs/CConanLockV2Entry" + }, + { + "$ref": "#/$defs/CocoaPodfileLockEntry" + }, + { + "$ref": 
"#/$defs/DartPubspecLockEntry" + }, + { + "$ref": "#/$defs/DotnetDepsEntry" + }, + { + "$ref": "#/$defs/DotnetPortableExecutableEntry" + }, + { + "$ref": "#/$defs/DpkgDbEntry" + }, + { + "$ref": "#/$defs/ElfBinaryPackageNoteJsonPayload" + }, + { + "$ref": "#/$defs/ElixirMixLockEntry" + }, + { + "$ref": "#/$defs/ErlangRebarLockEntry" + }, + { + "$ref": "#/$defs/GoModuleBuildinfoEntry" + }, + { + "$ref": "#/$defs/GoModuleEntry" + }, + { + "$ref": "#/$defs/HaskellHackageStackEntry" + }, + { + "$ref": "#/$defs/HaskellHackageStackLockEntry" + }, + { + "$ref": "#/$defs/JavaArchive" + }, + { + "$ref": "#/$defs/JavaJvmInstallation" + }, + { + "$ref": "#/$defs/JavascriptNpmPackage" + }, + { + "$ref": "#/$defs/JavascriptNpmPackageLockEntry" + }, + { + "$ref": "#/$defs/JavascriptYarnLockEntry" + }, + { + "$ref": "#/$defs/LinuxKernelArchive" + }, + { + "$ref": "#/$defs/LinuxKernelModule" + }, + { + "$ref": "#/$defs/LuarocksPackage" + }, + { + "$ref": "#/$defs/MicrosoftKbPatch" + }, + { + "$ref": "#/$defs/NixStoreEntry" + }, + { + "$ref": "#/$defs/OpamPackage" + }, + { + "$ref": "#/$defs/PhpComposerInstalledEntry" + }, + { + "$ref": "#/$defs/PhpComposerLockEntry" + }, + { + "$ref": "#/$defs/PhpPeclEntry" + }, + { + "$ref": "#/$defs/PortageDbEntry" + }, + { + "$ref": "#/$defs/PythonPackage" + }, + { + "$ref": "#/$defs/PythonPipRequirementsEntry" + }, + { + "$ref": "#/$defs/PythonPipfileLockEntry" + }, + { + "$ref": "#/$defs/PythonPoetryLockEntry" + }, + { + "$ref": "#/$defs/RDescription" + }, + { + "$ref": "#/$defs/RpmArchive" + }, + { + "$ref": "#/$defs/RpmDbEntry" + }, + { + "$ref": "#/$defs/RubyGemspec" + }, + { + "$ref": "#/$defs/RustCargoAuditEntry" + }, + { + "$ref": "#/$defs/RustCargoLockEntry" + }, + { + "$ref": "#/$defs/SwiftPackageManagerLockEntry" + }, + { + "$ref": "#/$defs/SwiplpackPackage" + }, + { + "$ref": "#/$defs/WordpressPluginEntry" + } + ] + } + }, + "type": "object", + "required": [ + "id", + "name", + "version", + "type", + "foundBy", + "locations", + "licenses", + "language", + "cpes", + "purl" + ] + }, + "PhpComposerAuthors": { + "properties": { + "name": { + "type": "string" + }, + "email": { + "type": "string" + }, + "homepage": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name" + ] + }, + "PhpComposerExternalReference": { + "properties": { + "type": { + "type": "string" + }, + "url": { + "type": "string" + }, + "reference": { + "type": "string" + }, + "shasum": { + "type": "string" + } + }, + "type": "object", + "required": [ + "type", + "url", + "reference" + ] + }, + "PhpComposerInstalledEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "source": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "dist": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "require": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "provide": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "require-dev": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "suggest": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array" + }, + "type": { + "type": "string" + }, + "notification-url": { + "type": "string" + }, + "bin": { + "items": { + "type": "string" + }, + "type": "array" + }, + "authors": { + "items": { + "$ref": "#/$defs/PhpComposerAuthors" + }, + "type": "array" + }, + 
"description": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "keywords": { + "items": { + "type": "string" + }, + "type": "array" + }, + "time": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "dist" + ] + }, + "PhpComposerLockEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "source": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "dist": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "require": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "provide": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "require-dev": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "suggest": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array" + }, + "type": { + "type": "string" + }, + "notification-url": { + "type": "string" + }, + "bin": { + "items": { + "type": "string" + }, + "type": "array" + }, + "authors": { + "items": { + "$ref": "#/$defs/PhpComposerAuthors" + }, + "type": "array" + }, + "description": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "keywords": { + "items": { + "type": "string" + }, + "type": "array" + }, + "time": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "dist" + ] + }, + "PhpPeclEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "PortageDbEntry": { + "properties": { + "installedSize": { + "type": "integer" + }, + "files": { + "items": { + "$ref": "#/$defs/PortageFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "installedSize", + "files" + ] + }, + "PortageFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "PythonDirectURLOriginInfo": { + "properties": { + "url": { + "type": "string" + }, + "commitId": { + "type": "string" + }, + "vcs": { + "type": "string" + } + }, + "type": "object", + "required": [ + "url" + ] + }, + "PythonFileDigest": { + "properties": { + "algorithm": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ] + }, + "PythonFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/PythonFileDigest" + }, + "size": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "PythonPackage": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "author": { + "type": "string" + }, + "authorEmail": { + "type": "string" + }, + "platform": { + "type": "string" + }, + "files": { + "items": { + "$ref": "#/$defs/PythonFileRecord" + }, + "type": "array" + }, + "sitePackagesRootPath": { + "type": "string" + }, + "topLevelPackages": { + "items": { + "type": "string" + }, + "type": "array" + }, + "directUrlOrigin": { + "$ref": "#/$defs/PythonDirectURLOriginInfo" + }, + "requiresPython": { + "type": "string" + }, + "requiresDist": { + "items": { + "type": "string" + }, + "type": 
"array" + }, + "providesExtra": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "authorEmail", + "platform", + "sitePackagesRootPath" + ] + }, + "PythonPipRequirementsEntry": { + "properties": { + "name": { + "type": "string" + }, + "extras": { + "items": { + "type": "string" + }, + "type": "array" + }, + "versionConstraint": { + "type": "string" + }, + "url": { + "type": "string" + }, + "markers": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "versionConstraint" + ] + }, + "PythonPipfileLockEntry": { + "properties": { + "hashes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "index": { + "type": "string" + } + }, + "type": "object", + "required": [ + "hashes", + "index" + ] + }, + "PythonPoetryLockDependencyEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "optional": { + "type": "boolean" + }, + "markers": { + "type": "string" + }, + "extras": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "optional" + ] + }, + "PythonPoetryLockEntry": { + "properties": { + "index": { + "type": "string" + }, + "dependencies": { + "items": { + "$ref": "#/$defs/PythonPoetryLockDependencyEntry" + }, + "type": "array" + }, + "extras": { + "items": { + "$ref": "#/$defs/PythonPoetryLockExtraEntry" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "index", + "dependencies" + ] + }, + "PythonPoetryLockExtraEntry": { + "properties": { + "name": { + "type": "string" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "dependencies" + ] + }, + "RDescription": { + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "author": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "url": { + "items": { + "type": "string" + }, + "type": "array" + }, + "repository": { + "type": "string" + }, + "built": { + "type": "string" + }, + "needsCompilation": { + "type": "boolean" + }, + "imports": { + "items": { + "type": "string" + }, + "type": "array" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array" + }, + "suggests": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "Relationship": { + "properties": { + "parent": { + "type": "string" + }, + "child": { + "type": "string" + }, + "type": { + "type": "string" + }, + "metadata": true + }, + "type": "object", + "required": [ + "parent", + "child", + "type" + ] + }, + "RpmArchive": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "epoch": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "architecture": { + "type": "string" + }, + "release": { + "type": "string" + }, + "sourceRpm": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "vendor": { + "type": "string" + }, + "modularityLabel": { + "type": "string" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/RpmFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "epoch", + "architecture", + "release", + "sourceRpm", + "size", + "vendor", + 
"files" + ] + }, + "RpmDbEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "epoch": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "architecture": { + "type": "string" + }, + "release": { + "type": "string" + }, + "sourceRpm": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "vendor": { + "type": "string" + }, + "modularityLabel": { + "type": "string" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "requires": { + "items": { + "type": "string" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/RpmFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "epoch", + "architecture", + "release", + "sourceRpm", + "size", + "vendor", + "files" + ] + }, + "RpmFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "mode": { + "type": "integer" + }, + "size": { + "type": "integer" + }, + "digest": { + "$ref": "#/$defs/Digest" + }, + "userName": { + "type": "string" + }, + "groupName": { + "type": "string" + }, + "flags": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path", + "mode", + "size", + "digest", + "userName", + "groupName", + "flags" + ] + }, + "RubyGemspec": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array" + }, + "authors": { + "items": { + "type": "string" + }, + "type": "array" + }, + "homepage": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "RustCargoAuditEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "source": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source" + ] + }, + "RustCargoLockEntry": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "source": { + "type": "string" + }, + "checksum": { + "type": "string" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "checksum", + "dependencies" + ] + }, + "Schema": { + "properties": { + "version": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "version", + "url" + ] + }, + "Source": { + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "type": { + "type": "string" + }, + "metadata": true + }, + "type": "object", + "required": [ + "id", + "name", + "version", + "type", + "metadata" + ] + }, + "SwiftPackageManagerLockEntry": { + "properties": { + "revision": { + "type": "string" + } + }, + "type": "object", + "required": [ + "revision" + ] + }, + "SwiplpackPackage": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "author": { + "type": "string" + }, + "authorEmail": { + "type": "string" + }, + "packager": { + "type": "string" + }, + "packagerEmail": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "authorEmail", + "packager", + "packagerEmail", + "homepage", + "dependencies" + ] + }, + "WordpressPluginEntry": { + "properties": 
{ + "pluginInstallDirectory": { + "type": "string" + }, + "author": { + "type": "string" + }, + "authorUri": { + "type": "string" + } + }, + "type": "object", + "required": [ + "pluginInstallDirectory" + ] + }, + "cpes": { + "items": { + "$ref": "#/$defs/CPE" + }, + "type": "array" + }, + "licenses": { + "items": { + "$ref": "#/$defs/License" + }, + "type": "array" + } + } +} diff --git a/schema/json/schema-latest.json b/schema/json/schema-latest.json index 015c9003b2e..936582e2d22 100644 --- a/schema/json/schema-latest.json +++ b/schema/json/schema-latest.json @@ -1,6 +1,6 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "anchore.io/schema/syft/json/16.0.17/document", + "$id": "anchore.io/schema/syft/json/16.0.18/document", "$ref": "#/$defs/Document", "$defs": { "AlpmDbEntry": { @@ -777,6 +777,12 @@ }, "executable": { "$ref": "#/$defs/Executable" + }, + "unknowns": { + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object", diff --git a/syft/cataloging/unknowns.go b/syft/cataloging/unknowns.go new file mode 100644 index 00000000000..ad3f55fea6c --- /dev/null +++ b/syft/cataloging/unknowns.go @@ -0,0 +1,15 @@ +package cataloging + +type UnknownsConfig struct { + RemoveWhenPackagesDefined bool + IncludeExecutablesWithoutPackages bool + IncludeUnexpandedArchives bool +} + +func DefaultUnknownsConfig() UnknownsConfig { + return UnknownsConfig{ + RemoveWhenPackagesDefined: true, + IncludeExecutablesWithoutPackages: true, + IncludeUnexpandedArchives: true, + } +} diff --git a/syft/create_sbom_config.go b/syft/create_sbom_config.go index 66c1bb3398b..8e9e76b5c3b 100644 --- a/syft/create_sbom_config.go +++ b/syft/create_sbom_config.go @@ -22,6 +22,7 @@ type CreateSBOMConfig struct { Compliance cataloging.ComplianceConfig Search cataloging.SearchConfig Relationships cataloging.RelationshipsConfig + Unknowns cataloging.UnknownsConfig DataGeneration cataloging.DataGenerationConfig Packages pkgcataloging.Config Files filecataloging.Config @@ -113,6 +114,12 @@ func (c *CreateSBOMConfig) WithRelationshipsConfig(cfg cataloging.RelationshipsC return c } +// WithUnknownsConfig allows for defining the specific behavior dealing with unknowns +func (c *CreateSBOMConfig) WithUnknownsConfig(cfg cataloging.UnknownsConfig) *CreateSBOMConfig { + c.Unknowns = cfg + return c +} + // WithDataGenerationConfig allows for defining what data elements that cannot be discovered from the underlying // target being scanned that should be generated after package creation. func (c *CreateSBOMConfig) WithDataGenerationConfig(cfg cataloging.DataGenerationConfig) *CreateSBOMConfig { @@ -173,6 +180,7 @@ func (c *CreateSBOMConfig) makeTaskGroups(src source.Description) ([][]task.Task // generate package and file tasks based on the configuration environmentTasks := c.environmentTasks() relationshipsTasks := c.relationshipTasks(src) + unknownTasks := c.unknownsTasks() fileTasks := c.fileTasks() pkgTasks, selectionEvidence, err := c.packageTasks(src) if err != nil { @@ -192,6 +200,11 @@ func (c *CreateSBOMConfig) makeTaskGroups(src source.Description) ([][]task.Task taskGroups = append(taskGroups, relationshipsTasks) } + // all unknowns tasks should happen after all scanning is complete + if len(unknownTasks) > 0 { + taskGroups = append(taskGroups, unknownTasks) + } + // identifying the environment (i.e. 
the linux release) must be done first as this is required for package cataloging taskGroups = append( [][]task.Task{ @@ -338,6 +351,18 @@ func (c *CreateSBOMConfig) environmentTasks() []task.Task { return tsks } +// unknownsTasks returns a set of tasks that perform any necessary post-processing +// to identify SBOM elements as unknowns +func (c *CreateSBOMConfig) unknownsTasks() []task.Task { + var tasks []task.Task + + if t := task.NewUnknownsLabelerTask(c.Unknowns); t != nil { + tasks = append(tasks, t) + } + + return tasks +} + func (c *CreateSBOMConfig) validate() error { if c.Relationships.ExcludeBinaryPackagesWithFileOwnershipOverlap { if !c.Relationships.PackageFileOwnershipOverlap { diff --git a/syft/create_sbom_config_test.go b/syft/create_sbom_config_test.go index 8c74b57cd50..64a9a2a0af4 100644 --- a/syft/create_sbom_config_test.go +++ b/syft/create_sbom_config_test.go @@ -90,6 +90,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "image"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -108,6 +109,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "directory"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -127,6 +129,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "directory"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -145,6 +148,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "image"), fileCatalogerNames(false, true, true), // note: the digest cataloger is not included relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -163,6 +167,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "image"), // note: there are no file catalogers in their own group relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -184,6 +189,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { fileCatalogerNames(true, true, true)..., ), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -204,6 +210,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { addTo(pkgCatalogerNamesWithTagOrName(t, "image"), "persistent"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -224,6 +231,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { addTo(pkgCatalogerNamesWithTagOrName(t, "directory"), "persistent"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -244,6 +252,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { addTo(pkgIntersect("image", "javascript"), "persistent"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: 
&catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -265,6 +274,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { addTo(pkgCatalogerNamesWithTagOrName(t, "image"), "user-provided"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -285,6 +295,7 @@ func TestCreateSBOMConfig_makeTaskGroups(t *testing.T) { pkgCatalogerNamesWithTagOrName(t, "image"), fileCatalogerNames(true, true, true), relationshipCatalogerNames(), + unknownsTaskNames(), }, wantManifest: &catalogerManifest{ Requested: pkgcataloging.SelectionRequest{ @@ -385,6 +396,10 @@ func relationshipCatalogerNames() []string { return []string{"relationships-cataloger"} } +func unknownsTaskNames() []string { + return []string{"unknowns-labeler"} +} + func environmentCatalogerNames() []string { return []string{"environment-cataloger"} } diff --git a/syft/file/cataloger/executable/cataloger.go b/syft/file/cataloger/executable/cataloger.go index b0f10e15d02..7de2b33b0d0 100644 --- a/syft/file/cataloger/executable/cataloger.go +++ b/syft/file/cataloger/executable/cataloger.go @@ -15,6 +15,7 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/mimetype" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/internal/unionreader" @@ -45,6 +46,8 @@ func NewCataloger(cfg Config) *Cataloger { } func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.Executable, error) { + var errs error + locs, err := resolver.FilesByMIMEType(i.config.MIMETypes...) if err != nil { return nil, fmt.Errorf("unable to get file locations for binaries: %w", err) @@ -61,7 +64,10 @@ func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.E for _, loc := range locs { prog.AtomicStage.Set(loc.Path()) - exec := processExecutableLocation(loc, resolver) + exec, err := processExecutableLocation(loc, resolver) + if err != nil { + errs = unknown.Append(errs, loc, err) + } if exec != nil { prog.Increment() @@ -74,30 +80,24 @@ func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]file.E prog.AtomicStage.Set(fmt.Sprintf("%s executables", humanize.Comma(prog.Current()))) prog.SetCompleted() - return results, nil + return results, errs } -func processExecutableLocation(loc file.Location, resolver file.Resolver) *file.Executable { +func processExecutableLocation(loc file.Location, resolver file.Resolver) (*file.Executable, error) { reader, err := resolver.FileContentsByLocation(loc) if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Warnf("unable to get file contents for %q", loc.RealPath) - return nil + return nil, fmt.Errorf("unable to get file contents: %w", err) } defer internal.CloseAndLogError(reader, loc.RealPath) uReader, err := unionreader.GetUnionReader(reader) if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Warnf("unable to get union reader for %q", loc.RealPath) - return nil + return nil, fmt.Errorf("unable to get union reader: %w", err) } - exec, err := processExecutable(loc, uReader) - if err != nil { - log.WithFields("error", err).Warnf("unable to process executable %q", loc.RealPath) - } - return exec + return processExecutable(loc, uReader) } func catalogingProgress(locations int64) *monitor.CatalogerTaskProgress { @@ -153,10 +153,12 @@ func 
processExecutable(loc file.Location, reader unionreader.UnionReader) (*file format, err := findExecutableFormat(reader) if err != nil { + log.Debugf("unable to determine executable kind for %v: %v", loc.RealPath, err) return nil, fmt.Errorf("unable to determine executable kind: %w", err) } if format == "" { + // this is not an "unknown", so just log -- this binary does not have parseable data in it log.Debugf("unable to determine executable format for %q", loc.RealPath) return nil, nil } @@ -165,16 +167,19 @@ func processExecutable(loc file.Location, reader unionreader.UnionReader) (*file switch format { case file.ELF: - if err := findELFFeatures(&data, reader); err != nil { + if err = findELFFeatures(&data, reader); err != nil { log.WithFields("error", err).Tracef("unable to determine ELF features for %q", loc.RealPath) + err = fmt.Errorf("unable to determine ELF features: %w", err) } case file.PE: - if err := findPEFeatures(&data, reader); err != nil { + if err = findPEFeatures(&data, reader); err != nil { log.WithFields("error", err).Tracef("unable to determine PE features for %q", loc.RealPath) + err = fmt.Errorf("unable to determine PE features: %w", err) } case file.MachO: - if err := findMachoFeatures(&data, reader); err != nil { + if err = findMachoFeatures(&data, reader); err != nil { log.WithFields("error", err).Tracef("unable to determine Macho features for %q", loc.RealPath) + err = fmt.Errorf("unable to determine Macho features: %w", err) } } @@ -183,7 +188,7 @@ func processExecutable(loc file.Location, reader unionreader.UnionReader) (*file data.ImportedLibraries = []string{} } - return &data, nil + return &data, err } func findExecutableFormat(reader unionreader.UnionReader) (file.ExecutableFormat, error) { diff --git a/syft/file/cataloger/executable/elf.go b/syft/file/cataloger/executable/elf.go index dec6abd34ed..b9d2205cfa5 100644 --- a/syft/file/cataloger/executable/elf.go +++ b/syft/file/cataloger/executable/elf.go @@ -8,6 +8,7 @@ import ( "github.com/scylladb/go-set/strset" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/internal/unionreader" ) @@ -20,8 +21,8 @@ func findELFFeatures(data *file.Executable, reader unionreader.UnionReader) erro libs, err := f.ImportedLibraries() if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read imported libraries from elf file") + err = unknown.Joinf(err, "unable to read imported libraries from elf file: %w", err) libs = nil } @@ -34,7 +35,7 @@ func findELFFeatures(data *file.Executable, reader unionreader.UnionReader) erro data.HasEntrypoint = elfHasEntrypoint(f) data.HasExports = elfHasExports(f) - return nil + return err } func findELFSecurityFeatures(f *elf.File) *file.ELFSecurityFeatures { @@ -62,7 +63,6 @@ func checkElfStackCanary(file *elf.File) *bool { func hasAnyDynamicSymbols(file *elf.File, symbolNames ...string) *bool { dynSyms, err := file.DynamicSymbols() if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read dynamic symbols from elf file") return nil } @@ -129,7 +129,6 @@ func hasBindNowDynTagOrFlag(f *elf.File) bool { func hasElfDynFlag(f *elf.File, flag elf.DynFlag) bool { vals, err := f.DynValue(elf.DT_FLAGS) if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read DT_FLAGS from elf file") return false } @@ -144,7 +143,6 @@ func hasElfDynFlag(f *elf.File, flag elf.DynFlag) bool { func hasElfDynFlag1(f 
*elf.File, flag elf.DynFlag1) bool { vals, err := f.DynValue(elf.DT_FLAGS_1) if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read DT_FLAGS_1 from elf file") return false } @@ -203,7 +201,6 @@ func checkLLVMControlFlowIntegrity(file *elf.File) *bool { // look for any symbols that are functions and end with ".cfi" dynSyms, err := file.Symbols() if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read symbols from elf file") return nil } @@ -225,7 +222,6 @@ var fortifyPattern = regexp.MustCompile(`__\w+_chk@.+`) func checkClangFortifySource(file *elf.File) *bool { dynSyms, err := file.Symbols() if err != nil { - // TODO: known-unknowns log.WithFields("error", err).Trace("unable to read symbols from elf file") return nil } @@ -254,7 +250,7 @@ func elfHasExports(f *elf.File) bool { // really anything that is not marked with 'U' (undefined) is considered an export. symbols, err := f.DynamicSymbols() if err != nil { - // TODO: known-unknowns? + log.WithFields("error", err).Trace("unable to get ELF dynamic symbols") return false } diff --git a/syft/file/cataloger/executable/elf_test.go b/syft/file/cataloger/executable/elf_test.go index 45477589106..fd3536cca60 100644 --- a/syft/file/cataloger/executable/elf_test.go +++ b/syft/file/cataloger/executable/elf_test.go @@ -27,6 +27,7 @@ func Test_findELFSecurityFeatures(t *testing.T) { name string fixture string want *file.ELFSecurityFeatures + wantErr require.ErrorAssertionFunc wantStripped bool }{ { @@ -221,6 +222,7 @@ func Test_elfHasExports(t *testing.T) { f, err := elf.NewFile(readerForFixture(t, tt.fixture)) require.NoError(t, err) assert.Equal(t, tt.want, elfHasExports(f)) + require.NoError(t, err) }) } } diff --git a/syft/file/cataloger/filecontent/cataloger.go b/syft/file/cataloger/filecontent/cataloger.go index 36a411d3949..b88257308e0 100644 --- a/syft/file/cataloger/filecontent/cataloger.go +++ b/syft/file/cataloger/filecontent/cataloger.go @@ -13,6 +13,7 @@ import ( "github.com/anchore/syft/internal/bus" intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/file" ) @@ -46,6 +47,7 @@ func NewCataloger(cfg Config) *Cataloger { func (i *Cataloger) Catalog(_ context.Context, resolver file.Resolver) (map[file.Coordinates]string, error) { results := make(map[file.Coordinates]string) var locations []file.Location + var errs error locations, err := resolver.FilesByGlob(i.globs...) 
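The filecontent cataloger hunk here is representative of the error-handling pattern this change rolls out across the file and package catalogers: rather than aborting on the first per-file failure, each failure is wrapped against the offending file's coordinates with the internal unknown helpers and returned alongside whatever was successfully cataloged. A minimal sketch of that shape follows (illustrative only, not part of the diff): processLocation is a hypothetical stand-in for the real per-file work, the unknown.Append signature is inferred from its uses in this patch, and the unknown package is internal to the syft module.

package example

import (
	"fmt"

	"github.com/anchore/syft/internal/unknown"
	"github.com/anchore/syft/syft/file"
)

// processLocation is a hypothetical stand-in for the real per-file work a cataloger does.
func processLocation(_ file.Resolver, _ file.Location) (string, error) { return "", nil }

// catalogAll demonstrates the accumulate-and-continue handling used throughout this patch:
// per-file failures become coordinate-bearing errors, and partial results are still returned.
func catalogAll(resolver file.Resolver, globs ...string) (map[file.Coordinates]string, error) {
	results := make(map[file.Coordinates]string)
	var errs error

	locations, err := resolver.FilesByGlob(globs...)
	if err != nil {
		return nil, fmt.Errorf("unable to find file locations: %w", err)
	}

	for _, location := range locations {
		content, err := processLocation(resolver, location)
		if err != nil {
			// record an "unknown" for this file and keep cataloging the rest
			errs = unknown.Append(errs, location, err)
			continue
		}
		results[location.Coordinates] = content
	}

	// partial results plus any accumulated coordinate-bearing errors
	return results, errs
}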
if err != nil { @@ -59,8 +61,9 @@ func (i *Cataloger) Catalog(_ context.Context, resolver file.Resolver) (map[file metadata, err := resolver.FileMetadataByLocation(location) if err != nil { + errs = unknown.Append(errs, location, err) prog.SetError(err) - return nil, err + continue } if i.skipFilesAboveSizeInBytes > 0 && metadata.Size() > i.skipFilesAboveSizeInBytes { @@ -69,12 +72,12 @@ func (i *Cataloger) Catalog(_ context.Context, resolver file.Resolver) (map[file result, err := i.catalogLocation(resolver, location) if internal.IsErrPathPermission(err) { - log.Debugf("file contents cataloger skipping - %+v", err) + errs = unknown.Append(errs, location, fmt.Errorf("permission error reading file contents: %w", err)) continue } if err != nil { - prog.SetError(err) - return nil, err + errs = unknown.Append(errs, location, err) + continue } prog.Increment() @@ -87,7 +90,7 @@ func (i *Cataloger) Catalog(_ context.Context, resolver file.Resolver) (map[file prog.AtomicStage.Set(fmt.Sprintf("%s files", humanize.Comma(prog.Current()))) prog.SetCompleted() - return results, nil + return results, errs } func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) (string, error) { diff --git a/syft/file/cataloger/filedigest/cataloger.go b/syft/file/cataloger/filedigest/cataloger.go index e5d8c347e96..f8aa9ace116 100644 --- a/syft/file/cataloger/filedigest/cataloger.go +++ b/syft/file/cataloger/filedigest/cataloger.go @@ -13,6 +13,7 @@ import ( "github.com/anchore/syft/internal/bus" intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/file" intCataloger "github.com/anchore/syft/syft/file/cataloger/internal" @@ -33,6 +34,7 @@ func NewCataloger(hashes []crypto.Hash) *Cataloger { func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates][]file.Digest, error) { results := make(map[file.Coordinates][]file.Digest) var locations []file.Location + var errs error if len(coordinates) == 0 { locations = intCataloger.AllRegularFiles(ctx, resolver) @@ -58,12 +60,14 @@ func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordin if internal.IsErrPathPermission(err) { log.Debugf("file digests cataloger skipping %q: %+v", location.RealPath, err) + errs = unknown.Append(errs, location, err) continue } if err != nil { prog.SetError(err) - return nil, fmt.Errorf("failed to process file %q: %w", location.RealPath, err) + errs = unknown.Append(errs, location, err) + continue } prog.Increment() @@ -76,7 +80,7 @@ func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordin prog.AtomicStage.Set(fmt.Sprintf("%s files", humanize.Comma(prog.Current()))) prog.SetCompleted() - return results, nil + return results, errs } func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.Digest, error) { diff --git a/syft/file/cataloger/filemetadata/cataloger.go b/syft/file/cataloger/filemetadata/cataloger.go index e6935df73d7..b158e751ddc 100644 --- a/syft/file/cataloger/filemetadata/cataloger.go +++ b/syft/file/cataloger/filemetadata/cataloger.go @@ -8,6 +8,7 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/event/monitor" "github.com/anchore/syft/syft/file" ) @@ -20,6 +21,7 @@ func NewCataloger() *Cataloger 
{ } func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates]file.Metadata, error) { + var errs error results := make(map[file.Coordinates]file.Metadata) var locations <-chan file.Location ctx, cancel := context.WithCancel(ctx) @@ -34,7 +36,7 @@ func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordin for _, c := range coordinates { locs, err := resolver.FilesByPath(c.RealPath) if err != nil { - log.Warn("unable to get file locations for path %q: %w", c.RealPath, err) + errs = unknown.Append(errs, c, err) continue } for _, loc := range locs { @@ -71,7 +73,7 @@ func (i *Cataloger) Catalog(ctx context.Context, resolver file.Resolver, coordin prog.AtomicStage.Set(fmt.Sprintf("%s locations", humanize.Comma(prog.Current()))) prog.SetCompleted() - return results, nil + return results, errs } func catalogingProgress(locations int64) *monitor.CatalogerTaskProgress { diff --git a/syft/file/coordinates.go b/syft/file/coordinates.go index 16256c31b17..0d6cd0ec34a 100644 --- a/syft/file/coordinates.go +++ b/syft/file/coordinates.go @@ -39,3 +39,7 @@ func (c Coordinates) String() string { } return fmt.Sprintf("Location<%s>", str) } + +func (c Coordinates) GetCoordinates() Coordinates { + return c +} diff --git a/syft/format/syftjson/decoder_test.go b/syft/format/syftjson/decoder_test.go index daa019dc4fb..27a6813b93b 100644 --- a/syft/format/syftjson/decoder_test.go +++ b/syft/format/syftjson/decoder_test.go @@ -221,6 +221,7 @@ func Test_encodeDecodeFileMetadata(t *testing.T) { }, }, }, + Unknowns: map[file.Coordinates][]string{}, Executables: map[file.Coordinates]file.Executable{ c: { Format: file.ELF, diff --git a/syft/format/syftjson/model/file.go b/syft/format/syftjson/model/file.go index a98349f4104..0ce0d92b306 100644 --- a/syft/format/syftjson/model/file.go +++ b/syft/format/syftjson/model/file.go @@ -13,6 +13,7 @@ type File struct { Digests []file.Digest `json:"digests,omitempty"` Licenses []FileLicense `json:"licenses,omitempty"` Executable *file.Executable `json:"executable,omitempty"` + Unknowns []string `json:"unknowns,omitempty"` } type FileMetadataEntry struct { diff --git a/syft/format/syftjson/to_format_model.go b/syft/format/syftjson/to_format_model.go index 42ec48f77d2..a5cd128a8b4 100644 --- a/syft/format/syftjson/to_format_model.go +++ b/syft/format/syftjson/to_format_model.go @@ -101,6 +101,11 @@ func toFile(s sbom.SBOM) []model.File { contents = contentsForLocation } + var unknowns []string + if unknownsForLocation, exists := artifacts.Unknowns[coordinates]; exists { + unknowns = unknownsForLocation + } + var licenses []model.FileLicense for _, l := range artifacts.FileLicenses[coordinates] { var evidence *model.FileLicenseEvidence @@ -132,6 +137,7 @@ func toFile(s sbom.SBOM) []model.File { Contents: contents, Licenses: licenses, Executable: executable, + Unknowns: unknowns, }) } diff --git a/syft/format/syftjson/to_syft_model.go b/syft/format/syftjson/to_syft_model.go index b2b1916e26d..154b8fa8e65 100644 --- a/syft/format/syftjson/to_syft_model.go +++ b/syft/format/syftjson/to_syft_model.go @@ -38,6 +38,7 @@ func toSyftModel(doc model.Document) *sbom.SBOM { FileContents: fileArtifacts.FileContents, FileLicenses: fileArtifacts.FileLicenses, Executables: fileArtifacts.Executables, + Unknowns: fileArtifacts.Unknowns, LinuxDistribution: toSyftLinuxRelease(doc.Distro), }, Source: *toSyftSourceData(doc.Source), @@ -66,6 +67,7 @@ func deduplicateErrors(errors []error) []string { return 
errorMessages } +//nolint:funlen func toSyftFiles(files []model.File) sbom.Artifacts { ret := sbom.Artifacts{ FileMetadata: make(map[file.Coordinates]file.Metadata), @@ -73,6 +75,7 @@ func toSyftFiles(files []model.File) sbom.Artifacts { FileContents: make(map[file.Coordinates]string), FileLicenses: make(map[file.Coordinates][]file.License), Executables: make(map[file.Coordinates]file.Executable), + Unknowns: make(map[file.Coordinates][]string), } for _, f := range files { @@ -130,6 +133,10 @@ func toSyftFiles(files []model.File) sbom.Artifacts { if f.Executable != nil { ret.Executables[coord] = *f.Executable } + + if len(f.Unknowns) > 0 { + ret.Unknowns[coord] = f.Unknowns + } } return ret diff --git a/syft/format/syftjson/to_syft_model_test.go b/syft/format/syftjson/to_syft_model_test.go index 5559db2953c..babba6345ff 100644 --- a/syft/format/syftjson/to_syft_model_test.go +++ b/syft/format/syftjson/to_syft_model_test.go @@ -234,6 +234,7 @@ func Test_toSyftFiles(t *testing.T) { FileMetadata: map[file.Coordinates]file.Metadata{}, FileDigests: map[file.Coordinates][]file.Digest{}, Executables: map[file.Coordinates]file.Executable{}, + Unknowns: make(map[file.Coordinates][]string), }, }, { @@ -349,6 +350,7 @@ func Test_toSyftFiles(t *testing.T) { t.Run(tt.name, func(t *testing.T) { tt.want.FileContents = make(map[file.Coordinates]string) tt.want.FileLicenses = make(map[file.Coordinates][]file.License) + tt.want.Unknowns = make(map[file.Coordinates][]string) assert.Equal(t, tt.want, toSyftFiles(tt.files)) }) } diff --git a/syft/pkg/cataloger/alpine/cataloger_test.go b/syft/pkg/cataloger/alpine/cataloger_test.go index 8ac6eaa8ed3..fff8e77f4ed 100644 --- a/syft/pkg/cataloger/alpine/cataloger_test.go +++ b/syft/pkg/cataloger/alpine/cataloger_test.go @@ -190,6 +190,14 @@ func TestApkDBCataloger(t *testing.T) { } +func Test_corruptDb(t *testing.T) { + pkgtest.NewCatalogTester(). + FromDirectory(t, "test-fixtures/corrupt"). + WithCompareOptions(cmpopts.IgnoreFields(pkg.ApkDBEntry{}, "Files", "GitCommit", "Checksum")). + WithError(). 
+ TestCataloger(t, NewDBCataloger()) +} + func TestCatalogerDependencyTree(t *testing.T) { assertion := func(t *testing.T, pkgs []pkg.Package, relationships []artifact.Relationship) { expected := map[string][]string{ diff --git a/syft/pkg/cataloger/alpine/parse_apk_db.go b/syft/pkg/cataloger/alpine/parse_apk_db.go index 227d93bcbb4..181b2bbf41d 100644 --- a/syft/pkg/cataloger/alpine/parse_apk_db.go +++ b/syft/pkg/cataloger/alpine/parse_apk_db.go @@ -12,6 +12,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" @@ -38,6 +39,7 @@ type parsedData struct { func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { scanner := bufio.NewScanner(reader) + var errs error var apks []parsedData var currentEntry parsedData entryParsingInProgress := false @@ -81,10 +83,12 @@ func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environm field := parseApkField(line) if field == nil { log.Warnf("unable to parse field data from line %q", line) + errs = unknown.Appendf(errs, reader, "unable to parse field data from line %q", line) continue } if len(field.name) == 0 { log.Warnf("failed to parse field name from line %q", line) + errs = unknown.Appendf(errs, reader, "failed to parse field name from line %q", line) continue } if len(field.value) == 0 { @@ -131,7 +135,7 @@ func parseApkDB(_ context.Context, resolver file.Resolver, env *generic.Environm pkgs = append(pkgs, newPackage(apk, r, reader.Location)) } - return pkgs, nil, nil + return pkgs, nil, errs } func findReleases(resolver file.Resolver, dbPath string) []linux.Release { diff --git a/syft/pkg/cataloger/alpine/test-fixtures/corrupt/lib/apk/db/installed b/syft/pkg/cataloger/alpine/test-fixtures/corrupt/lib/apk/db/installed new file mode 100644 index 00000000000..b1f971acc27 --- /dev/null +++ b/syft/pkg/cataloger/alpine/test-fixtures/corrupt/lib/apk/db/installed @@ -0,0 +1,43 @@ +this is a corrupt db + +C:Q1v4QhLje3kWlC8DJj+ZfJTjlJRSU= +P:alpine-baselayout-data +V:3.2.0-r22 +A:x86_64 +S:11435 +I:73728 +o:alpine-baselayout +t:1655134784 +c:cb70ca5c6d6db0399d2dd09189c5d57827bce5cd +r:alpine-baselayout +F:etc +R:fstab +Z:Q11Q7hNe8QpDS531guqCdrXBzoA/o= +R:group +Z:Q13K+olJg5ayzHSVNUkggZJXuB+9Y= +R:hostname +Z:Q16nVwYVXP/tChvUPdukVD2ifXOmc= +R:hosts +Z:Q1BD6zJKZTRWyqGnPi4tSfd3krsMU= +R:inittab +Z:Q1TsthbhW7QzWRe1E/NKwTOuD4pHc= +R:modules +Z:Q1toogjUipHGcMgECgPJX64SwUT1M= +R:mtab +a:0:0:777 +Z:Q1kiljhXXH1LlQroHsEJIkPZg2eiw= +R:passwd +Z:Q1TchuuLUfur0izvfZQZxgN/LJhB8= +R:profile +Z:Q1F3DgXUP+jNZDknmQPPb5t9FSfDg= +R:protocols +Z:Q1omKlp3vgGq2ZqYzyD/KHNdo8rDc= +R:services +Z:Q19WLCv5ItKg4MH7RWfNRh1I7byQc= +R:shadow +a:0:42:640 +Z:Q1ltrPIAW2zHeDiajsex2Bdmq3uqA= +R:shells +Z:Q1ojm2YdpCJ6B/apGDaZ/Sdb2xJkA= +R:sysctl.conf +Z:Q14upz3tfnNxZkIEsUhWn7Xoiw96g= diff --git a/syft/pkg/cataloger/arch/cataloger_test.go b/syft/pkg/cataloger/arch/cataloger_test.go index 0badcb828fb..e6152524612 100644 --- a/syft/pkg/cataloger/arch/cataloger_test.go +++ b/syft/pkg/cataloger/arch/cataloger_test.go @@ -11,6 +11,14 @@ import ( "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" ) +func TestAlpmUnknowns(t *testing.T) { + pkgtest.NewCatalogTester(). + FromDirectory(t, "test-fixtures/installed"). 
+ WithCompareOptions(cmpopts.IgnoreFields(pkg.AlpmFileRecord{}, "Time")). + WithError(). + TestCataloger(t, NewDBCataloger()) +} + func TestAlpmCataloger(t *testing.T) { gmpDbLocation := file.NewLocation("var/lib/pacman/local/gmp-6.2.1-2/desc") treeSitterDbLocation := file.NewLocation("var/lib/pacman/local/tree-sitter-0.22.6-1/desc") diff --git a/syft/pkg/cataloger/arch/parse_alpm_db.go b/syft/pkg/cataloger/arch/parse_alpm_db.go index cb068df8156..4c79abb8ff3 100644 --- a/syft/pkg/cataloger/arch/parse_alpm_db.go +++ b/syft/pkg/cataloger/arch/parse_alpm_db.go @@ -17,6 +17,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -41,6 +42,8 @@ type parsedData struct { // parseAlpmDB parses the arch linux pacman database flat-files and returns the packages and relationships found within. func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + var errs error + data, err := parseAlpmDBEntry(reader) if err != nil { return nil, nil, err @@ -52,24 +55,25 @@ func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environ base := path.Dir(reader.RealPath) + var locs []file.Location + // replace the files found the pacman database with the files from the mtree These contain more metadata and // thus more useful. - files, fileLoc := fetchPkgFiles(base, resolver) - backups, backupLoc := fetchBackupFiles(base, resolver) - - var locs []file.Location - if fileLoc != nil { + files, fileLoc, err := fetchPkgFiles(base, resolver) + errs = unknown.Join(errs, err) + if err == nil { locs = append(locs, fileLoc.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)) data.Files = files } - - if backupLoc != nil { + backups, backupLoc, err := fetchBackupFiles(base, resolver) + errs = unknown.Join(errs, err) + if err == nil { locs = append(locs, backupLoc.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)) data.Backup = backups } if data.Package == "" { - return nil, nil, nil + return nil, nil, errs } return []pkg.Package{ @@ -79,63 +83,56 @@ func parseAlpmDB(_ context.Context, resolver file.Resolver, env *generic.Environ reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), locs..., ), - }, nil, nil + }, nil, errs } -func fetchPkgFiles(base string, resolver file.Resolver) ([]pkg.AlpmFileRecord, *file.Location) { +func fetchPkgFiles(base string, resolver file.Resolver) ([]pkg.AlpmFileRecord, file.Location, error) { // TODO: probably want to use MTREE and PKGINFO here target := path.Join(base, "mtree") loc, err := getLocation(target, resolver) if err != nil { log.WithFields("error", err, "path", target).Trace("failed to find mtree file") - return []pkg.AlpmFileRecord{}, nil - } - if loc == nil { - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to find mtree file: %w", err)) } - - reader, err := resolver.FileContentsByLocation(*loc) + reader, err := resolver.FileContentsByLocation(loc) if err != nil { - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to get contents: %w", err)) } defer internal.CloseAndLogError(reader, loc.RealPath) pkgFiles, err := parseMtree(reader) if err != nil { 
log.WithFields("error", err, "path", target).Trace("failed to parse mtree file") - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to parse mtree: %w", err)) } - return pkgFiles, loc + return pkgFiles, loc, nil } -func fetchBackupFiles(base string, resolver file.Resolver) ([]pkg.AlpmFileRecord, *file.Location) { +func fetchBackupFiles(base string, resolver file.Resolver) ([]pkg.AlpmFileRecord, file.Location, error) { // We only really do this to get any backup database entries from the files database target := filepath.Join(base, "files") loc, err := getLocation(target, resolver) if err != nil { log.WithFields("error", err, "path", target).Trace("failed to find alpm files") - return []pkg.AlpmFileRecord{}, nil - } - if loc == nil { - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to find alpm files: %w", err)) } - reader, err := resolver.FileContentsByLocation(*loc) + reader, err := resolver.FileContentsByLocation(loc) if err != nil { - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to get contents: %w", err)) } defer internal.CloseAndLogError(reader, loc.RealPath) filesMetadata, err := parseAlpmDBEntry(reader) if err != nil { - return []pkg.AlpmFileRecord{}, nil + return []pkg.AlpmFileRecord{}, loc, unknown.New(loc, fmt.Errorf("failed to parse alpm db entry: %w", err)) } if filesMetadata != nil { - return filesMetadata.Backup, loc + return filesMetadata.Backup, loc, nil } - return []pkg.AlpmFileRecord{}, loc + return []pkg.AlpmFileRecord{}, loc, nil } func parseAlpmDBEntry(reader io.Reader) (*parsedData, error) { @@ -171,20 +168,21 @@ func newScanner(reader io.Reader) *bufio.Scanner { return scanner } -func getLocation(path string, resolver file.Resolver) (*file.Location, error) { +func getLocation(path string, resolver file.Resolver) (file.Location, error) { + loc := file.NewLocation(path) locs, err := resolver.FilesByPath(path) if err != nil { - return nil, err + return loc, err } if len(locs) == 0 { - return nil, fmt.Errorf("could not find file: %s", path) + return loc, fmt.Errorf("could not find file: %s", path) } if len(locs) > 1 { log.WithFields("path", path).Trace("multiple files found for path, using first path") } - return &locs[0], nil + return locs[0], nil } func parseDatabase(b *bufio.Scanner) (*parsedData, error) { diff --git a/syft/pkg/cataloger/arch/test-fixtures/installed/var/lib/pacman/local/corrupt-0.2.1-3/desc b/syft/pkg/cataloger/arch/test-fixtures/installed/var/lib/pacman/local/corrupt-0.2.1-3/desc new file mode 100644 index 00000000000..ff403145b1b --- /dev/null +++ b/syft/pkg/cataloger/arch/test-fixtures/installed/var/lib/pacman/local/corrupt-0.2.1-3/desc @@ -0,0 +1,34 @@ +%NME% +tree-sitter + +%VER.6-1 + +%BASE% +tree-sitter + +%DESC% +Incremental parsing library + + +%BUILDDATE% +1714945746 + +%INSTALLDATE% +1715026360 + +%PACKA@archlinux.org> + +%SIZE% +223539 + +%REASON% +1 + +%LICENSE% +MIT + +%VALIDATION% +pgp + +%PROVIDE +libtree-sitter.so=0-64 diff --git a/syft/pkg/cataloger/binary/classifier_cataloger.go b/syft/pkg/cataloger/binary/classifier_cataloger.go index 7352202d4d8..05e1c622a07 100644 --- a/syft/pkg/cataloger/binary/classifier_cataloger.go +++ b/syft/pkg/cataloger/binary/classifier_cataloger.go @@ -6,8 +6,10 @@ package binary import ( "context" "encoding/json" + "fmt" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" 
"github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -59,12 +61,14 @@ func (c cataloger) Name() string { func (c cataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship + var errs error for _, cls := range c.classifiers { log.WithFields("classifier", cls.Class).Trace("cataloging binaries") newPkgs, err := catalog(resolver, cls) if err != nil { - log.WithFields("error", err, "classifier", cls.Class).Warn("unable to catalog binary package: %w", err) + log.WithFields("error", err, "classifier", cls.Class).Debugf("unable to catalog binary package: %v", err) + errs = unknown.Join(errs, fmt.Errorf("%s: %w", cls.Class, err)) continue } newPackages: @@ -82,7 +86,7 @@ func (c cataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Pac } } - return packages, relationships, nil + return packages, relationships, errs } // mergePackages merges information from the extra package into the target package @@ -98,6 +102,7 @@ func mergePackages(target *pkg.Package, extra *pkg.Package) { } func catalog(resolver file.Resolver, cls Classifier) (packages []pkg.Package, err error) { + var errs error locations, err := resolver.FilesByGlob(cls.FileGlob) if err != nil { return nil, err @@ -105,11 +110,12 @@ func catalog(resolver file.Resolver, cls Classifier) (packages []pkg.Package, er for _, location := range locations { pkgs, err := cls.EvidenceMatcher(cls, matcherContext{resolver: resolver, location: location}) if err != nil { - return nil, err + errs = unknown.Append(errs, location, err) + continue } packages = append(packages, pkgs...) } - return packages, nil + return packages, errs } // packagesMatch returns true if the binary packages "match" based on basic criteria diff --git a/syft/pkg/cataloger/binary/classifier_cataloger_test.go b/syft/pkg/cataloger/binary/classifier_cataloger_test.go index 1bba77b2584..f093b97cae4 100644 --- a/syft/pkg/cataloger/binary/classifier_cataloger_test.go +++ b/syft/pkg/cataloger/binary/classifier_cataloger_test.go @@ -1668,7 +1668,7 @@ func Test_Cataloger_ResilientToErrors(t *testing.T) { resolver := &panicyResolver{} _, _, err := c.Catalog(context.Background(), resolver) - assert.NoError(t, err) + assert.Error(t, err) assert.True(t, resolver.searchCalled) } diff --git a/syft/pkg/cataloger/binary/elf_package_cataloger.go b/syft/pkg/cataloger/binary/elf_package_cataloger.go index ae2b94d9e52..24b4c0b08c0 100644 --- a/syft/pkg/cataloger/binary/elf_package_cataloger.go +++ b/syft/pkg/cataloger/binary/elf_package_cataloger.go @@ -11,6 +11,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/mimetype" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/internal/unionreader" @@ -52,6 +53,7 @@ func (c *elfPackageCataloger) Name() string { } func (c *elfPackageCataloger) Catalog(_ context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { + var errs error locations, err := resolver.FilesByMIMEType(mimetype.ExecutableMIMETypeSet.List()...) 
if err != nil { return nil, nil, fmt.Errorf("unable to get binary files by mime type: %w", err) @@ -62,7 +64,8 @@ func (c *elfPackageCataloger) Catalog(_ context.Context, resolver file.Resolver) for _, location := range locations { notes, key, err := parseElfPackageNotes(resolver, location, c) if err != nil { - return nil, nil, err + errs = unknown.Append(errs, location, err) + continue } if notes == nil { continue @@ -87,7 +90,7 @@ func (c *elfPackageCataloger) Catalog(_ context.Context, resolver file.Resolver) // why not return relationships? We have an executable cataloger that will note the dynamic libraries imported by // each binary. After all files and packages are processed there is a final task that creates package-to-package // and package-to-file relationships based on the dynamic libraries imported by each binary. - return pkgs, nil, nil + return pkgs, nil, errs } func parseElfPackageNotes(resolver file.Resolver, location file.Location, c *elfPackageCataloger) (*elfBinaryPackageNotes, elfPackageKey, error) { @@ -104,7 +107,7 @@ func parseElfPackageNotes(resolver file.Resolver, location file.Location, c *elf if err != nil { log.WithFields("file", location.Path(), "error", err).Trace("unable to parse ELF notes") - return nil, elfPackageKey{}, nil + return nil, elfPackageKey{}, err } if notes == nil { @@ -173,7 +176,7 @@ func getELFNotes(r file.LocationReadCloser) (*elfBinaryPackageNotes, error) { if len(notes) > headerSize { var metadata elfBinaryPackageNotes newPayload := bytes.TrimRight(notes[headerSize:], "\x00") - if err := json.Unmarshal(newPayload, &metadata); err == nil { + if err = json.Unmarshal(newPayload, &metadata); err == nil { return &metadata, nil } log.WithFields("file", r.Location.Path(), "error", err).Trace("unable to unmarshal ELF package notes as JSON") diff --git a/syft/pkg/cataloger/binary/elf_package_cataloger_test.go b/syft/pkg/cataloger/binary/elf_package_cataloger_test.go index d9a97cc6476..0325b3083ad 100644 --- a/syft/pkg/cataloger/binary/elf_package_cataloger_test.go +++ b/syft/pkg/cataloger/binary/elf_package_cataloger_test.go @@ -3,6 +3,8 @@ package binary import ( "testing" + "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" @@ -14,6 +16,7 @@ func Test_ELF_Package_Cataloger(t *testing.T) { name string fixture string expected []pkg.Package + wantErr require.ErrorAssertionFunc }{ { name: "go case", @@ -63,6 +66,7 @@ func Test_ELF_Package_Cataloger(t *testing.T) { }, }, }, + wantErr: require.Error, }, { name: "fedora 64 bit binaries", @@ -116,6 +120,7 @@ func Test_ELF_Package_Cataloger(t *testing.T) { WithImageResolver(t, v.fixture). IgnoreLocationLayer(). // this fixture can be rebuilt, thus the layer ID will change Expects(v.expected, nil). + WithErrorAssertion(v.wantErr). 
TestCataloger(t, NewELFPackageCataloger()) }) } diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile index 42a74837c5a..a54ffdcc7fc 100644 --- a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile @@ -5,9 +5,11 @@ RUN dnf update -y; \ dnf clean all RUN mkdir -p /usr/local/bin/elftests/elfbinwithnestedlib RUN mkdir -p /usr/local/bin/elftests/elfbinwithsisterlib +RUN mkdir -p /usr/local/bin/elftests/elfbinwithcorrupt COPY ./elfbinwithnestedlib /usr/local/bin/elftests/elfbinwithnestedlib COPY ./elfbinwithsisterlib /usr/local/bin/elftests/elfbinwithsisterlib +COPY ./elfbinwithcorrupt /usr/local/bin/elftests/elfbinwithcorrupt ENV LD_LIBRARY_PATH=/usr/local/bin/elftests/elfbinwithnestedlib/bin/lib @@ -16,6 +18,8 @@ RUN make WORKDIR /usr/local/bin/elftests/elfbinwithsisterlib RUN make +WORKDIR /usr/local/bin/elftests/elfbinwithcorrupt +RUN make # let's make the test image smaller, since we only require the built binaries and supporting libraries FROM busybox:1.36.1-musl diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.cpp b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.cpp new file mode 100644 index 00000000000..9da59af37b5 --- /dev/null +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.cpp @@ -0,0 +1,6 @@ +#include +#include "hello_world.h" + +void print_hello_world() { + std::cout << "Hello, World!" << std::endl; +} diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.h b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.h new file mode 100644 index 00000000000..d4193a60961 --- /dev/null +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/hello_world.h @@ -0,0 +1,8 @@ + +#ifndef HELLO_WORLD_H +#define HELLO_WORLD_H + +// Function declaration for printing "Hello, World!" 
to stdout +void print_hello_world(); + +#endif // HELLO_WORLD_H diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/makefile b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/makefile new file mode 100644 index 00000000000..9b471a878a5 --- /dev/null +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/makefile @@ -0,0 +1,48 @@ +LDFLAGS := -L/lib64 -lstdc++ + +SRC_DIR := ./ +BUILD_DIR := ../build +BIN_DIR := ../bin +LIB_DIR := $(BIN_DIR)/lib + +LIB_NAME := hello_world +LIB_SRC := $(SRC_DIR)/hello_world.cpp +LIB_OBJ := $(BUILD_DIR)/$(LIB_NAME).o +LIB_SO := $(LIB_DIR)/lib$(LIB_NAME).so + +EXECUTABLE := elfbinwithnestedlib +EXEC_SRC := $(SRC_DIR)/testbin.cpp +EXEC_OBJ := $(BUILD_DIR)/$(EXECUTABLE).o + + + +all: testfixture + +$(LIB_SO): $(LIB_OBJ) | $(LIB_DIR) + $(CC) -shared -o $@ $< + echo '{ corrupt json "system": "syftsys","name": "libhello_world.so","version": "0.01","pure:0.01"}' | objcopy --add-section .note.package=/dev/stdin --set-section-flags .note.package=noload,readonly $@ + +$(LIB_OBJ): $(LIB_SRC) | $(BUILD_DIR) + $(CC) $(CFLAGS) -fPIC -c $< -o $@ + +$(EXEC_OBJ): $(EXEC_SRC) | $(BUILD_DIR) + $(CC) $(CFLAGS) -c $< -o $@ + +$(BIN_DIR): + mkdir -p $(BIN_DIR) +$(BUILD_DIR): + mkdir -p $(BUILD_DIR) +$(LIB_DIR): + mkdir -p $(LIB_DIR) + +$(BIN_DIR)/$(EXECUTABLE): $(EXEC_OBJ) $(LIB_SO) | $(BIN_DIR) + $(CC) $(CFLAGS) -o $@ $^ -L$(LIB_DIR) -l$(LIB_NAME) $(LDFLAGS) + echo '{corrupt json ..._syfttestfixture:0.01"}' | objcopy --add-section .note.package=/dev/stdin --set-section-flags .note.package=noload,readonly $@ + +testfixture: $(BIN_DIR)/$(EXECUTABLE) + +clean: + rm -rf $(BUILD_DIR) $(LIB_DIR) $(BIN_DIR) $(EXECUTABLE) + +.PHONY: all clean + diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/testbin.cpp b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/testbin.cpp new file mode 100644 index 00000000000..58a6e10506a --- /dev/null +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/elfsrc/testbin.cpp @@ -0,0 +1,8 @@ +#include "hello_world.h" + +int main() { + // Call the function from the shared library + print_hello_world(); + + return 0; +} diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/makefile b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/makefile new file mode 100644 index 00000000000..b53f429a381 --- /dev/null +++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/elfbinwithcorrupt/makefile @@ -0,0 +1,18 @@ +CC = g++ +CFLAGS = -std=c++17 -Wall -Wextra -pedantic +BUILD_DIR := ./build +BIN_DIR := ./bin +LIB_DIR := $(BIN_DIR)/lib + + + +all: testfixtures + +testfixtures: + $(MAKE) -C elfsrc + +clean: + rm -rf $(BUILD_DIR) $(BIN_DIR) + +.PHONY: all clean testfixtures + diff --git a/syft/pkg/cataloger/cpp/parse_conanfile.go b/syft/pkg/cataloger/cpp/parse_conanfile.go index 622acbdcb3c..2885b6f71b5 100644 --- a/syft/pkg/cataloger/cpp/parse_conanfile.go +++ b/syft/pkg/cataloger/cpp/parse_conanfile.go @@ -8,6 +8,7 @@ import ( "io" "strings" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -25,7 +26,7 @@ func parseConanfile(_ context.Context, _ file.Resolver, _ *generic.Environment, line, err := r.ReadString('\n') switch { case errors.Is(err, io.EOF): - return pkgs, nil, nil + return 
pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") case err != nil: return nil, nil, fmt.Errorf("failed to parse conanfile.txt file: %w", err) } diff --git a/syft/pkg/cataloger/cpp/parse_conanlock.go b/syft/pkg/cataloger/cpp/parse_conanlock.go index efdfcf08d42..1a1aa4d124d 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock.go @@ -5,6 +5,7 @@ import ( "encoding/json" "strings" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -72,7 +73,7 @@ func parseConanLock(_ context.Context, _ file.Resolver, _ *generic.Environment, } } - return pkgs, relationships, nil + return pkgs, relationships, unknown.IfEmptyf(pkgs, "unable to determine packages") } // handleConanLockV1 handles the parsing of conan lock v1 files (aka v0.4) diff --git a/syft/pkg/cataloger/cpp/parse_conanlock_test.go b/syft/pkg/cataloger/cpp/parse_conanlock_test.go index da143fe51b3..23fe24c28c4 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock_test.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock_test.go @@ -373,3 +373,10 @@ func TestParseConanLockV2(t *testing.T) { pkgtest.TestFileParser(t, fixture, parseConanLock, expected, expectedRelationships) } + +func Test_corruptConanlock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/conan.lock"). + WithError(). + TestParser(t, parseConanLock) +} diff --git a/syft/pkg/cataloger/cpp/test-fixtures/corrupt/conan.lock b/syft/pkg/cataloger/cpp/test-fixtures/corrupt/conan.lock new file mode 100644 index 00000000000..c7e82d59c95 --- /dev/null +++ b/syft/pkg/cataloger/cpp/test-fixtures/corrupt/conan.lock @@ -0,0 +1,10 @@ +{ + corrupt json + version": "0.5", + "requires": [ + "sound32/1.0#83d4b7bf607b3b60a6546f8b58b5cdd7%1675278904.0791488", + "matrix/1.1#905c3f0babc520684c84127378fefdd0%1675278901.7527816" + ], + "build_requires": [], + "python_requires": [] +} diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock.go b/syft/pkg/cataloger/dart/parse_pubspec_lock.go index 12009659a16..ebf41f822da 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock.go @@ -9,6 +9,7 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -85,7 +86,7 @@ func parsePubspecLock(_ context.Context, _ file.Resolver, _ *generic.Environment ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func (p *pubspecLockPackage) getVcsURL() string { diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go index 6904b88c6d6..5aa5fc702b5 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go @@ -106,3 +106,10 @@ func TestParsePubspecLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePubspecLock, expected, expectedRelationships) } + +func Test_corruptPubspecLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/pubspec.lock"). + WithError(). 
+ TestParser(t, parsePubspecLock) +} diff --git a/syft/pkg/cataloger/dart/test-fixtures/corrupt/pubspec.lock b/syft/pkg/cataloger/dart/test-fixtures/corrupt/pubspec.lock new file mode 100644 index 00000000000..583bf6702dd --- /dev/null +++ b/syft/pkg/cataloger/dart/test-fixtures/corrupt/pubspec.lock @@ -0,0 +1,7 @@ +pa +kages: + ale: + dependency: transitive + descr +s ps: + dart: ">=2.12.0 <3.0.0" diff --git a/syft/pkg/cataloger/debian/parse_dpkg_db.go b/syft/pkg/cataloger/debian/parse_dpkg_db.go index 8fc47eb4910..97a73c65642 100644 --- a/syft/pkg/cataloger/debian/parse_dpkg_db.go +++ b/syft/pkg/cataloger/debian/parse_dpkg_db.go @@ -15,6 +15,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -39,7 +40,7 @@ func parseDpkgDB(_ context.Context, resolver file.Resolver, env *generic.Environ pkgs = append(pkgs, p) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func findDpkgInfoFiles(name string, resolver file.Resolver, dbLocation file.Location) []file.Location { diff --git a/syft/pkg/cataloger/debian/parse_dpkg_db_test.go b/syft/pkg/cataloger/debian/parse_dpkg_db_test.go index 603de91e3a6..50ab3485ba4 100644 --- a/syft/pkg/cataloger/debian/parse_dpkg_db_test.go +++ b/syft/pkg/cataloger/debian/parse_dpkg_db_test.go @@ -257,6 +257,17 @@ func Test_parseDpkgStatus(t *testing.T) { } } +func Test_corruptEntry(t *testing.T) { + f, err := os.Open("test-fixtures/var/lib/dpkg/status.d/corrupt") + require.NoError(t, err) + t.Cleanup(func() { require.NoError(t, f.Close()) }) + + reader := bufio.NewReader(f) + + _, err = parseDpkgStatus(reader) + require.Error(t, err) +} + func TestSourceVersionExtract(t *testing.T) { tests := []struct { name string @@ -312,7 +323,7 @@ func Test_parseDpkgStatus_negativeCases(t *testing.T) { { name: "no more packages", input: `Package: apt`, - wantErr: require.NoError, + wantErr: requireAs(errors.New("unable to determine packages")), }, { name: "duplicated key", diff --git a/syft/pkg/cataloger/debian/test-fixtures/var/lib/dpkg/status.d/corrupt b/syft/pkg/cataloger/debian/test-fixtures/var/lib/dpkg/status.d/corrupt new file mode 100644 index 00000000000..aadb721975d --- /dev/null +++ b/syft/pkg/cataloger/debian/test-fixtures/var/lib/dpkg/status.d/corrupt @@ -0,0 +1,6 @@ +Pakij: apt +Stratus: install ok installed +Prioority: required +Section: admin +Insterface to the configuration settings + * apt-key as an interface to manage authentication keys diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go index 496148a35cb..2d980ca4ef3 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go @@ -8,6 +8,7 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/relationship" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -126,5 +127,5 @@ func parseDotnetDeps(_ context.Context, _ file.Resolver, _ *generic.Environment, // this will only consider package-to-package relationships. 
relationship.Sort(relationships) - return pkgs, relationships, nil + return pkgs, relationships, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go index bdf40851ab2..84878c86c1c 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go @@ -9,6 +9,13 @@ import ( "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" ) +func Test_corruptDotnetDeps(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/something.deps.json"). + WithError(). + TestParser(t, parseDotnetDeps) +} + func TestParseDotnetDeps(t *testing.T) { fixture := "test-fixtures/TestLibrary.deps.json" fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable.go b/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable.go index 3ec83f2298f..4ea4d1c18d7 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable.go @@ -28,30 +28,26 @@ func parseDotnetPortableExecutable(_ context.Context, _ file.Resolver, _ *generi peFile, err := pe.NewBytes(by, &pe.Options{}) if err != nil { - // TODO: known-unknown log.Tracef("unable to create PE instance for file '%s': %v", f.RealPath, err) - return nil, nil, nil + return nil, nil, err } err = peFile.Parse() if err != nil { - // TODO: known-unknown log.Tracef("unable to parse PE file '%s': %v", f.RealPath, err) - return nil, nil, nil + return nil, nil, err } versionResources, err := peFile.ParseVersionResources() if err != nil { - // TODO: known-unknown log.Tracef("unable to parse version resources in PE file: %s: %v", f.RealPath, err) - return nil, nil, nil + return nil, nil, fmt.Errorf("unable to parse version resources in PE file: %w", err) } dotNetPkg, err := buildDotNetPackage(versionResources, f) if err != nil { - // TODO: known-unknown - log.Tracef("unable to build dotnet package: %v", err) - return nil, nil, nil + log.Tracef("unable to build dotnet package for: %v %v", f.RealPath, err) + return nil, nil, err } return []pkg.Package{dotNetPkg}, nil, nil @@ -60,12 +56,12 @@ func parseDotnetPortableExecutable(_ context.Context, _ file.Resolver, _ *generi func buildDotNetPackage(versionResources map[string]string, f file.LocationReadCloser) (dnpkg pkg.Package, err error) { name := findName(versionResources) if name == "" { - return dnpkg, fmt.Errorf("unable to find PE name in file: %s", f.RealPath) + return dnpkg, fmt.Errorf("unable to find PE name in file") } version := findVersion(versionResources) if version == "" { - return dnpkg, fmt.Errorf("unable to find PE version in file: %s", f.RealPath) + return dnpkg, fmt.Errorf("unable to find PE version in file") } metadata := pkg.DotnetPortableExecutableEntry{ diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable_test.go b/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable_test.go index 4d915a50d2f..2be417d6a8e 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable_test.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_portable_executable_test.go @@ -297,6 +297,13 @@ func TestParseDotnetPortableExecutable(t *testing.T) { } } +func Test_corruptDotnetPE(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/something.exe"). + WithError(). 
+ TestParser(t, parseDotnetPortableExecutable) +} + func Test_extractVersion(t *testing.T) { tests := []struct { input string diff --git a/syft/pkg/cataloger/elixir/parse_mix_lock.go b/syft/pkg/cataloger/elixir/parse_mix_lock.go index dcd4a7858db..adcdfbd9e8e 100644 --- a/syft/pkg/cataloger/elixir/parse_mix_lock.go +++ b/syft/pkg/cataloger/elixir/parse_mix_lock.go @@ -9,6 +9,7 @@ import ( "regexp" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -22,25 +23,33 @@ var mixLockDelimiter = regexp.MustCompile(`[%{}\n" ,:]+`) // parseMixLock parses a mix.lock and returns the discovered Elixir packages. func parseMixLock(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + var errs error r := bufio.NewReader(reader) var packages []pkg.Package + lineNum := 0 for { + lineNum++ line, err := r.ReadString('\n') switch { case errors.Is(err, io.EOF): - return packages, nil, nil + if errs == nil { + errs = unknown.IfEmptyf(packages, "unable to determine packages") + } + return packages, nil, errs case err != nil: return nil, nil, fmt.Errorf("failed to parse mix.lock file: %w", err) } tokens := mixLockDelimiter.Split(line, -1) if len(tokens) < 6 { + errs = unknown.Appendf(errs, reader, "unable to read mix lock line %d: %s", lineNum, line) continue } name, version, hash, hashExt := tokens[1], tokens[4], tokens[5], tokens[len(tokens)-2] if name == "" { log.WithFields("path", reader.RealPath).Debug("skipping empty package name from mix.lock file") + errs = unknown.Appendf(errs, reader, "skipping empty package name from mix.lock file, for line: %d: %s", lineNum, line) continue } diff --git a/syft/pkg/cataloger/erlang/parse_otp_app.go b/syft/pkg/cataloger/erlang/parse_otp_app.go index dcea41c18e1..09edd1b3ae1 100644 --- a/syft/pkg/cataloger/erlang/parse_otp_app.go +++ b/syft/pkg/cataloger/erlang/parse_otp_app.go @@ -2,6 +2,7 @@ package erlang import ( "context" + "fmt" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" @@ -17,7 +18,7 @@ func parseOTPApp(_ context.Context, _ file.Resolver, _ *generic.Environment, rea // there are multiple file formats that use the *.app extension, so it's possible that this is not an OTP app file at all // ... which means we should not return an error here log.WithFields("error", err).Trace("unable to parse Erlang OTP app") - return nil, nil, nil + return nil, nil, fmt.Errorf("unable to parse Erlang OTP app") } var packages []pkg.Package diff --git a/syft/pkg/cataloger/erlang/parse_otp_app_test.go b/syft/pkg/cataloger/erlang/parse_otp_app_test.go index 9c0abf16edb..93c0aa238a9 100644 --- a/syft/pkg/cataloger/erlang/parse_otp_app_test.go +++ b/syft/pkg/cataloger/erlang/parse_otp_app_test.go @@ -41,3 +41,10 @@ func TestParseOTPApplication(t *testing.T) { }) } } + +func Test_corruptOtpApp(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/rabbitmq.app"). + WithError(). 
+ TestParser(t, parseOTPApp) +} diff --git a/syft/pkg/cataloger/erlang/parse_rebar_lock.go b/syft/pkg/cataloger/erlang/parse_rebar_lock.go index a173ec81bfd..e77053419cd 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock.go @@ -4,6 +4,7 @@ import ( "context" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -95,7 +96,7 @@ func parseRebarLock(_ context.Context, _ file.Resolver, _ *generic.Environment, p.SetID() packages = append(packages, *p) } - return packages, nil, nil + return packages, nil, unknown.IfEmptyf(packages, "unable to determine packages") } // integrity check diff --git a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go index 00d7a273066..67e309cd313 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go @@ -255,3 +255,10 @@ func TestParseRebarLock(t *testing.T) { }) } } + +func Test_corruptRebarLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/rebar.lock"). + WithError(). + TestParser(t, parseRebarLock) +} diff --git a/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rabbitmq.app b/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rabbitmq.app new file mode 100644 index 00000000000..5c8f73fd067 --- /dev/null +++ b/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rabbitmq.app @@ -0,0 +1,9 @@ +cation, 'rabbit', [ + {description, "RabbitMQ"}, + {vsn, "3.12.10"}, + {id, "v3.12.9-9-g1f61ca8"}, + {modules, ['amqqueue','background_gc']}, + {optional_itmq-server#1593 + {channel_max, 2047} + ]} +]}. \ No newline at end of file diff --git a/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rebar.lock b/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rebar.lock new file mode 100644 index 00000000000..ea6afc2a641 --- /dev/null +++ b/syft/pkg/cataloger/erlang/test-fixtures/corrupt/rebar.lock @@ -0,0 +1,11 @@ +{"1.2.0", +[{<<"certifi{pkg,<<"certifi">>,<<"2.9.0">>},0}, + {<<"idna">>,{pkg,<<"idpkg,<<"parse_trans">>,<<"3.3.1">>},0}, + {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.6">>},0}, + {<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.7.0">>},0}]}. 
+[ +{pkg_hash,[ + {<<"certifi">>, <<"6F2A475689DD47F19FB74334859D460A2DC4E3252A3324BD2111B8F0429E7E21">>}, + {<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>}, + {<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>}, + {<<"mimerl diff --git a/syft/pkg/cataloger/generic/cataloger.go b/syft/pkg/cataloger/generic/cataloger.go index 49e1b47b936..f955ed35076 100644 --- a/syft/pkg/cataloger/generic/cataloger.go +++ b/syft/pkg/cataloger/generic/cataloger.go @@ -6,6 +6,7 @@ import ( "github.com/anchore/go-logger" "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" @@ -151,6 +152,7 @@ func (c *Cataloger) Name() string { func (c *Cataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship + var errs error logger := log.Nested("cataloger", c.upstreamCataloger) @@ -166,7 +168,8 @@ func (c *Cataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg. discoveredPackages, discoveredRelationships, err := invokeParser(ctx, resolver, location, logger, parser, &env) if err != nil { - continue // logging is handled within invokeParser + // parsers may return errors and valid packages / relationships + errs = unknown.Append(errs, location, err) } for _, p := range discoveredPackages { @@ -176,7 +179,7 @@ func (c *Cataloger) Catalog(ctx context.Context, resolver file.Resolver) ([]pkg. relationships = append(relationships, discoveredRelationships...) } - return c.process(ctx, resolver, packages, relationships, nil) + return c.process(ctx, resolver, packages, relationships, errs) } func (c *Cataloger) process(ctx context.Context, resolver file.Resolver, pkgs []pkg.Package, rels []artifact.Relationship, err error) ([]pkg.Package, []artifact.Relationship, error) { @@ -196,11 +199,11 @@ func invokeParser(ctx context.Context, resolver file.Resolver, location file.Loc discoveredPackages, discoveredRelationships, err := parser(ctx, resolver, env, file.NewLocationReadCloser(location, contentReader)) if err != nil { - logger.WithFields("location", location.RealPath, "error", err).Warnf("cataloger failed") - return nil, nil, err + // these errors are propagated up, and are likely to be coordinate errors + logger.WithFields("location", location.RealPath, "error", err).Trace("cataloger returned errors") } - return discoveredPackages, discoveredRelationships, nil + return discoveredPackages, discoveredRelationships, err } // selectFiles takes a set of file trees and resolves and file references of interest for future cataloging diff --git a/syft/pkg/cataloger/generic/cataloger_test.go b/syft/pkg/cataloger/generic/cataloger_test.go index e971b4459f4..1941b93f389 100644 --- a/syft/pkg/cataloger/generic/cataloger_test.go +++ b/syft/pkg/cataloger/generic/cataloger_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -187,3 +188,27 @@ func TestClosesFileOnParserPanic(t *testing.T) { }) require.True(t, spy.closed) } + +func Test_genericCatalogerReturnsErrors(t *testing.T) { + genericErrorReturning := NewCataloger("error 
returning").WithParserByGlobs(func(ctx context.Context, resolver file.Resolver, environment *Environment, locationReader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + return []pkg.Package{ + { + Name: "some-package-" + locationReader.Path(), + }, + }, nil, unknown.Newf(locationReader, "unable to read") + }, "**/*") + + m := file.NewMockResolverForPaths( + "test-fixtures/a-path.txt", + "test-fixtures/empty.txt", + ) + + got, _, errs := genericErrorReturning.Catalog(context.TODO(), m) + + // require packages and errors + require.NotEmpty(t, got) + + unknowns, others := unknown.ExtractCoordinateErrors(errs) + require.NotEmpty(t, unknowns) + require.Empty(t, others) +} diff --git a/syft/pkg/cataloger/gentoo/parse_portage_contents.go b/syft/pkg/cataloger/gentoo/parse_portage_contents.go index 553976ea251..56370d0124b 100644 --- a/syft/pkg/cataloger/gentoo/parse_portage_contents.go +++ b/syft/pkg/cataloger/gentoo/parse_portage_contents.go @@ -35,7 +35,7 @@ func parsePortageContents(_ context.Context, resolver file.Resolver, _ *generic. name, version := cpvMatch[1], cpvMatch[2] if name == "" || version == "" { log.WithFields("path", reader.Location.RealPath).Warnf("failed to parse portage name and version") - return nil, nil, nil + return nil, nil, fmt.Errorf("failed to parse portage name and version") } p := pkg.Package{ diff --git a/syft/pkg/cataloger/githubactions/package.go b/syft/pkg/cataloger/githubactions/package.go index 7d6341d3c2c..ff1567a913a 100644 --- a/syft/pkg/cataloger/githubactions/package.go +++ b/syft/pkg/cataloger/githubactions/package.go @@ -1,6 +1,7 @@ package githubactions import ( + "fmt" "strings" "github.com/anchore/packageurl-go" @@ -9,19 +10,19 @@ import ( "github.com/anchore/syft/syft/pkg" ) -func newPackageFromUsageStatement(use string, location file.Location) *pkg.Package { +func newPackageFromUsageStatement(use string, location file.Location) (*pkg.Package, error) { name, version := parseStepUsageStatement(use) if name == "" { log.WithFields("file", location.RealPath, "statement", use).Trace("unable to parse github action usage statement") - return nil + return nil, fmt.Errorf("unable to parse github action usage statement") } if strings.Contains(name, ".github/workflows/") { - return newGithubActionWorkflowPackageUsage(name, version, location) + return newGithubActionWorkflowPackageUsage(name, version, location), nil } - return newGithubActionPackageUsage(name, version, location) + return newGithubActionPackageUsage(name, version, location), nil } func newGithubActionWorkflowPackageUsage(name, version string, workflowLocation file.Location) *pkg.Package { diff --git a/syft/pkg/cataloger/githubactions/parse_composite_action.go b/syft/pkg/cataloger/githubactions/parse_composite_action.go index 99a20d282e7..3236679deab 100644 --- a/syft/pkg/cataloger/githubactions/parse_composite_action.go +++ b/syft/pkg/cataloger/githubactions/parse_composite_action.go @@ -7,6 +7,7 @@ import ( "gopkg.in/yaml.v3" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -24,14 +25,14 @@ type compositeActionRunsDef struct { } func parseCompositeActionForActionUsage(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { - contents, err := io.ReadAll(reader) - if err != nil { - return nil, nil, fmt.Errorf("unable to read yaml composite action file: %w", err) + contents, 
errs := io.ReadAll(reader) + if errs != nil { + return nil, nil, fmt.Errorf("unable to read yaml composite action file: %w", errs) } var ca compositeActionDef - if err = yaml.Unmarshal(contents, &ca); err != nil { - return nil, nil, fmt.Errorf("unable to parse yaml composite action file: %w", err) + if errs = yaml.Unmarshal(contents, &ca); errs != nil { + return nil, nil, fmt.Errorf("unable to parse yaml composite action file: %w", errs) } // we use a collection to help with deduplication before raising to higher level processing @@ -42,11 +43,14 @@ func parseCompositeActionForActionUsage(_ context.Context, _ file.Resolver, _ *g continue } - p := newPackageFromUsageStatement(step.Uses, reader.Location) + p, err := newPackageFromUsageStatement(step.Uses, reader.Location) + if err != nil { + errs = unknown.Append(errs, reader, err) + } if p != nil { pkgs.Add(*p) } } - return pkgs.Sorted(), nil, nil + return pkgs.Sorted(), nil, errs } diff --git a/syft/pkg/cataloger/githubactions/parse_composite_action_test.go b/syft/pkg/cataloger/githubactions/parse_composite_action_test.go index e39e18e1a32..080d8081667 100644 --- a/syft/pkg/cataloger/githubactions/parse_composite_action_test.go +++ b/syft/pkg/cataloger/githubactions/parse_composite_action_test.go @@ -33,3 +33,10 @@ func Test_parseCompositeActionForActionUsage(t *testing.T) { var expectedRelationships []artifact.Relationship pkgtest.TestFileParser(t, fixture, parseCompositeActionForActionUsage, expected, expectedRelationships) } + +func Test_corruptCompositeAction(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/composite-action.yaml"). + WithError(). + TestParser(t, parseCompositeActionForActionUsage) +} diff --git a/syft/pkg/cataloger/githubactions/parse_workflow.go b/syft/pkg/cataloger/githubactions/parse_workflow.go index 880248cca10..2480caf08ff 100644 --- a/syft/pkg/cataloger/githubactions/parse_workflow.go +++ b/syft/pkg/cataloger/githubactions/parse_workflow.go @@ -7,6 +7,7 @@ import ( "gopkg.in/yaml.v3" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -37,14 +38,14 @@ type stepDef struct { } func parseWorkflowForWorkflowUsage(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { - contents, err := io.ReadAll(reader) - if err != nil { - return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", err) + contents, errs := io.ReadAll(reader) + if errs != nil { + return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", errs) } var wf workflowDef - if err = yaml.Unmarshal(contents, &wf); err != nil { - return nil, nil, fmt.Errorf("unable to parse yaml workflow file: %w", err) + if errs = yaml.Unmarshal(contents, &wf); errs != nil { + return nil, nil, fmt.Errorf("unable to parse yaml workflow file: %w", errs) } // we use a collection to help with deduplication before raising to higher level processing @@ -52,25 +53,28 @@ func parseWorkflowForWorkflowUsage(_ context.Context, _ file.Resolver, _ *generi for _, job := range wf.Jobs { if job.Uses != "" { - p := newPackageFromUsageStatement(job.Uses, reader.Location) + p, err := newPackageFromUsageStatement(job.Uses, reader.Location) + if err != nil { + errs = unknown.Append(errs, reader, err) + } if p != nil { pkgs.Add(*p) } } } - return pkgs.Sorted(), nil, nil + return pkgs.Sorted(), nil, errs } func parseWorkflowForActionUsage(_ 
context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { - contents, err := io.ReadAll(reader) - if err != nil { - return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", err) + contents, errs := io.ReadAll(reader) + if errs != nil { + return nil, nil, fmt.Errorf("unable to read yaml workflow file: %w", errs) } var wf workflowDef - if err = yaml.Unmarshal(contents, &wf); err != nil { - return nil, nil, fmt.Errorf("unable to parse yaml workflow file: %w", err) + if errs = yaml.Unmarshal(contents, &wf); errs != nil { + return nil, nil, fmt.Errorf("unable to parse yaml workflow file: %w", errs) } // we use a collection to help with deduplication before raising to higher level processing @@ -81,12 +85,15 @@ func parseWorkflowForActionUsage(_ context.Context, _ file.Resolver, _ *generic. if step.Uses == "" { continue } - p := newPackageFromUsageStatement(step.Uses, reader.Location) + p, err := newPackageFromUsageStatement(step.Uses, reader.Location) + if err != nil { + errs = unknown.Append(errs, reader, err) + } if p != nil { pkgs.Add(*p) } } } - return pkgs.Sorted(), nil, nil + return pkgs.Sorted(), nil, errs } diff --git a/syft/pkg/cataloger/githubactions/parse_workflow_test.go b/syft/pkg/cataloger/githubactions/parse_workflow_test.go index f5e5128b41f..52c4e1ebd0c 100644 --- a/syft/pkg/cataloger/githubactions/parse_workflow_test.go +++ b/syft/pkg/cataloger/githubactions/parse_workflow_test.go @@ -86,3 +86,17 @@ func Test_parseWorkflowForWorkflowUsage(t *testing.T) { var expectedRelationships []artifact.Relationship pkgtest.TestFileParser(t, fixture, parseWorkflowForWorkflowUsage, expected, expectedRelationships) } + +func Test_corruptActionWorkflow(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/workflow-multi-job.yaml"). + WithError(). + TestParser(t, parseWorkflowForActionUsage) +} + +func Test_corruptWorkflowWorkflow(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/workflow-multi-job.yaml"). + WithError(). 
+ TestParser(t, parseWorkflowForWorkflowUsage) +} diff --git a/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/composite-action.yaml b/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/composite-action.yaml new file mode 100644 index 00000000000..44526253e18 --- /dev/null +++ b/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/composite-action.yaml @@ -0,0 +1,13 @@ +name: "Bootstrap" +description: "Bootstrap all tools and dependencies" +ints: + go-version: + descrapt-packages: + description: "Space delimited list of tools to install via apt" + default: "libxml2-utils" + +rns: + us all cache fingerprints + shell: bash + run: make fingerprints + diff --git a/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/workflow-multi-job.yaml b/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/workflow-multi-job.yaml new file mode 100644 index 00000000000..f9c2c057040 --- /dev/null +++ b/syft/pkg/cataloger/githubactions/test-fixtures/corrupt/workflow-multi-job.yaml @@ -0,0 +1,16 @@ +name: "Validations" + +on: + workflow_dispatch: + pull_request: + push: + branches: + - main + +jbs + + Statnapshot + key: snapshot-build-${{ github.run_id }} + + - name: Run CLI Tests (Linux) + run: make cli diff --git a/syft/pkg/cataloger/golang/parse_go_binary.go b/syft/pkg/cataloger/golang/parse_go_binary.go index 3f62ea4d876..bf3a6d722eb 100644 --- a/syft/pkg/cataloger/golang/parse_go_binary.go +++ b/syft/pkg/cataloger/golang/parse_go_binary.go @@ -66,9 +66,9 @@ func (c *goBinaryCataloger) parseGoBinary(_ context.Context, resolver file.Resol if err != nil { return nil, nil, err } + defer internal.CloseAndLogError(reader.ReadCloser, reader.RealPath) - mods := scanFile(unionReader, reader.RealPath) - internal.CloseAndLogError(reader.ReadCloser, reader.RealPath) + mods, errs := scanFile(reader.Location, unionReader) var rels []artifact.Relationship for _, mod := range mods { @@ -81,7 +81,7 @@ func (c *goBinaryCataloger) parseGoBinary(_ context.Context, resolver file.Resol pkgs = append(pkgs, depPkgs...) } - return pkgs, rels, nil + return pkgs, rels, errs } func createModuleRelationships(main pkg.Package, deps []pkg.Package) []artifact.Relationship { diff --git a/syft/pkg/cataloger/golang/parse_go_mod_test.go b/syft/pkg/cataloger/golang/parse_go_mod_test.go index 19ae7f33b70..20538e4c78d 100644 --- a/syft/pkg/cataloger/golang/parse_go_mod_test.go +++ b/syft/pkg/cataloger/golang/parse_go_mod_test.go @@ -157,3 +157,11 @@ func Test_GoSumHashes(t *testing.T) { }) } } + +func Test_corruptGoMod(t *testing.T) { + c := NewGoModuleFileCataloger(DefaultCatalogerConfig().WithSearchRemoteLicenses(false)) + pkgtest.NewCatalogTester(). + FromDirectory(t, "test-fixtures/corrupt"). + WithError(). + TestCataloger(t, c) +} diff --git a/syft/pkg/cataloger/golang/scan_binary.go b/syft/pkg/cataloger/golang/scan_binary.go index 720008045a8..bef5c1b9910 100644 --- a/syft/pkg/cataloger/golang/scan_binary.go +++ b/syft/pkg/cataloger/golang/scan_binary.go @@ -9,6 +9,8 @@ import ( "github.com/kastenhq/goversion/version" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/internal/unionreader" ) @@ -19,20 +21,21 @@ type extendedBuildInfo struct { } // scanFile scans file to try to report the Go and module versions. 
-func scanFile(reader unionreader.UnionReader, filename string) []*extendedBuildInfo { +func scanFile(location file.Location, reader unionreader.UnionReader) ([]*extendedBuildInfo, error) { // NOTE: multiple readers are returned to cover universal binaries, which are files // with more than one binary - readers, err := unionreader.GetReaders(reader) - if err != nil { - log.WithFields("error", err).Warnf("failed to open a golang binary") - return nil + readers, errs := unionreader.GetReaders(reader) + if errs != nil { + log.WithFields("error", errs).Warnf("failed to open a golang binary") + return nil, fmt.Errorf("failed to open a golang binary: %w", errs) } var builds []*extendedBuildInfo for _, r := range readers { bi, err := getBuildInfo(r) if err != nil { - log.WithFields("file", filename, "error", err).Trace("unable to read golang buildinfo") + log.WithFields("file", location.RealPath, "error", err).Trace("unable to read golang buildinfo") + continue } // it's possible the reader just isn't a go binary, in which case just skip it @@ -42,23 +45,25 @@ func scanFile(reader unionreader.UnionReader, filename string) []*extendedBuildI v, err := getCryptoInformation(r) if err != nil { - log.WithFields("file", filename, "error", err).Trace("unable to read golang version info") + log.WithFields("file", location.RealPath, "error", err).Trace("unable to read golang version info") // don't skip this build info. // we can still catalog packages, even if we can't get the crypto information + errs = unknown.Appendf(errs, location, "unable to read golang version info: %w", err) } arch := getGOARCH(bi.Settings) if arch == "" { arch, err = getGOARCHFromBin(r) if err != nil { - log.WithFields("file", filename, "error", err).Trace("unable to read golang arch info") + log.WithFields("file", location.RealPath, "error", err).Trace("unable to read golang arch info") // don't skip this build info. 
// we can still catalog packages, even if we can't get the arch information + errs = unknown.Appendf(errs, location, "unable to read golang arch info: %w", err) } } builds = append(builds, &extendedBuildInfo{BuildInfo: bi, cryptoSettings: v, arch: arch}) } - return builds + return builds, errs } func getCryptoInformation(reader io.ReaderAt) ([]string, error) { diff --git a/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.mod b/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.mod new file mode 100644 index 00000000000..1e345eec5e6 --- /dev/null +++ b/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.mod @@ -0,0 +1,11 @@ +module github.com/anchore/syft + +go 1.18 + +ruire ( + github.com/CycloneDX/cyclonedx-go v0.7.0 + github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d + github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect +) + +replace github.com/CycloneDX/cyclonedx-go => github.com/CycloneDX/cyclonedx-go v0.6.0 diff --git a/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.sum b/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.sum new file mode 100644 index 00000000000..e80236bebe8 --- /dev/null +++ b/syft/pkg/cataloger/golang/test-fixtures/corrupt/go.sum @@ -0,0 +1,4 @@ +github.com/CycloneDX/cyclonedx-go v0.6.0/go.mod h1:nQCiF4Tvrg5Ieu8qPhYMvzPGMu5I7fANZkrSsJjl5mg= +github.com/Cycpansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github6IF +github.com/stretchr/test4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock.go b/syft/pkg/cataloger/haskell/parse_stack_lock.go index eb8b854754f..505e2a61265 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock.go @@ -9,6 +9,7 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -50,7 +51,7 @@ func parseStackLock(_ context.Context, _ file.Resolver, _ *generic.Environment, if err := yaml.Unmarshal(bytes, &lockFile); err != nil { log.WithFields("error", err).Tracef("failed to parse stack.yaml.lock file %q", reader.RealPath) - return nil, nil, nil + return nil, nil, fmt.Errorf("failed to parse stack.yaml.lock file") } var ( @@ -82,7 +83,7 @@ func parseStackLock(_ context.Context, _ file.Resolver, _ *generic.Environment, ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func parseStackPackageEncoding(pkgEncoding string) (name, version, hash string) { diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go index 682aa343234..ee84ae092a6 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go @@ -130,3 +130,10 @@ func TestParseStackLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parseStackLock, expectedPkgs, expectedRelationships) } + +func Test_corruptStackLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/stack.yaml.lock"). + WithError(). 
+ TestParser(t, parseStackLock) +} diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml.go b/syft/pkg/cataloger/haskell/parse_stack_yaml.go index 4ecb6af96ac..d762a2b0660 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml.go @@ -8,6 +8,7 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -31,7 +32,7 @@ func parseStackYaml(_ context.Context, _ file.Resolver, _ *generic.Environment, if err := yaml.Unmarshal(bytes, &stackFile); err != nil { log.WithFields("error", err).Tracef("failed to parse stack.yaml file %q", reader.RealPath) - return nil, nil, nil + return nil, nil, fmt.Errorf("failed to parse stack.yaml file") } var pkgs []pkg.Package @@ -50,5 +51,5 @@ func parseStackYaml(_ context.Context, _ file.Resolver, _ *generic.Environment, ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go index 1aa82535797..ee458639648 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go @@ -110,3 +110,10 @@ func TestParseStackYaml(t *testing.T) { pkgtest.TestFileParser(t, fixture, parseStackYaml, expectedPkgs, expectedRelationships) } + +func Test_corruptStackYaml(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/stack.yaml"). + WithError(). + TestParser(t, parseStackYaml) +} diff --git a/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml b/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml new file mode 100644 index 00000000000..918d0bce7fd --- /dev/null +++ b/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml @@ -0,0 +1,6 @@ +flag +extra-package-dbs: [] +packa@sha256:fbcf49ecfc3d4da53e797fd0275264cba776ffa324ee223e2a3f4ec2d2c9c4a6,2165 + - stm-2.5.0.2@sha256:e4dc6473faaa75fbd7eccab4e3ee1d651d75bb0e49946ef0b8b751ccde771a55,2314 +ghc-options: + "$everything": -haddock diff --git a/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml.lock b/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml.lock new file mode 100644 index 00000000000..138e87ad516 --- /dev/null +++ b/syft/pkg/cataloger/haskell/test-fixtures/corrupt/stack.yaml.lock @@ -0,0 +1,8 @@ +packages +-comp +al: + commit: a5847301404583e16d55cd4d051b8e605d704fbc + git: https://github.com/runtimeverification/haskell-backend.git + subdir: kore +snapshots +- complete//raw.github diff --git a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go index ae26bb6d13b..96bd79450fb 100644 --- a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go +++ b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go @@ -48,7 +48,6 @@ type CatalogTester struct { func NewCatalogTester() *CatalogTester { return &CatalogTester{ - wantErr: require.NoError, locationComparer: cmptest.DefaultLocationComparer, licenseComparer: cmptest.DefaultLicenseComparer, packageStringer: stringPackage, @@ -113,7 +112,6 @@ func (p *CatalogTester) WithEnv(env *generic.Environment) *CatalogTester { } func (p *CatalogTester) WithError() *CatalogTester { - p.assertResultExpectations = true p.wantErr = require.Error return p } @@ -226,7 +224,10 @@ func (p *CatalogTester) 
IgnoreUnfulfilledPathResponses(paths ...string) *Catalog func (p *CatalogTester) TestParser(t *testing.T, parser generic.Parser) { t.Helper() pkgs, relationships, err := parser(context.Background(), p.resolver, p.env, p.reader) - p.wantErr(t, err) + // only test for errors if explicitly requested + if p.wantErr != nil { + p.wantErr(t, err) + } p.assertPkgs(t, pkgs, relationships) } @@ -247,8 +248,12 @@ func (p *CatalogTester) TestCataloger(t *testing.T, cataloger pkg.Cataloger) { assert.ElementsMatchf(t, p.expectedContentQueries, resolver.AllContentQueries(), "unexpected content queries observed: diff %s", cmp.Diff(p.expectedContentQueries, resolver.AllContentQueries())) } - if p.assertResultExpectations { + // only test for errors if explicitly requested + if p.wantErr != nil { p.wantErr(t, err) + } + + if p.assertResultExpectations { p.assertPkgs(t, pkgs, relationships) } @@ -256,7 +261,7 @@ func (p *CatalogTester) TestCataloger(t *testing.T, cataloger pkg.Cataloger) { a(t, pkgs, relationships) } - if !p.assertResultExpectations && len(p.customAssertions) == 0 { + if !p.assertResultExpectations && len(p.customAssertions) == 0 && p.wantErr == nil { resolver.PruneUnfulfilledPathResponses(p.ignoreUnfulfilledPathResponses, p.ignoreAnyUnfulfilledPaths...) // if we aren't testing the results, we should focus on what was searched for (for glob-centric tests) diff --git a/syft/pkg/cataloger/java/archive_parser.go b/syft/pkg/cataloger/java/archive_parser.go index 740ad0071a4..dc5ff5f1336 100644 --- a/syft/pkg/cataloger/java/archive_parser.go +++ b/syft/pkg/cataloger/java/archive_parser.go @@ -16,6 +16,7 @@ import ( intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/licenses" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -133,14 +134,22 @@ func (j *archiveParser) parse(ctx context.Context) ([]pkg.Package, []artifact.Re } pkgs = append(pkgs, auxPkgs...) + var errs error if j.detectNested { // find nested java archive packages nestedPkgs, nestedRelationships, err := j.discoverPkgsFromNestedArchives(ctx, parentPkg) if err != nil { - return nil, nil, err + errs = unknown.Append(errs, j.location, err) } pkgs = append(pkgs, nestedPkgs...) relationships = append(relationships, nestedRelationships...) + } else { + // .jar and .war files are present in archives, are others? or generally just consider them top-level? + nestedArchives := j.fileManifest.GlobMatch(true, "*.jar", "*.war") + if len(nestedArchives) > 0 { + slices.Sort(nestedArchives) + errs = unknown.Appendf(errs, j.location, "nested archives not cataloged: %v", strings.Join(nestedArchives, ", ")) + } } // lastly, add the parent package to the list (assuming the parent exists) @@ -166,7 +175,11 @@ func (j *archiveParser) parse(ctx context.Context) ([]pkg.Package, []artifact.Re p.SetID() } - return pkgs, relationships, nil + if len(pkgs) == 0 { + errs = unknown.Appendf(errs, j.location, "no package identified in archive") + } + + return pkgs, relationships, errs } // discoverMainPackage parses the root Java manifest used as the parent package to all discovered nested packages. 
@@ -283,14 +296,14 @@ func (j *archiveParser) findLicenseFromJavaMetadata(ctx context.Context, groupID if parsedPom != nil { pomLicenses, err = j.maven.resolveLicenses(ctx, parsedPom.project) if err != nil { - log.WithFields("error", err, "mavenID", j.maven.getMavenID(ctx, parsedPom.project)).Debug("error attempting to resolve pom licenses") + log.WithFields("error", err, "mavenID", j.maven.getMavenID(ctx, parsedPom.project)).Trace("error attempting to resolve pom licenses") } } if err == nil && len(pomLicenses) == 0 { pomLicenses, err = j.maven.findLicenses(ctx, groupID, artifactID, version) if err != nil { - log.WithFields("error", err, "mavenID", mavenID{groupID, artifactID, version}).Debug("error attempting to find licenses") + log.WithFields("error", err, "mavenID", mavenID{groupID, artifactID, version}).Trace("error attempting to find licenses") } } @@ -300,7 +313,7 @@ func (j *archiveParser) findLicenseFromJavaMetadata(ctx context.Context, groupID groupID = strings.Join(packages[:len(packages)-1], ".") pomLicenses, err = j.maven.findLicenses(ctx, groupID, artifactID, version) if err != nil { - log.WithFields("error", err, "mavenID", mavenID{groupID, artifactID, version}).Debug("error attempting to find sub-group licenses") + log.WithFields("error", err, "mavenID", mavenID{groupID, artifactID, version}).Trace("error attempting to find sub-group licenses") } } @@ -630,7 +643,7 @@ func newPackageFromMavenData(ctx context.Context, r *mavenResolver, pomPropertie } if err != nil { - log.WithFields("error", err, "mavenID", mavenID{pomProperties.GroupID, pomProperties.ArtifactID, pomProperties.Version}).Debug("error attempting to resolve licenses") + log.WithFields("error", err, "mavenID", mavenID{pomProperties.GroupID, pomProperties.ArtifactID, pomProperties.Version}).Trace("error attempting to resolve licenses") } licenses := make([]pkg.License, 0) diff --git a/syft/pkg/cataloger/java/archive_parser_test.go b/syft/pkg/cataloger/java/archive_parser_test.go index 2faf003d288..6fa0acc4755 100644 --- a/syft/pkg/cataloger/java/archive_parser_test.go +++ b/syft/pkg/cataloger/java/archive_parser_test.go @@ -91,10 +91,12 @@ func TestParseJar(t *testing.T) { fixture string expected map[string]pkg.Package ignoreExtras []string + wantErr require.ErrorAssertionFunc }{ { name: "example-jenkins-plugin", fixture: "test-fixtures/java-builds/packages/example-jenkins-plugin.hpi", + wantErr: require.Error, // there are nested jars, which are not scanned and result in unknown errors ignoreExtras: []string{ "Plugin-Version", // has dynamic date "Built-By", // podman returns the real UID @@ -153,6 +155,7 @@ func TestParseJar(t *testing.T) { { name: "example-java-app-gradle", fixture: "test-fixtures/java-builds/packages/example-java-app-gradle-0.1.0.jar", + wantErr: require.NoError, // no nested jars expected: map[string]pkg.Package{ "example-java-app-gradle": { Name: "example-java-app-gradle", @@ -226,6 +229,7 @@ func TestParseJar(t *testing.T) { { name: "example-java-app-maven", fixture: "test-fixtures/java-builds/packages/example-java-app-maven-0.1.0.jar", + wantErr: require.NoError, // no nested jars ignoreExtras: []string{ "Build-Jdk", // can't guarantee the JDK used at build time "Built-By", // podman returns the real UID @@ -351,13 +355,15 @@ func TestParseJar(t *testing.T) { require.NoError(t, err) actual, _, err := parser.parse(context.Background()) - require.NoError(t, err) + if test.wantErr != nil { + test.wantErr(t, err) + } if len(actual) != len(test.expected) { for _, a := range actual { t.Log(" 
", a) } - t.Fatalf("unexpected package count: %d!=%d", len(actual), len(test.expected)) + t.Fatalf("unexpected package count; expected: %d got: %d", len(test.expected), len(actual)) } var parent *pkg.Package @@ -1488,3 +1494,11 @@ func run(t testing.TB, cmd *exec.Cmd) { func ptr[T any](value T) *T { return &value } + +func Test_corruptJarArchive(t *testing.T) { + ap := newGenericArchiveParserAdapter(DefaultArchiveCatalogerConfig()) + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/example.jar"). + WithError(). + TestParser(t, ap.parseJavaArchive) +} diff --git a/syft/pkg/cataloger/java/maven_resolver.go b/syft/pkg/cataloger/java/maven_resolver.go index f9d375b8ea3..f9e37389542 100644 --- a/syft/pkg/cataloger/java/maven_resolver.go +++ b/syft/pkg/cataloger/java/maven_resolver.go @@ -79,7 +79,7 @@ func (r *mavenResolver) resolvePropertyValue(ctx context.Context, propertyValue } resolved, err := r.resolveExpression(ctx, resolutionContext, *propertyValue, resolvingProperties) if err != nil { - log.WithFields("error", err, "propertyValue", *propertyValue).Debug("error resolving maven property") + log.WithFields("error", err, "propertyValue", *propertyValue).Trace("error resolving maven property") return "" } return resolved diff --git a/syft/pkg/cataloger/java/parse_pom_xml.go b/syft/pkg/cataloger/java/parse_pom_xml.go index 370ebc2c84a..9a5f391d01a 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml.go +++ b/syft/pkg/cataloger/java/parse_pom_xml.go @@ -15,6 +15,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -41,11 +42,13 @@ func (p pomXMLCataloger) Catalog(ctx context.Context, fileResolver file.Resolver r := newMavenResolver(fileResolver, p.cfg) + var errs error var poms []*gopom.Project for _, pomLocation := range locations { pom, err := readPomFromLocation(fileResolver, pomLocation) if err != nil || pom == nil { log.WithFields("error", err, "pomLocation", pomLocation).Debug("error while reading pom") + errs = unknown.Appendf(errs, pomLocation, "error reading pom.xml: %w", err) continue } @@ -60,7 +63,7 @@ func (p pomXMLCataloger) Catalog(ctx context.Context, fileResolver file.Resolver for _, pom := range poms { pkgs = append(pkgs, processPomXML(ctx, r, pom, r.pomLocations[pom])...) } - return pkgs, nil, nil + return pkgs, nil, errs } func readPomFromLocation(fileResolver file.Resolver, pomLocation file.Location) (*gopom.Project, error) { diff --git a/syft/pkg/cataloger/java/parse_pom_xml_test.go b/syft/pkg/cataloger/java/parse_pom_xml_test.go index 45650049072..f80a6762840 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml_test.go +++ b/syft/pkg/cataloger/java/parse_pom_xml_test.go @@ -705,3 +705,11 @@ func getCommonsTextExpectedPackages() []pkg.Package { }, } } + +func Test_corruptPomXml(t *testing.T) { + c := NewPomCataloger(DefaultArchiveCatalogerConfig()) + pkgtest.NewCatalogTester(). + FromDirectory(t, "test-fixtures/corrupt"). + WithError(). 
+ TestCataloger(t, c) +} diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go index 24b1b55f5c7..089d19eb1dc 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" ) func Test_parseTarWrappedJavaArchive(t *testing.T) { @@ -57,3 +58,11 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) { }) } } + +func Test_corruptTarArchive(t *testing.T) { + ap := newGenericTarWrappedJavaArchiveParser(DefaultArchiveCatalogerConfig()) + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/example.tar"). + WithError(). + TestParser(t, ap.parseTarWrappedJavaArchive) +} diff --git a/syft/pkg/cataloger/java/test-fixtures/corrupt/example.jar b/syft/pkg/cataloger/java/test-fixtures/corrupt/example.jar new file mode 100644 index 00000000000..8944cbcc070 --- /dev/null +++ b/syft/pkg/cataloger/java/test-fixtures/corrupt/example.jar @@ -0,0 +1 @@ +example archive diff --git a/syft/pkg/cataloger/java/test-fixtures/corrupt/example.tar b/syft/pkg/cataloger/java/test-fixtures/corrupt/example.tar new file mode 100644 index 00000000000..8944cbcc070 --- /dev/null +++ b/syft/pkg/cataloger/java/test-fixtures/corrupt/example.tar @@ -0,0 +1 @@ +example archive diff --git a/syft/pkg/cataloger/java/test-fixtures/corrupt/pom.xml b/syft/pkg/cataloger/java/test-fixtures/corrupt/pom.xml new file mode 100644 index 00000000000..11183e1abe7 --- /dev/null +++ b/syft/pkg/cataloger/java/test-fixtures/corrupt/pom.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/syft/pkg/cataloger/javascript/parse_package_json.go b/syft/pkg/cataloger/javascript/parse_package_json.go index cc628c7724e..f3f43569025 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json.go +++ b/syft/pkg/cataloger/javascript/parse_package_json.go @@ -11,7 +11,6 @@ import ( "github.com/mitchellh/mapstructure" "github.com/anchore/syft/internal" - "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -64,11 +63,8 @@ func parsePackageJSON(_ context.Context, _ file.Resolver, _ *generic.Environment return nil, nil, fmt.Errorf("failed to parse package.json file: %w", err) } - if !p.hasNameAndVersionValues() { - log.Debugf("encountered package.json file without a name and/or version field, ignoring (path=%q)", reader.Path()) - return nil, nil, nil - } - + // always create a package, regardless of having a valid name and/or version, + // a compliance filter later will remove these packages based on compliance rules pkgs = append( pkgs, newPackageJSONPackage(p, reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), @@ -203,10 +199,6 @@ func licensesFromJSON(b []byte) ([]npmPackageLicense, error) { return nil, errors.New("unmarshal failed") } -func (p packageJSON) hasNameAndVersionValues() bool { - return p.Name != "" && p.Version != "" -} - // this supports both windows and unix paths var filepathSeparator = regexp.MustCompile(`[\\/]`) diff --git a/syft/pkg/cataloger/javascript/parse_package_json_test.go b/syft/pkg/cataloger/javascript/parse_package_json_test.go index 5d6ebaf4a7e..ea0102d9841 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json_test.go +++ 
b/syft/pkg/cataloger/javascript/parse_package_json_test.go @@ -210,10 +210,28 @@ func TestParsePackageJSON(t *testing.T) { } } +func Test_corruptPackageJSON(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/package.json"). + WithError(). + TestParser(t, parsePackageJSON) +} + func TestParsePackageJSON_Partial(t *testing.T) { // see https://github.com/anchore/syft/issues/311 const fixtureFile = "test-fixtures/pkg-json/package-partial.json" - pkgtest.TestFileParser(t, fixtureFile, parsePackageJSON, nil, nil) + // raise package.json files as packages with any information we find, these will be filtered out + // according to compliance rules later + expectedPkgs := []pkg.Package{ + { + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + PURL: packageURL("", ""), + Metadata: pkg.NpmPackage{}, + Locations: file.NewLocationSet(file.NewLocation(fixtureFile)), + }, + } + pkgtest.TestFileParser(t, fixtureFile, parsePackageJSON, expectedPkgs, nil) } func Test_pathContainsNodeModulesDirectory(t *testing.T) { diff --git a/syft/pkg/cataloger/javascript/parse_package_lock.go b/syft/pkg/cataloger/javascript/parse_package_lock.go index 1c7564cc3fe..ec8a2b60029 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock.go @@ -9,6 +9,7 @@ import ( "strings" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -100,7 +101,7 @@ func (a genericPackageLockAdapter) parsePackageLock(_ context.Context, resolver pkg.Sort(pkgs) - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func (licenses *packageLockLicense) UnmarshalJSON(data []byte) (err error) { diff --git a/syft/pkg/cataloger/javascript/parse_package_lock_test.go b/syft/pkg/cataloger/javascript/parse_package_lock_test.go index 98448bf7b43..b3ba527bb7b 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock_test.go @@ -333,3 +333,11 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { adapter := newGenericPackageLockAdapter(CatalogerConfig{}) pkgtest.TestFileParser(t, fixture, adapter.parsePackageLock, expectedPkgs, expectedRelationships) } + +func Test_corruptPackageLock(t *testing.T) { + gap := newGenericPackageLockAdapter(DefaultCatalogerConfig()) + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/package-lock.json"). + WithError(). 
+ TestParser(t, gap.parsePackageLock) +} diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go index 4936f7a2b37..8c7257a3847 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go @@ -11,6 +11,7 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -94,7 +95,7 @@ func parsePnpmLock(_ context.Context, resolver file.Resolver, _ *generic.Environ pkg.Sort(pkgs) - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func hasPkg(pkgs []pkg.Package, name, version string) bool { diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go index 7c0ed1c4db8..8b5ed7d0ae0 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go @@ -144,3 +144,10 @@ func TestParsePnpmV6Lock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePnpmLock, expectedPkgs, expectedRelationships) } + +func Test_corruptPnpmLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/pnpm-lock.yaml"). + WithError(). + TestParser(t, parsePnpmLock) +} diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock.go b/syft/pkg/cataloger/javascript/parse_yarn_lock.go index 73dcd30018d..94aa430a4ba 100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock.go @@ -8,6 +8,7 @@ import ( "github.com/scylladb/go-set/strset" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -112,7 +113,7 @@ func (a genericYarnLockAdapter) parseYarnLock(_ context.Context, resolver file.R pkg.Sort(pkgs) - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func findPackageName(line string) string { diff --git a/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package-lock.json b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package-lock.json new file mode 100644 index 00000000000..a13d744c1a1 --- /dev/null +++ b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package-lock.json @@ -0,0 +1,4 @@ +{ + "requires": true, + "lockfi +} diff --git a/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package.json b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package.json new file mode 100644 index 00000000000..209f9ad58db --- /dev/null +++ b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/package.json @@ -0,0 +1,5 @@ +{ + "version": "6.14.6", + "name"node": "6 >=6.2.0 || 8 || >=9.3.0" + } +} \ No newline at end of file diff --git a/syft/pkg/cataloger/javascript/test-fixtures/corrupt/pnpm-lock.yaml b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/pnpm-lock.yaml new file mode 100644 index 00000000000..5717c62b427 --- /dev/null +++ b/syft/pkg/cataloger/javascript/test-fixtures/corrupt/pnpm-lock.yaml @@ -0,0 +1,7 @@ +lockfileVersion: 5.4 + +specifi +lution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + dev: true + + # removed other packages diff --git a/syft/pkg/cataloger/kernel/cataloger.go b/syft/pkg/cataloger/kernel/cataloger.go index d0cc4d131bf..877e9819bd1 100644 --- 
a/syft/pkg/cataloger/kernel/cataloger.go +++ b/syft/pkg/cataloger/kernel/cataloger.go @@ -6,9 +6,8 @@ package kernel import ( "context" - "github.com/hashicorp/go-multierror" - "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -62,7 +61,7 @@ func (l linuxKernelCataloger) Catalog(ctx context.Context, resolver file.Resolve kernelPackages, kernelRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelFile, kernelArchiveGlobs...).Catalog(ctx, resolver) if err != nil { - errs = multierror.Append(errs, err) + errs = unknown.Join(errs, err) } allRelationships = append(allRelationships, kernelRelationships...) @@ -71,7 +70,7 @@ func (l linuxKernelCataloger) Catalog(ctx context.Context, resolver file.Resolve if l.cfg.CatalogModules { modulePackages, moduleRelationships, err := generic.NewCataloger(l.Name()).WithParserByGlobs(parseLinuxKernelModuleFile, kernelModuleGlobs...).Catalog(ctx, resolver) if err != nil { - errs = multierror.Append(errs, err) + errs = unknown.Join(errs, err) } allPackages = append(allPackages, modulePackages...) diff --git a/syft/pkg/cataloger/lua/parse_rockspec.go b/syft/pkg/cataloger/lua/parse_rockspec.go index a11709fa8e5..de109e77e5d 100644 --- a/syft/pkg/cataloger/lua/parse_rockspec.go +++ b/syft/pkg/cataloger/lua/parse_rockspec.go @@ -2,6 +2,7 @@ package lua import ( "context" + "fmt" "strings" "github.com/anchore/syft/internal/log" @@ -30,7 +31,7 @@ func parseRockspec(_ context.Context, _ file.Resolver, _ *generic.Environment, r doc, err := parseRockspecData(reader) if err != nil { log.WithFields("error", err).Trace("unable to parse Rockspec app") - return nil, nil, nil + return nil, nil, fmt.Errorf("unable to parse Rockspec app: %w", err) } var name, version, license, homepage, description, url string diff --git a/syft/pkg/cataloger/lua/parse_rockspec_test.go b/syft/pkg/cataloger/lua/parse_rockspec_test.go index f429a9dc771..a85cda58c3a 100644 --- a/syft/pkg/cataloger/lua/parse_rockspec_test.go +++ b/syft/pkg/cataloger/lua/parse_rockspec_test.go @@ -106,3 +106,10 @@ func TestParseRockspec(t *testing.T) { }) } } + +func Test_corruptRockspec(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/corrupt/bad-1.23.0-0.rockspec"). + WithError(). 
+ TestParser(t, parseRockspec) +} diff --git a/syft/pkg/cataloger/lua/test-fixtures/corrupt/bad-1.23.0-0.rockspec b/syft/pkg/cataloger/lua/test-fixtures/corrupt/bad-1.23.0-0.rockspec new file mode 100644 index 00000000000..d2ed1d025e2 --- /dev/null +++ b/syft/pkg/cataloger/lua/test-fixtures/corrupt/bad-1.23.0-0.rockspec @@ -0,0 +1,5 @@ +package = {"kon + +3.7.0-0" +rockspec_fo} +} diff --git a/syft/pkg/cataloger/php/parse_composer_lock.go b/syft/pkg/cataloger/php/parse_composer_lock.go index 4abb66c2449..5348e6ffcd1 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock.go +++ b/syft/pkg/cataloger/php/parse_composer_lock.go @@ -7,6 +7,7 @@ import ( "fmt" "io" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -48,5 +49,5 @@ func parseComposerLock(_ context.Context, _ file.Resolver, _ *generic.Environmen } } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/php/parse_composer_lock_test.go b/syft/pkg/cataloger/php/parse_composer_lock_test.go index ae01097e255..1da97c42d2e 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock_test.go +++ b/syft/pkg/cataloger/php/parse_composer_lock_test.go @@ -113,3 +113,10 @@ func TestParseComposerFileLock(t *testing.T) { } pkgtest.TestFileParser(t, fixture, parseComposerLock, expectedPkgs, expectedRelationships) } + +func Test_corruptComposerLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/composer.lock"). + WithError(). + TestParser(t, parseComposerLock) +} diff --git a/syft/pkg/cataloger/php/parse_installed_json.go b/syft/pkg/cataloger/php/parse_installed_json.go index f1498d9316f..b7c7d9d3243 100644 --- a/syft/pkg/cataloger/php/parse_installed_json.go +++ b/syft/pkg/cataloger/php/parse_installed_json.go @@ -7,6 +7,7 @@ import ( "fmt" "io" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -69,5 +70,5 @@ func parseInstalledJSON(_ context.Context, _ file.Resolver, _ *generic.Environme } } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/php/parse_installed_json_test.go b/syft/pkg/cataloger/php/parse_installed_json_test.go index abc29863c2b..7e0c06c3203 100644 --- a/syft/pkg/cataloger/php/parse_installed_json_test.go +++ b/syft/pkg/cataloger/php/parse_installed_json_test.go @@ -142,3 +142,10 @@ func TestParseInstalledJsonComposerV1(t *testing.T) { }) } } + +func Test_corruptInstalledJSON(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/installed.json"). + WithError(). + TestParser(t, parseInstalledJSON) +} diff --git a/syft/pkg/cataloger/php/parse_pecl_serialized_test.go b/syft/pkg/cataloger/php/parse_pecl_serialized_test.go index 7f685ffdaee..84f84ea7282 100644 --- a/syft/pkg/cataloger/php/parse_pecl_serialized_test.go +++ b/syft/pkg/cataloger/php/parse_pecl_serialized_test.go @@ -33,3 +33,10 @@ func TestParsePeclSerialized(t *testing.T) { } pkgtest.TestFileParser(t, fixture, parsePeclSerialized, expectedPkgs, expectedRelationships) } + +func Test_corruptPecl(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/php/.registry/.channel.pecl.php.net/memcached.reg"). + WithError(). 
+ TestParser(t, parseComposerLock) +} diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go index 60ec99c70e9..051bfd78ae2 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go +++ b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go @@ -80,3 +80,10 @@ func TestParsePipFileLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePipfileLock, expectedPkgs, expectedRelationships) } + +func Test_corruptPipfileLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/Pipfile.lock"). + WithError(). + TestParser(t, parsePipfileLock) +} diff --git a/syft/pkg/cataloger/python/parse_poetry_lock.go b/syft/pkg/cataloger/python/parse_poetry_lock.go index 84e67ddb589..2b75f51b871 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock.go @@ -8,6 +8,7 @@ import ( "github.com/BurntSushi/toml" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -57,7 +58,7 @@ func parsePoetryLock(_ context.Context, _ file.Resolver, _ *generic.Environment, // since we would never expect to create relationships for packages across multiple poetry.lock files // we should do this on a file parser level (each poetry.lock) instead of a cataloger level (across all // poetry.lock files) - return pkgs, dependency.Resolve(poetryLockDependencySpecifier, pkgs), nil + return pkgs, dependency.Resolve(poetryLockDependencySpecifier, pkgs), unknown.IfEmptyf(pkgs, "unable to determine packages") } func poetryLockPackages(reader file.LocationReadCloser) ([]pkg.Package, error) { diff --git a/syft/pkg/cataloger/python/parse_poetry_lock_test.go b/syft/pkg/cataloger/python/parse_poetry_lock_test.go index 1a6c1beb797..0cb0030a2e5 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock_test.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock_test.go @@ -81,3 +81,10 @@ func TestParsePoetryLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePoetryLock, expectedPkgs, expectedRelationships) } + +func Test_corruptPoetryLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/poetry.lock"). + WithError(). + TestParser(t, parsePoetryLock) +} diff --git a/syft/pkg/cataloger/python/parse_requirements.go b/syft/pkg/cataloger/python/parse_requirements.go index 38bbde3e83b..7d5d1d8a4ef 100644 --- a/syft/pkg/cataloger/python/parse_requirements.go +++ b/syft/pkg/cataloger/python/parse_requirements.go @@ -13,6 +13,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -94,6 +95,7 @@ func newRequirementsParser(cfg CatalogerConfig) requirementsParser { // parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a // specific version. 
func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + var errs error var packages []pkg.Package scanner := bufio.NewScanner(reader) @@ -126,6 +128,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso req := newRequirement(line) if req == nil { log.WithFields("path", reader.RealPath).Warnf("unable to parse requirements.txt line: %q", line) + errs = unknown.Appendf(errs, reader, "unable to parse requirements.txt line: %q", line) continue } @@ -134,6 +137,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso if version == "" { log.WithFields("path", reader.RealPath).Tracef("unable to determine package version in requirements.txt line: %q", line) + errs = unknown.Appendf(errs, reader, "unable to determine package version in requirements.txt line: %q", line) continue } @@ -158,7 +162,7 @@ func (rp requirementsParser) parseRequirementsTxt(_ context.Context, _ file.Reso return nil, nil, fmt.Errorf("failed to parse python requirements file: %w", err) } - return packages, nil, nil + return packages, nil, unknown.Join(errs, unknown.IfEmptyf(packages, "unable to determine packages")) } func parseVersion(version string, guessFromConstraint bool) string { diff --git a/syft/pkg/cataloger/python/parse_requirements_test.go b/syft/pkg/cataloger/python/parse_requirements_test.go index cd796955c80..6573b13d620 100644 --- a/syft/pkg/cataloger/python/parse_requirements_test.go +++ b/syft/pkg/cataloger/python/parse_requirements_test.go @@ -353,3 +353,11 @@ func Test_parseVersion(t *testing.T) { }) } } + +func Test_corruptRequirementsTxt(t *testing.T) { + rp := newRequirementsParser(DefaultCatalogerConfig()) + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/requirements.txt"). + WithError(). + TestParser(t, rp.parseRequirementsTxt) +} diff --git a/syft/pkg/cataloger/redhat/parse_rpm_archive_test.go b/syft/pkg/cataloger/redhat/parse_rpm_archive_test.go index dddf81a9c9d..287c63c0bea 100644 --- a/syft/pkg/cataloger/redhat/parse_rpm_archive_test.go +++ b/syft/pkg/cataloger/redhat/parse_rpm_archive_test.go @@ -94,3 +94,10 @@ func TestParseRpmFiles(t *testing.T) { }) } } + +func Test_corruptRpmArchive(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/bad/bad.rpm"). + WithError(). 
+ TestParser(t, parseRpmArchive) +} diff --git a/syft/pkg/cataloger/redhat/parse_rpm_db.go b/syft/pkg/cataloger/redhat/parse_rpm_db.go index 23c5dd487a6..785b2c9adc3 100644 --- a/syft/pkg/cataloger/redhat/parse_rpm_db.go +++ b/syft/pkg/cataloger/redhat/parse_rpm_db.go @@ -9,6 +9,7 @@ import ( rpmdb "github.com/knqyf263/go-rpmdb/pkg" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" @@ -59,11 +60,15 @@ func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environm distro = env.LinuxRelease } + var errs error for _, entry := range pkgList { if entry == nil { continue } + files, err := extractRpmFileRecords(resolver, *entry) + errs = unknown.Join(errs, err) + metadata := pkg.RpmDBEntry{ Name: entry.Name, Version: entry.Version, @@ -74,7 +79,7 @@ func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environm Vendor: entry.Vendor, Size: entry.Size, ModularityLabel: &entry.Modularitylabel, - Files: extractRpmFileRecords(resolver, *entry), + Files: files, Provides: entry.Provides, Requires: entry.Requires, } @@ -89,6 +94,7 @@ func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environm if !pkg.IsValid(&p) { log.WithFields("location", reader.RealPath, "pkg", fmt.Sprintf("%s@%s", entry.Name, entry.Version)). Warn("ignoring invalid package found in RPM DB") + errs = unknown.Appendf(errs, reader, "invalid package found; name: %s, version: %s", entry.Name, entry.Version) continue } @@ -96,7 +102,11 @@ func parseRpmDB(_ context.Context, resolver file.Resolver, env *generic.Environm allPkgs = append(allPkgs, p) } - return allPkgs, nil, nil + if errs == nil && len(allPkgs) == 0 { + errs = fmt.Errorf("unable to determine packages") + } + + return allPkgs, nil, errs } // The RPM naming scheme is [name]-[version]-[release]-[arch], where version is implicitly expands to [epoch]:[version]. @@ -112,13 +122,13 @@ func toELVersion(epoch *int, version, release string) string { return fmt.Sprintf("%s-%s", version, release) } -func extractRpmFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo) []pkg.RpmFileRecord { +func extractRpmFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo) ([]pkg.RpmFileRecord, error) { var records = make([]pkg.RpmFileRecord, 0) files, err := entry.InstalledFiles() if err != nil { log.Warnf("unable to parse listing of installed files for RPM DB entry: %s", err.Error()) - return records + return records, fmt.Errorf("unable to parse listing of installed files for RPM DB entry: %w", err) } for _, record := range files { @@ -138,5 +148,5 @@ func extractRpmFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo) }) } } - return records + return records, nil } diff --git a/syft/pkg/cataloger/redhat/parse_rpm_db_test.go b/syft/pkg/cataloger/redhat/parse_rpm_db_test.go index a1da936182f..7572642c88a 100644 --- a/syft/pkg/cataloger/redhat/parse_rpm_db_test.go +++ b/syft/pkg/cataloger/redhat/parse_rpm_db_test.go @@ -214,6 +214,13 @@ func TestToElVersion(t *testing.T) { } } +func Test_corruptRpmDbEntry(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/usr/lib/sysimage/rpm/Packages.db"). + WithError(). 
+ TestParser(t, parseRpmDB) +} + func intRef(i int) *int { return &i } diff --git a/syft/pkg/cataloger/redhat/parse_rpm_manifest.go b/syft/pkg/cataloger/redhat/parse_rpm_manifest.go index 9c0185aef00..3e44ba8fdfe 100644 --- a/syft/pkg/cataloger/redhat/parse_rpm_manifest.go +++ b/syft/pkg/cataloger/redhat/parse_rpm_manifest.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -53,5 +54,5 @@ func parseRpmManifest(_ context.Context, _ file.Resolver, _ *generic.Environment allPkgs = append(allPkgs, p) } - return allPkgs, nil, nil + return allPkgs, nil, unknown.IfEmptyf(allPkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go index 6c5432db883..748727bcc18 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go @@ -7,6 +7,7 @@ import ( "github.com/scylladb/go-set/strset" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -54,7 +55,7 @@ func parseGemFileLockEntries(_ context.Context, _ file.Resolver, _ *generic.Envi if err := scanner.Err(); err != nil { return nil, nil, err } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func isDependencyLine(line string) bool { diff --git a/syft/pkg/cataloger/rust/cataloger_test.go b/syft/pkg/cataloger/rust/cataloger_test.go index 494b8cfbff5..3a7a356db21 100644 --- a/syft/pkg/cataloger/rust/cataloger_test.go +++ b/syft/pkg/cataloger/rust/cataloger_test.go @@ -97,3 +97,10 @@ func Test_AuditBinaryCataloger_Globs(t *testing.T) { }) } } + +func Test_corruptAuditBinary(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/partial-binary"). + WithError(). + TestParser(t, parseAuditBinary) +} diff --git a/syft/pkg/cataloger/rust/parse_audit_binary.go b/syft/pkg/cataloger/rust/parse_audit_binary.go index afa09a058cf..25525e91656 100644 --- a/syft/pkg/cataloger/rust/parse_audit_binary.go +++ b/syft/pkg/cataloger/rust/parse_audit_binary.go @@ -3,6 +3,7 @@ package rust import ( "context" "errors" + "fmt" rustaudit "github.com/microsoft/go-rustaudit" @@ -23,21 +24,22 @@ func parseAuditBinary(_ context.Context, _ file.Resolver, _ *generic.Environment return nil, nil, err } - for _, versionInfo := range parseAuditBinaryEntry(unionReader, reader.RealPath) { + infos, err := parseAuditBinaryEntry(unionReader, reader.RealPath) + for _, versionInfo := range infos { pkgs = append(pkgs, newPackagesFromAudit(reader.Location, versionInfo)...) 
} - return pkgs, nil, nil + return pkgs, nil, err } // scanFile scans file to try to report the Rust crate dependencies -func parseAuditBinaryEntry(reader unionreader.UnionReader, filename string) []rustaudit.VersionInfo { +func parseAuditBinaryEntry(reader unionreader.UnionReader, filename string) ([]rustaudit.VersionInfo, error) { // NOTE: multiple readers are returned to cover universal binaries, which are files // with more than one binary readers, err := unionreader.GetReaders(reader) if err != nil { log.Warnf("rust cataloger: failed to open a binary: %v", err) - return nil + return nil, fmt.Errorf("rust cataloger: failed to open a binary: %w", err) } var versionInfos []rustaudit.VersionInfo @@ -48,14 +50,14 @@ func parseAuditBinaryEntry(reader unionreader.UnionReader, filename string) []ru if errors.Is(err, rustaudit.ErrNoRustDepInfo) { // since the cataloger can only select executables and not distinguish if they are a Rust-compiled // binary, we should not show warnings/logs in this case. - return nil + return nil, nil } log.Tracef("rust cataloger: unable to read dependency information (file=%q): %v", filename, err) - return nil + return nil, fmt.Errorf("rust cataloger: unable to read dependency information: %w", err) } versionInfos = append(versionInfos, versionInfo) } - return versionInfos + return versionInfos, nil } diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock.go b/syft/pkg/cataloger/rust/parse_cargo_lock.go index d1ef3db1261..01e9fd87e0a 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock.go @@ -6,6 +6,7 @@ import ( "github.com/pelletier/go-toml" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -46,5 +47,5 @@ func parseCargoLock(_ context.Context, _ file.Resolver, _ *generic.Environment, ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go index 6e12fb809ee..4239297795d 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go @@ -191,3 +191,10 @@ func TestParseCargoLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parseCargoLock, expectedPkgs, expectedRelationships) } + +func Test_corruptCargoLock(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/Cargo.lock"). + WithError(). + TestParser(t, parseCargoLock) +} diff --git a/syft/pkg/cataloger/sbom/cataloger_test.go b/syft/pkg/cataloger/sbom/cataloger_test.go index ad6aea71d1d..e53b14ae780 100644 --- a/syft/pkg/cataloger/sbom/cataloger_test.go +++ b/syft/pkg/cataloger/sbom/cataloger_test.go @@ -450,3 +450,10 @@ func Test_Cataloger_Globs(t *testing.T) { }) } } + +func Test_corruptSBOM(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/app.spdx.json"). + WithError(). 
+ TestParser(t, parseSBOM) +} diff --git a/syft/pkg/cataloger/swift/parse_package_resolved.go b/syft/pkg/cataloger/swift/parse_package_resolved.go index cba7f54b3b9..4fc3965c911 100644 --- a/syft/pkg/cataloger/swift/parse_package_resolved.go +++ b/syft/pkg/cataloger/swift/parse_package_resolved.go @@ -8,6 +8,7 @@ import ( "io" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -71,7 +72,7 @@ func parsePackageResolved(_ context.Context, _ file.Resolver, _ *generic.Environ if packageResolvedData["version"] == nil { log.Trace("no version found in Package.resolved file, skipping") - return nil, nil, nil + return nil, nil, fmt.Errorf("no version found in Package.resolved file") } version, ok := packageResolvedData["version"].(float64) @@ -97,7 +98,7 @@ func parsePackageResolved(_ context.Context, _ file.Resolver, _ *generic.Environ ), ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } func pinsForVersion(data map[string]interface{}, version float64) ([]packagePin, error) { diff --git a/syft/pkg/cataloger/swift/parse_package_resolved_test.go b/syft/pkg/cataloger/swift/parse_package_resolved_test.go index 499e705f672..bf16d7542e1 100644 --- a/syft/pkg/cataloger/swift/parse_package_resolved_test.go +++ b/syft/pkg/cataloger/swift/parse_package_resolved_test.go @@ -134,3 +134,10 @@ func TestParsePackageResolved_versionNotANumber(t *testing.T) { pkgtest.NewCatalogTester().FromFile(t, fixture).WithError().TestParser(t, parsePackageResolved) } + +func Test_corruptPackageResolved(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/bad-version-packages.resolved"). + WithError(). + TestParser(t, parsePackageResolved) +} diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock.go b/syft/pkg/cataloger/swift/parse_podfile_lock.go index 680dd0caa8d..43803a9f8ea 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock.go @@ -8,6 +8,7 @@ import ( "gopkg.in/yaml.v3" + "github.com/anchore/syft/internal/unknown" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" @@ -71,5 +72,5 @@ func parsePodfileLock(_ context.Context, _ file.Resolver, _ *generic.Environment ) } - return pkgs, nil, nil + return pkgs, nil, unknown.IfEmptyf(pkgs, "unable to determine packages") } diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go index 4a3ec2bbc4a..0abdd0d7cec 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go @@ -273,3 +273,10 @@ func TestParsePodfileLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePodfileLock, expectedPkgs, expectedRelationships) } + +func Test_corruptPodfile(t *testing.T) { + pkgtest.NewCatalogTester(). + FromFile(t, "test-fixtures/glob-paths/src/Podfile.lock"). + WithError(). 
+ TestParser(t, parsePodfileLock) +} diff --git a/syft/sbom/sbom.go b/syft/sbom/sbom.go index ba3f95f3d46..1c8cbc60ff2 100644 --- a/syft/sbom/sbom.go +++ b/syft/sbom/sbom.go @@ -26,6 +26,7 @@ type Artifacts struct { FileContents map[file.Coordinates]string FileLicenses map[file.Coordinates][]file.License Executables map[file.Coordinates]file.Executable + Unknowns map[file.Coordinates][]string LinuxDistribution *linux.Release } @@ -62,6 +63,9 @@ func (s SBOM) AllCoordinates() []file.Coordinates { for coordinates := range s.Artifacts.FileDigests { set.Add(coordinates) } + for coordinates := range s.Artifacts.Unknowns { + set.Add(coordinates) + } for _, relationship := range s.Relationships { for _, coordinates := range extractCoordinates(relationship) { set.Add(coordinates) diff --git a/test/cli/test-fixtures/image-unknowns/Dockerfile b/test/cli/test-fixtures/image-unknowns/Dockerfile new file mode 100644 index 00000000000..503f6808d0c --- /dev/null +++ b/test/cli/test-fixtures/image-unknowns/Dockerfile @@ -0,0 +1,3 @@ +FROM alpine@sha256:c5c5fda71656f28e49ac9c5416b3643eaa6a108a8093151d6d1afc9463be8e33 +RUN rm -rf /lib/apk/db/installed +COPY . /home/files diff --git a/test/cli/test-fixtures/image-unknowns/exe b/test/cli/test-fixtures/image-unknowns/exe new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test/cli/test-fixtures/image-unknowns/executable-script b/test/cli/test-fixtures/image-unknowns/executable-script new file mode 100755 index 00000000000..07c90f31c81 --- /dev/null +++ b/test/cli/test-fixtures/image-unknowns/executable-script @@ -0,0 +1,2 @@ +#!/bin/sh +echo "hello" diff --git a/test/cli/test-fixtures/image-unknowns/package-lock.json b/test/cli/test-fixtures/image-unknowns/package-lock.json new file mode 100644 index 00000000000..6649985c546 --- /dev/null +++ b/test/cli/test-fixtures/image-unknowns/package-lock.json @@ -0,0 +1,3 @@ +invalid [{ + +}] \ No newline at end of file diff --git a/test/cli/test-fixtures/image-unknowns/unextracted.tar.gz b/test/cli/test-fixtures/image-unknowns/unextracted.tar.gz new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test/cli/test-fixtures/image-unknowns/unextracted.zip b/test/cli/test-fixtures/image-unknowns/unextracted.zip new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test/cli/test-fixtures/image-unknowns/unknown-readable.jar b/test/cli/test-fixtures/image-unknowns/unknown-readable.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4defd8b54f2c8d6d4ca6ce5e1d326c2d14e9d18 GIT binary patch literal 209 zcmWIWW@Zs#-~hsFVaFmEpgJ|J0ew`sW^a`JOuw9&pOz?8%eikC+$&yxBQg%r4%R1S$fV7vRmvB*Kia g3t0|i7Yw{@1hI&)Il!Bh4WyJ22m^t17Kp