This repository has been archived by the owner on Oct 14, 2024. It is now read-only.

Commit

fix: image parsing issue in the case of SBOM input. (#178)
* fix(scanner): get hash from sbom input
* increase sleep after e2e port-forward
* fix(charts): update bitnami/postgresql chart version
Co-authored-by: Erez Fishhimer <fishkerez.github@gmail.com>
pbalogh-sa authored Jun 2, 2022
1 parent 3712f7e commit 04b4bf2
Showing 11 changed files with 154 additions and 20 deletions.
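
In short: when the scanner input is an SBOM rather than an image, the grype scanners now recover the original image name and hash from the CycloneDX metadata component and report those in the results, instead of the converted Syft JSON path and empty fields that the old test expectations show. A minimal, self-contained sketch of that lookup using the cyclonedx-go library (the file name is hypothetical, and picking the SHA-256 entry is an assumption about what cdx_helper.GetComponentHash does):

```go
package main

import (
	"fmt"
	"os"

	cdx "github.com/CycloneDX/cyclonedx-go"
)

func main() {
	// Hypothetical CycloneDX JSON SBOM produced for an image scan.
	f, err := os.Open("nginx.cdx.json")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	var bom cdx.BOM
	if err := cdx.NewBOMDecoder(f, cdx.BOMFileFormatJSON).Decode(&bom); err != nil {
		panic(err)
	}
	if bom.Metadata == nil || bom.Metadata.Component == nil {
		panic("SBOM has no metadata component")
	}

	// Original image reference recorded by the SBOM generator.
	name := bom.Metadata.Component.Name

	// Assumption: the component hash of interest is the SHA-256 entry.
	var hash string
	if bom.Metadata.Component.Hashes != nil {
		for _, h := range *bom.Metadata.Component.Hashes {
			if h.Algorithm == cdx.HashAlgoSHA256 {
				hash = h.Value
				break
			}
		}
	}
	fmt.Printf("source name=%q hash=%q\n", name, hash)
}
```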
6 changes: 3 additions & 3 deletions charts/kubeclarity/Chart.lock
@@ -1,6 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
-version: 10.12.2
-digest: sha256:ea766847129fa7dfb45b80a1feeffea8308f748ca846463bf1f320c1a6913cf1
-generated: "2022-03-31T13:11:05.076557+03:00"
+version: 10.16.2
+digest: sha256:7e13127118d7d974c014d9b4600c165479b2519aede190a91ff295c47772b3c7
+generated: "2022-06-02T13:48:53.394278+02:00"
2 changes: 1 addition & 1 deletion charts/kubeclarity/Chart.yaml
@@ -13,7 +13,7 @@ maintainers:
type: application
dependencies:
- name: postgresql
version: "10.12.2"
version: "10.16.2"
repository: https://charts.bitnami.com/bitnami
condition: kubeclarity-postgresql.enabled
alias: kubeclarity-postgresql
Binary file removed charts/kubeclarity/charts/postgresql-10.12.2.tgz
Binary file added charts/kubeclarity/charts/postgresql-10.16.2.tgz
2 changes: 1 addition & 1 deletion e2e/common/utils.go
@@ -116,7 +116,7 @@ func PortForwardToKubeClarity(stopCh chan struct{}) {
return
}
}()
-time.Sleep(3 * time.Second)
+time.Sleep(10 * time.Second)
}

func StringPtr(val string) *string {
7 changes: 5 additions & 2 deletions shared/pkg/converter/cyclonedx_to_syft.go
@@ -44,7 +44,7 @@ type emptyMetadata struct{}
var ErrFailedToGetCycloneDXSBOM = errors.New("failed to get CycloneDX SBOM from file")

func ConvertCycloneDXToSyftJSONFromFile(inputSBOMFile string, outputSBOMFile string) error {
-cdxBOM, err := getCycloneDXSBOMFromFile(inputSBOMFile)
+cdxBOM, err := GetCycloneDXSBOMFromFile(inputSBOMFile)
if err != nil {
return ErrFailedToGetCycloneDXSBOM
}
@@ -80,7 +80,7 @@ func saveSyftSBOMToFile(syftBOM syft_sbom.SBOM, outputSBOMFile string) error {
return nil
}

-func getCycloneDXSBOMFromFile(inputSBOMFile string) (*cdx.BOM, error) {
+func GetCycloneDXSBOMFromFile(inputSBOMFile string) (*cdx.BOM, error) {
inputSBOM, err := os.ReadFile(inputSBOMFile)
if err != nil {
return nil, fmt.Errorf("failed to read SBOM file %s: %v", inputSBOMFile, err)
@@ -121,6 +121,9 @@ func convertCycloneDXtoSyft(bom *cdx.BOM) (syft_sbom.SBOM, error) {
if bom.Metadata.Component == nil {
return syft_sbom.SBOM{}, fmt.Errorf("cycloneDX metadata component is nil")
}
if bom.Components == nil {
return syft_sbom.SBOM{}, fmt.Errorf("cycloneDX doesn't have any components")
}
// nolint:exhaustive
switch bom.Metadata.Component.Type {
case cdx.ComponentTypeContainer:
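
Note that getCycloneDXSBOMFromFile is now exported as GetCycloneDXSBOMFromFile so the grype scanner package can reuse it; per the signature above it returns a *cdx.BOM. A rough caller sketch (the path and logging are illustrative, not from the repo):

```go
package main

import (
	"fmt"
	"log"

	"github.com/openclarity/kubeclarity/shared/pkg/converter"
)

func main() {
	// Hypothetical SBOM path; the helper returns a *cdx.BOM on success.
	cdxBOM, err := converter.GetCycloneDXSBOMFromFile("/tmp/nginx.cdx.json")
	if err != nil {
		log.Fatalf("failed to get CycloneDX SBOM: %v", err)
	}
	if cdxBOM.Metadata == nil || cdxBOM.Metadata.Component == nil {
		log.Fatal("SBOM has no metadata component")
	}
	fmt.Println("main component:", cdxBOM.Metadata.Component.Name)
}
```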
31 changes: 26 additions & 5 deletions shared/pkg/scanner/grype/common.go
@@ -29,6 +29,7 @@ import (
"github.com/openclarity/kubeclarity/shared/pkg/converter"
"github.com/openclarity/kubeclarity/shared/pkg/job_manager"
"github.com/openclarity/kubeclarity/shared/pkg/scanner"
cdx_helper "github.com/openclarity/kubeclarity/shared/pkg/utils/cyclonedx_helper"
"github.com/openclarity/kubeclarity/shared/pkg/utils/image_helper"
)

@@ -76,7 +77,7 @@ func ConvertCycloneDXFileToSyftJSONFile(inputFilePath string, logger *log.Entry)
return outputFilePath, func() { _ = os.Remove(outputFilePath) }, nil
}

-func CreateResults(doc grype_models.Document, userInput, scannerName string) *scanner.Results {
+func CreateResults(doc grype_models.Document, userInput, scannerName, hash string) *scanner.Results {
distro := getDistro(doc)

matches := make(scanner.Matches, len(doc.Matches))
@@ -117,24 +118,34 @@ func CreateResults(doc grype_models.Document, userInput, scannerName string) *sc
ScannerInfo: scanner.Info{
Name: scannerName,
},
-Source: getSource(doc, userInput),
+Source: getSource(doc, userInput, hash),
}
}

-func getSource(doc grype_models.Document, userInput string) scanner.Source {
+func getSource(doc grype_models.Document, userInput, hash string) scanner.Source {
var source scanner.Source
if doc.Source == nil {
return source
}

-var srcName, hash string
+var srcName string
switch doc.Source.Target.(type) {
case syft_source.ImageMetadata:
srcName = doc.Source.Target.(syft_source.ImageMetadata).UserInput
// If the userInput is an SBOM, the srcName and hash are taken from the SBOM.
if srcName == "" {
srcName = userInput
}
if hash != "" {
break
}
hash = image_helper.GetHashFromRepoDigest(doc.Source.Target.(syft_source.ImageMetadata).RepoDigests, userInput)
if hash == "" {
// set hash using ManifestDigest if RepoDigest is missing
-hash = doc.Source.Target.(syft_source.ImageMetadata).ManifestDigest
+manifestHash := doc.Source.Target.(syft_source.ImageMetadata).ManifestDigest
+if idx := strings.Index(manifestHash, ":"); idx != -1 {
+hash = manifestHash[idx+1:]
+}
}
case string:
srcName = doc.Source.Target.(string) // nolint:forcetypeassert
@@ -214,3 +225,13 @@ func parseLayerHex(layerID string) string {

return layerID[index+1:]
}

func getOriginalInputAndHashFromSBOM(inputSBOMFile string) (string, string, error) {
cdxBOM, err := converter.GetCycloneDXSBOMFromFile(inputSBOMFile)
if err != nil {
return "", "", converter.ErrFailedToGetCycloneDXSBOM
}
hash := cdx_helper.GetComponentHash(cdxBOM.Metadata.Component)

return cdxBOM.Metadata.Component.Name, hash, nil
}
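
The hash selection in getSource now follows a fixed order: a hash passed in from the SBOM wins, then a hash derived from the image's RepoDigests, and finally the ManifestDigest with its algorithm prefix (e.g. "sha256:") stripped. A small stand-alone sketch of that order (repoDigestHash stands in for image_helper.GetHashFromRepoDigest, which is not reproduced here):

```go
package main

import (
	"fmt"
	"strings"
)

// resolveHash mirrors the priority the patched getSource applies to image sources.
func resolveHash(sbomHash, repoDigestHash, manifestDigest string) string {
	if sbomHash != "" {
		// Hash already recovered from the SBOM metadata component.
		return sbomHash
	}
	hash := repoDigestHash // stand-in for image_helper.GetHashFromRepoDigest(...)
	if hash == "" {
		// Fall back to the manifest digest, dropping the "sha256:" prefix.
		if idx := strings.Index(manifestDigest, ":"); idx != -1 {
			hash = manifestDigest[idx+1:]
		}
	}
	return hash
}

func main() {
	fmt.Println(resolveHash("", "", "sha256:43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94"))
	// Output: 43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94
}
```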
100 changes: 98 additions & 2 deletions shared/pkg/scanner/grype/common_test.go
@@ -18,9 +18,12 @@ package grype
import (
"encoding/json"
"os"
"reflect"
"testing"

"github.com/anchore/grype/grype/presenter/models"
syft_source "github.com/anchore/syft/syft/source"
"github.com/jinzhu/copier"
"gotest.tools/assert"

"github.com/openclarity/kubeclarity/shared/pkg/scanner"
@@ -32,7 +35,12 @@ func TestCreateResults(t *testing.T) {
file, err := os.ReadFile("./test_data/nginx.json")
assert.NilError(t, err)
assert.NilError(t, json.Unmarshal(file, &doc))

// define Target properly for image input
doc.Source.Target = syft_source.ImageMetadata{
UserInput: "nginx",
ManifestDigest: "sha256:43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94",
RepoDigests: []string{"sha256:43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94"},
}
// read expected results
var results scanner.Results
file, err = os.ReadFile("./test_data/nginx.results.json")
@@ -43,6 +51,7 @@
doc models.Document
userInput string
scannerName string
hash string
}
tests := []struct {
name string
@@ -61,10 +70,97 @@
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
-got := CreateResults(tt.args.doc, tt.args.userInput, tt.args.scannerName)
+got := CreateResults(tt.args.doc, tt.args.userInput, tt.args.scannerName, tt.args.hash)
// gotB, _ := json.Marshal(got)
// assert.NilError(t, os.WriteFile("./test_data/nginx.results.json", gotB, 0666))
assert.DeepEqual(t, got, tt.want)
})
}
}

func Test_getSource(t *testing.T) {
// read input document
var doc models.Document
file, err := os.ReadFile("./test_data/nginx.json")
assert.NilError(t, err)
assert.NilError(t, json.Unmarshal(file, &doc))

// make copies of the document
var sbomDoc models.Document
if err := copier.Copy(&sbomDoc, &doc); err != nil {
t.Errorf("failed to copy document struct: %v", err)
}
var otherDoc models.Document
if err := copier.Copy(&otherDoc, &doc); err != nil {
t.Errorf("failed to copy document struct: %v", err)
}

// define Target properly for image input
doc.Source.Target = syft_source.ImageMetadata{
UserInput: "nginx",
ManifestDigest: "sha256:43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94",
RepoDigests: []string{"sha256:43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94"},
}
// empty imageMetadata for SBOM input
sbomDoc.Source.Target = syft_source.ImageMetadata{}
// string for other input
otherDoc.Source.Target = "test"

type args struct {
doc models.Document
userInput string
hash string
}
tests := []struct {
name string
args args
want scanner.Source
}{
{
name: "input is an image",
args: args{
doc: doc,
userInput: "nginx",
hash: "",
},
want: scanner.Source{
Type: "image",
Name: "nginx",
Hash: "43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94",
},
},
{
name: "input is a SBOM",
args: args{
doc: sbomDoc,
userInput: "nginx",
hash: "testhash",
},
want: scanner.Source{
Type: "image",
Name: "nginx",
Hash: "testhash",
},
},
{
name: "input is not SBOM or image",
args: args{
doc: otherDoc,
userInput: "test",
hash: "testhash",
},
want: scanner.Source{
Type: "image",
Name: "test",
Hash: "testhash",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getSource(tt.args.doc, tt.args.userInput, tt.args.hash); !reflect.DeepEqual(got, tt.want) {
t.Errorf("getSource() = %v, want %v", got, tt.want)
}
})
}
}
9 changes: 8 additions & 1 deletion shared/pkg/scanner/grype/local_grype.go
@@ -68,13 +68,20 @@ func (s *LocalScanner) run(sourceType utils.SourceType, userInput string) {
return
}

var hash string
origInput := userInput
if sourceType == utils.SBOM {
syftJSONFilePath, cleanup, err := ConvertCycloneDXFileToSyftJSONFile(userInput, s.logger)
if err != nil {
ReportError(s.resultChan, fmt.Errorf("failed to convert sbom file: %w", err), s.logger)
return
}
defer cleanup()
origInput, hash, err = getOriginalInputAndHashFromSBOM(userInput)
if err != nil {
ReportError(s.resultChan, fmt.Errorf("failed to get original source and hash from SBOM: %w", err), s.logger)
return
}
userInput = syftJSONFilePath
}

@@ -102,7 +109,7 @@ func (s *LocalScanner) run(sourceType utils.SourceType, userInput string) {
}

s.logger.Infof("Sending successful results")
-s.resultChan <- CreateResults(doc, userInput, ScannerName)
+s.resultChan <- CreateResults(doc, origInput, ScannerName, hash)
}

func validateDBLoad(loadErr error, status *db.Status) error {
13 changes: 10 additions & 3 deletions shared/pkg/scanner/grype/remote_grype.go
@@ -72,14 +72,14 @@ func (s *RemoteScanner) Run(sourceType utils.SourceType, userInput string) error
}

func (s *RemoteScanner) run(sbomInputFilePath string) {
-sbomInputFilePath, cleanup, err := ConvertCycloneDXFileToSyftJSONFile(sbomInputFilePath, s.logger)
+syftJSONInputFilePath, cleanup, err := ConvertCycloneDXFileToSyftJSONFile(sbomInputFilePath, s.logger)
if err != nil {
ReportError(s.resultChan, fmt.Errorf("failed to convert sbom file: %w", err), s.logger)
return
}
defer cleanup()

-syftJSON, err := os.ReadFile(sbomInputFilePath)
+syftJSON, err := os.ReadFile(syftJSONInputFilePath)
if err != nil {
ReportError(s.resultChan, fmt.Errorf("failed to read input file after conversion: %w", err), s.logger)
return
@@ -91,8 +91,14 @@ func (s *RemoteScanner) run(sbomInputFilePath string) {
return
}

userInput, hash, err := getOriginalInputAndHashFromSBOM(sbomInputFilePath)
if err != nil {
ReportError(s.resultChan, fmt.Errorf("failed to get original source and hash from SBOM: %w", err), s.logger)
return
}

s.logger.Infof("Sending successful results")
-s.resultChan <- CreateResults(*doc, sbomInputFilePath, ScannerName)
+s.resultChan <- CreateResults(*doc, userInput, ScannerName, hash)
}

func (s *RemoteScanner) scanSbomWithGrypeServer(sbom []byte) (*grype_models.Document, error) {
@@ -110,5 +116,6 @@ func (s *RemoteScanner) scanSbomWithGrypeServer(sbom []byte) (*grype_models.Docu
if err != nil {
return nil, fmt.Errorf("failed to unmarshal vulnerabilities document: %v", err)
}

return &doc, nil
}
4 changes: 2 additions & 2 deletions shared/pkg/scanner/grype/test_data/nginx.results.json
@@ -11401,7 +11401,7 @@
},
"source": {
"type": "image",
"name": "",
"hash": ""
"name": "nginx",
"hash": "43ef2d67f4f458c2ac373ce0abf34ff6ad61616dd7cfd2880c6381d7904b6a94"
}
}
