From 9a685f30fce7b44c59e5619164a53feea46cc650 Mon Sep 17 00:00:00 2001
From: "Jose R. Gonzalez"
Date: Tue, 27 Aug 2024 15:26:20 -0500
Subject: [PATCH] Adds support for writing supplemental junitxml reports

Signed-off-by: Jose R. Gonzalez
---
 cmd/verify.go                               |  20 +++
 docs/helm-chart-checks.md                   |  20 +++
 internal/chartverifier/junitxml/junitxml.go | 150 ++++++++++++++++++++
 pkg/chartverifier/report/types.go           |   4 +-
 4 files changed, 192 insertions(+), 2 deletions(-)
 create mode 100644 internal/chartverifier/junitxml/junitxml.go

diff --git a/cmd/verify.go b/cmd/verify.go
index 6c152f2e..6b95ef93 100644
--- a/cmd/verify.go
+++ b/cmd/verify.go
@@ -19,9 +19,11 @@ package cmd
 import (
     "errors"
     "fmt"
+    "os"
     "strings"
     "time"
 
+    "github.com/redhat-certification/chart-verifier/internal/chartverifier/junitxml"
     "github.com/redhat-certification/chart-verifier/internal/chartverifier/utils"
     "github.com/redhat-certification/chart-verifier/internal/tool"
     apiChecks "github.com/redhat-certification/chart-verifier/pkg/chartverifier/checks"
@@ -67,6 +69,8 @@ var (
     pgpPublicKeyFile string
     // helm install timeout
     helmInstallTimeout time.Duration
+    // writeJUnitXMLTo is where to write an additional junitxml representation of the outcome
+    writeJUnitXMLTo string
 )
 
 func buildChecks(enabled []string, unEnabled []string) ([]apiChecks.CheckName, []apiChecks.CheckName, error) {
@@ -215,6 +219,20 @@ func NewVerifyCmd(config *viper.Viper) *cobra.Command {
             return reportErr
         }
 
+        // Failure to write JUnitXML result is non-fatal because junitxml reports are considered extra.
+        if writeJUnitXMLTo != "" {
+            utils.LogInfo(fmt.Sprintf("user requested additional junitxml report be written to %s", writeJUnitXMLTo))
+            junitOutput, err := junitxml.Format(*verifier.GetReport())
+            if err != nil {
+                utils.LogError(fmt.Sprintf("failed to convert report content to junitxml: %s", err))
+            } else {
+                err = os.WriteFile(writeJUnitXMLTo, junitOutput, 0o644)
+                if err != nil {
+                    utils.LogError(fmt.Sprintf("failed to write junitxml output to specified path %s: %s", writeJUnitXMLTo, err))
+                }
+            }
+        }
+
         utils.WriteStdOut(report)
 
         utils.WriteLogs(outputFormatFlag)
@@ -250,6 +268,8 @@ func NewVerifyCmd(config *viper.Viper) *cobra.Command {
     cmd.Flags().BoolVarP(&webCatalogOnly, "web-catalog-only", "W", false, "set this to indicate that the distribution method is web catalog only (default: false)")
     cmd.Flags().StringVarP(&pgpPublicKeyFile, "pgp-public-key", "k", "", "file containing gpg public key of the key used to sign the chart")
     cmd.Flags().DurationVar(&helmInstallTimeout, "helm-install-timeout", 5*time.Minute, "helm install timeout")
+    cmd.Flags().StringVar(&writeJUnitXMLTo, "write-junitxml-to", "", "If set, will write a junitXML representation of the result to the specified path in addition to the configured output format")
+
     return cmd
 }
 
diff --git a/docs/helm-chart-checks.md b/docs/helm-chart-checks.md
index caff548a..22aa78e6 100644
--- a/docs/helm-chart-checks.md
+++ b/docs/helm-chart-checks.md
@@ -148,6 +148,7 @@ This section provides help on the basic usage of Helm chart checks with the podm
   -f, --set-values strings         specify application and check configuration values in a YAML file or a URL (can specify multiple)
   -E, --suppress-error-log         suppress the error log (default: written to ./chartverifier/verifier-.log)
       --timeout duration           time to wait for completion of chart install and test (default 30m0s)
+      --write-junitxml-to string   If set, will write a junitXML representation of the result to the specified path in addition to the configured output format
   -w, --write-to-file              write report to ./chartverifier/report.yaml (default: stdout)
 Global Flags:
       --config string   config file (default is $HOME/.chart-verifier.yaml)
@@ -239,6 +240,25 @@ Alternatively, use the ```-w``` option to write the report directly to the file
 ```
 If the file already exists it is overwritten.
 
+An additional report can be written in JUnit XML format if requested with the
+`--write-junitxml-to` flag, passing in the desired output filename.
+
+```
+  $ podman run --rm -i \
+          -e KUBECONFIG=/.kube/config \
+          -v "${HOME}/.kube":/.kube:z \
+          -v $(pwd)/chartverifier:/app/chartverifier:z \
+          -w \
+          "quay.io/redhat-certification/chart-verifier" \
+          verify \
+          --write-junitxml-to /app/chartverifier/report-junit.xml \
+
+```
+
+The JUnit XML report cannot be used for certification or for validation with
+chart-verifier; it is intended only for consumption by user tooling. The YAML
+or JSON report is always written as specified.
+
 ### The error log
 
 By default an error log is written to file ```./chartverifier/verify-.yaml```. It includes any error messages, the results of each check and additional information around chart testing. To get a copy of the error log a volume mount is required to ```/app/chartverifer```. For example:
diff --git a/internal/chartverifier/junitxml/junitxml.go b/internal/chartverifier/junitxml/junitxml.go
new file mode 100644
index 00000000..878e8b06
--- /dev/null
+++ b/internal/chartverifier/junitxml/junitxml.go
@@ -0,0 +1,150 @@
+package junitxml
+
+import (
+    "encoding/xml"
+    "fmt"
+    "strconv"
+
+    "github.com/redhat-certification/chart-verifier/pkg/chartverifier/report"
+)
+
+type JUnitTestSuites struct {
+    XMLName xml.Name         `xml:"testsuites"`
+    Suites  []JUnitTestSuite `xml:"testsuite"`
+}
+
+type JUnitTestSuite struct {
+    XMLName      xml.Name        `xml:"testsuite"`
+    Tests        int             `xml:"tests,attr"`
+    Failures     int             `xml:"failures,attr"`
+    Skipped      int             `xml:"skipped,attr"`
+    Unknown      int             `xml:"unknown,attr"`
+    ReportDigest string          `xml:"reportDigest,attr"`
+    Name         string          `xml:"name,attr"`
+    Properties   []JUnitProperty `xml:"properties>property,omitempty"`
+    TestCases    []JUnitTestCase `xml:"testcase"`
+}
+
+type JUnitTestCase struct {
+    XMLName     xml.Name          `xml:"testcase"`
+    Classname   string            `xml:"classname,attr"`
+    Name        string            `xml:"name,attr"`
+    SkipMessage *JUnitSkipMessage `xml:"skipped,omitempty"`
+    Failure     *JUnitMessage     `xml:"failure,omitempty"`
+    Warning     *JUnitMessage     `xml:"warning,omitempty"`
+    SystemOut   string            `xml:"system-out,omitempty"`
+    Message     string            `xml:",chardata"`
+}
+
+type JUnitSkipMessage struct {
+    Message string `xml:"message,attr"`
+}
+
+type JUnitProperty struct {
+    Name  string `xml:"name,attr"`
+    Value string `xml:"value,attr"`
+}
+
+type JUnitMessage struct {
+    Message  string `xml:"message,attr"`
+    Type     string `xml:"type,attr"`
+    Contents string `xml:",chardata"`
+}
+
+func Format(r report.Report) ([]byte, error) {
+    results := r.Results
+    checksByOutcome := map[string][]report.CheckReport{}
+
+    for i, result := range results {
+        checksByOutcome[result.Outcome] = append(checksByOutcome[result.Outcome], *results[i])
+    }
+
+    digest, err := r.GetReportDigest()
+    if err != nil {
+        // Prefer to continue even if digest calculation fails for some reason.
+        digest = "unknown"
+    }
+
+    testsuite := JUnitTestSuite{
+        Tests:        len(results),
+        Failures:     len(checksByOutcome[report.FailOutcomeType]),
+        Skipped:      len(checksByOutcome[report.SkippedOutcomeType]),
+        Unknown:      len(checksByOutcome[report.UnknownOutcomeType]),
+        ReportDigest: digest,
+        Name:         "Red Hat Helm Chart Certification",
+        Properties: []JUnitProperty{
+            {Name: "profileType", Value: r.Metadata.ToolMetadata.Profile.VendorType},
+            {Name: "profileVersion", Value: r.Metadata.ToolMetadata.Profile.Version},
+            {Name: "webCatalogOnly", Value: strconv.FormatBool(r.Metadata.ToolMetadata.ProviderDelivery || r.Metadata.ToolMetadata.WebCatalogOnly)},
+            {Name: "verifierVersion", Value: r.Metadata.ToolMetadata.Version},
+        },
+        TestCases: []JUnitTestCase{},
+    }
+
+    for _, tc := range checksByOutcome[report.PassOutcomeType] {
+        c := JUnitTestCase{
+            Classname: r.Metadata.ToolMetadata.ChartUri,
+            Name:      string(tc.Check),
+            Failure:   nil,
+            Message:   tc.Reason,
+        }
+        testsuite.TestCases = append(testsuite.TestCases, c)
+    }
+
+    for _, tc := range checksByOutcome[report.FailOutcomeType] {
+        c := JUnitTestCase{
+            Classname: r.Metadata.ToolMetadata.ChartUri,
+            Name:      string(tc.Check),
+            Failure: &JUnitMessage{
+                Message:  "Failed",
+                Type:     string(tc.Type),
+                Contents: tc.Reason,
+            },
+            Message: tc.Reason,
+        }
+        testsuite.TestCases = append(testsuite.TestCases, c)
+    }
+
+    for _, tc := range checksByOutcome[report.UnknownOutcomeType] {
+        c := JUnitTestCase{
+            Classname: r.Metadata.ToolMetadata.ChartUri,
+            Name:      string(tc.Check),
+            Failure: &JUnitMessage{
+                Message:  "Unknown",
+                Type:     string(tc.Type),
+                Contents: tc.Reason,
+            },
+            Message: tc.Reason,
+        }
+        testsuite.TestCases = append(testsuite.TestCases, c)
+    }
+
+    for _, tc := range checksByOutcome[report.SkippedOutcomeType] {
+        c := JUnitTestCase{
+            Classname: r.Metadata.ToolMetadata.ChartUri,
+            Name:      string(tc.Check),
+            Failure:   nil,
+            Message:   tc.Reason,
+            SkipMessage: &JUnitSkipMessage{
+                Message: tc.Reason,
+            },
+        }
+        testsuite.TestCases = append(testsuite.TestCases, c)
+    }
+
+    suites := JUnitTestSuites{
+        Suites: []JUnitTestSuite{testsuite},
+    }
+
+    bytes, err := xml.MarshalIndent(suites, "", "\t")
+    if err != nil {
+        o := fmt.Errorf("error formatting results with formatter %s: %v",
+            "junitxml",
+            err,
+        )
+
+        return nil, o
+    }
+
+    return bytes, nil
+}
diff --git a/pkg/chartverifier/report/types.go b/pkg/chartverifier/report/types.go
index a8c593ef..d1e4c2af 100644
--- a/pkg/chartverifier/report/types.go
+++ b/pkg/chartverifier/report/types.go
@@ -9,8 +9,8 @@ import (
 )
 
 type (
-    ReportFormat string
-    OutcomeType  string
+    ReportFormat = string
+    OutcomeType  = string
 )
 
 type ShaValue struct{}
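
A note on the `pkg/chartverifier/report/types.go` hunk above: changing `ReportFormat` and `OutcomeType` from defined types to type aliases appears to be what lets the new formatter key a plain `map[string]` with `result.Outcome` and the `*OutcomeType` constants without explicit conversions (see `checksByOutcome` in `Format`). The sketch below illustrates the difference; the names `Outcome`, `FailOutcome`, and `byOutcome` are invented for the example and are not taken from the package.

```
package main

import "fmt"

// With a defined type (type Outcome string), an Outcome value could not be used
// directly as a map[string] key or compared against plain strings without a
// string(...) conversion. With a type alias, Outcome and string are the same
// type, so no conversion is needed.
type Outcome = string

const FailOutcome Outcome = "FAIL"

func main() {
	// Mirrors the shape of checksByOutcome in junitxml.Format: keyed by plain strings.
	byOutcome := map[string][]string{}

	var o Outcome = FailOutcome
	byOutcome[o] = append(byOutcome[o], "chart-testing") // compiles because Outcome aliases string

	fmt.Println(len(byOutcome[FailOutcome])) // prints 1
}
```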
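
For readers who want to see the resulting report rather than infer it from the struct tags, the output of `Format` should look roughly like the sketch below. The chart URI, digest, versions, and check names are invented for illustration, and the exact indentation and the placement of the reason text relative to the `<failure>` element are left to `encoding/xml`.

```
<testsuites>
  <testsuite tests="2" failures="1" skipped="0" unknown="0" reportDigest="sha256:feedbeef" name="Red Hat Helm Chart Certification">
    <properties>
      <property name="profileType" value="partner"></property>
      <property name="profileVersion" value="v1.2"></property>
      <property name="webCatalogOnly" value="false"></property>
      <property name="verifierVersion" value="1.13.0"></property>
    </properties>
    <testcase classname="https://example.com/charts/sample-0.1.0.tgz" name="has-readme">Chart has a README</testcase>
    <testcase classname="https://example.com/charts/sample-0.1.0.tgz" name="chart-testing"><failure message="Failed" type="Mandatory">error running helm test</failure>error running helm test</testcase>
  </testsuite>
</testsuites>
```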