Adds support for writing supplemental junitxml reports #471

Merged: 1 commit, Oct 11, 2024
20 changes: 20 additions & 0 deletions cmd/verify.go
@@ -19,9 +19,11 @@
import (
"errors"
"fmt"
"os"
"strings"
"time"

"github.com/redhat-certification/chart-verifier/internal/chartverifier/junitxml"
"github.com/redhat-certification/chart-verifier/internal/chartverifier/utils"
"github.com/redhat-certification/chart-verifier/internal/tool"
apiChecks "github.com/redhat-certification/chart-verifier/pkg/chartverifier/checks"
@@ -67,6 +69,8 @@
pgpPublicKeyFile string
// helm install timeout
helmInstallTimeout time.Duration
// writeJUnitXMLTo is where to write an additional junitxml representation of the outcome
writeJUnitXMLTo string
)

func buildChecks(enabled []string, unEnabled []string) ([]apiChecks.CheckName, []apiChecks.CheckName, error) {
@@ -215,6 +219,20 @@
return reportErr
}

// Failure to write JUnitXML result is non-fatal because junitxml reports are considered extra.
if writeJUnitXMLTo != "" {
utils.LogInfo(fmt.Sprintf("user requested additional junitxml report be written to %s", writeJUnitXMLTo))
junitOutput, err := junitxml.Format(*verifier.GetReport())
if err != nil {
utils.LogError(fmt.Sprintf("failed to convert report content to junitxml: %s", err))
} else {
err = os.WriteFile(writeJUnitXMLTo, junitOutput, 0o644)
if err != nil {
utils.LogError(fmt.Sprintf("failed to write junitxml output to specified path %s: %s", writeJUnitXMLTo, err))
}
}
}

utils.WriteStdOut(report)

utils.WriteLogs(outputFormatFlag)
@@ -250,6 +268,8 @@
cmd.Flags().BoolVarP(&webCatalogOnly, "web-catalog-only", "W", false, "set this to indicate that the distribution method is web catalog only (default: false)")
cmd.Flags().StringVarP(&pgpPublicKeyFile, "pgp-public-key", "k", "", "file containing gpg public key of the key used to sign the chart")
cmd.Flags().DurationVar(&helmInstallTimeout, "helm-install-timeout", 5*time.Minute, "helm install timeout")
cmd.Flags().StringVar(&writeJUnitXMLTo, "write-junitxml-to", "", "If set, will write a junitXML representation of the result to the specified path in addition to the configured output format")

return cmd
}

20 changes: 20 additions & 0 deletions docs/helm-chart-checks.md
@@ -148,6 +148,7 @@ This section provides help on the basic usage of Helm chart checks with the podm
-f, --set-values strings specify application and check configuration values in a YAML file or a URL (can specify multiple)
-E, --suppress-error-log suppress the error log (default: written to ./chartverifier/verifier-<timestamp>.log)
--timeout duration time to wait for completion of chart install and test (default 30m0s)
--write-junitxml-to string If set, will write a junitXML representation of the result to the specified path in addition to the configured output format
-w, --write-to-file write report to ./chartverifier/report.yaml (default: stdout)
Global Flags:
--config string config file (default is $HOME/.chart-verifier.yaml)
@@ -239,6 +240,25 @@ Alternatively, use the ```-w``` option to write the report directly to the file
```
If the file already exists, it is overwritten.

An additional report in JUnit XML format can be written by passing the desired
output path to the `--write-junitxml-to` flag.

```
$ podman run --rm -i \
-e KUBECONFIG=/.kube/config \
-v "${HOME}/.kube":/.kube:z \
-v $(pwd)/chartverifier:/app/chartverifier:z \
-w \
"quay.io/redhat-certification/chart-verifier" \
verify \
--write-junitxml-to /app/chartverifier/report-junit.xml \
<chart-uri>
```

JUnit XML is not an additional report format that can be used for certification
or validation with chart-verifier; it is intended only to be consumed by user
tooling. The YAML or JSON report is always written as specified.
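
For reference, the generated file follows the standard JUnit XML layout implied
by the struct tags in `internal/chartverifier/junitxml/junitxml.go`. A trimmed,
illustrative example (all names and values below are placeholders, not output
from a real run) might look like this:

```
<testsuites>
	<testsuite tests="2" failures="1" skipped="0" unknown="0" reportDigest="placeholder-digest" name="Red Hat Helm Chart Certification">
		<properties>
			<property name="profileType" value="partner"></property>
			<property name="profileVersion" value="v1.2"></property>
			<property name="webCatalogOnly" value="false"></property>
			<property name="verifierVersion" value="0.0.0"></property>
		</properties>
		<testcase classname="chart-uri" name="helm-lint">Helm lint successful</testcase>
		<testcase classname="chart-uri" name="chart-testing">
			<failure message="Failed" type="Mandatory">Chart test failure reason</failure>
			Chart test failure reason
		</testcase>
	</testsuite>
</testsuites>
```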

### The error log

By default, an error log is written to the file ```./chartverifier/verify-<timestamp>.yaml```. It includes any error messages, the results of each check, and additional information around chart testing. To get a copy of the error log, a volume mount to ```/app/chartverifier``` is required. For example:
150 changes: 150 additions & 0 deletions internal/chartverifier/junitxml/junitxml.go
@@ -0,0 +1,150 @@
package junitxml

import (
"encoding/xml"
"fmt"
"strconv"

"github.com/redhat-certification/chart-verifier/pkg/chartverifier/report"
)

type JUnitTestSuites struct {
XMLName xml.Name `xml:"testsuites"`
Suites []JUnitTestSuite `xml:"testsuite"`
}

type JUnitTestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Tests int `xml:"tests,attr"`
Failures int `xml:"failures,attr"`
Skipped int `xml:"skipped,attr"`
Unknown int `xml:"unknown,attr"`
ReportDigest string `xml:"reportDigest,attr"`
Name string `xml:"name,attr"`
Properties []JUnitProperty `xml:"properties>property,omitempty"`
TestCases []JUnitTestCase `xml:"testcase"`
}

type JUnitTestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Name string `xml:"name,attr"`
SkipMessage *JUnitSkipMessage `xml:"skipped,omitempty"`
Failure *JUnitMessage `xml:"failure,omitempty"`
Warning *JUnitMessage `xml:"warning,omitempty"`
SystemOut string `xml:"system-out,omitempty"`
Message string `xml:",chardata"`
}

type JUnitSkipMessage struct {
Message string `xml:"message,attr"`
}

type JUnitProperty struct {
Name string `xml:"name,attr"`
Value string `xml:"value,attr"`
}

type JUnitMessage struct {
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
Contents string `xml:",chardata"`
}

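// Format converts a chart-verifier report into an indented JUnit XML document, grouping checks by outcome.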
func Format(r report.Report) ([]byte, error) {
results := r.Results
checksByOutcome := map[string][]report.CheckReport{}

for i, result := range results {
checksByOutcome[result.Outcome] = append(checksByOutcome[result.Outcome], *results[i])
}

digest, err := r.GetReportDigest()
if err != nil {
// Prefer to continue even if digest calculation fails for some reason.
digest = "unknown"
}

testsuite := JUnitTestSuite{
Tests: len(results),
Failures: len(checksByOutcome[report.FailOutcomeType]),
Skipped: len(checksByOutcome[report.SkippedOutcomeType]),
Unknown: len(checksByOutcome[report.UnknownOutcomeType]),
ReportDigest: digest,
Name: "Red Hat Helm Chart Certification",
Properties: []JUnitProperty{
{Name: "profileType", Value: r.Metadata.ToolMetadata.Profile.VendorType},
{Name: "profileVersion", Value: r.Metadata.ToolMetadata.Profile.Version},
{Name: "webCatalogOnly", Value: strconv.FormatBool(r.Metadata.ToolMetadata.ProviderDelivery || r.Metadata.ToolMetadata.WebCatalogOnly)},
{Name: "verifierVersion", Value: r.Metadata.ToolMetadata.Version},
},
TestCases: []JUnitTestCase{},
}

for _, tc := range checksByOutcome[report.PassOutcomeType] {
c := JUnitTestCase{
Classname: r.Metadata.ToolMetadata.ChartUri,
Name: string(tc.Check),
Failure: nil,
Message: tc.Reason,
}
testsuite.TestCases = append(testsuite.TestCases, c)
}

for _, tc := range checksByOutcome[report.FailOutcomeType] {
c := JUnitTestCase{
Classname: r.Metadata.ToolMetadata.ChartUri,
Name: string(tc.Check),
Failure: &JUnitMessage{
Message: "Failed",
Type: string(tc.Type),
Contents: tc.Reason,
},
Message: tc.Reason,
}
testsuite.TestCases = append(testsuite.TestCases, c)
}

for _, tc := range checksByOutcome[report.UnknownOutcomeType] {
c := JUnitTestCase{
Classname: r.Metadata.ToolMetadata.ChartUri,
Name: string(tc.Check),
Failure: &JUnitMessage{
Message: "Unknown",
Type: string(tc.Type),
Contents: tc.Reason,
},
Message: tc.Reason,
}
testsuite.TestCases = append(testsuite.TestCases, c)
}

for _, tc := range checksByOutcome[report.SkippedOutcomeType] {
c := JUnitTestCase{
Classname: r.Metadata.ToolMetadata.ChartUri,
Name: string(tc.Check),
Failure: nil,
Message: tc.Reason,
SkipMessage: &JUnitSkipMessage{
Message: tc.Reason,
},
}
testsuite.TestCases = append(testsuite.TestCases, c)
}

suites := JUnitTestSuites{
Suites: []JUnitTestSuite{testsuite},
}

bytes, err := xml.MarshalIndent(suites, "", "\t")
if err != nil {
o := fmt.Errorf("error formatting results with formatter %s: %v",
"junitxml",
err,
)

return nil, o
}

return bytes, nil
}
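
As a rough sketch of how downstream tooling might consume the file this package produces: the package lives under `internal/` and is not importable from outside the module, so the standalone program below re-declares only the fields it reads, and the report path is hypothetical.

```go
package main

import (
	"encoding/xml"
	"fmt"
	"os"
)

// testSuites is a minimal mirror of the generated document; only the
// attributes and elements this consumer cares about are declared.
type testSuites struct {
	Suites []struct {
		Name     string `xml:"name,attr"`
		Tests    int    `xml:"tests,attr"`
		Failures int    `xml:"failures,attr"`
		Cases    []struct {
			Name    string `xml:"name,attr"`
			Failure *struct {
				Type     string `xml:"type,attr"`
				Contents string `xml:",chardata"`
			} `xml:"failure"`
		} `xml:"testcase"`
	} `xml:"testsuite"`
}

func main() {
	raw, err := os.ReadFile("chartverifier/report-junit.xml") // hypothetical path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	var ts testSuites
	if err := xml.Unmarshal(raw, &ts); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Print a one-line summary per suite and details for each failed check.
	for _, s := range ts.Suites {
		fmt.Printf("%s: %d checks, %d failed\n", s.Name, s.Tests, s.Failures)
		for _, c := range s.Cases {
			if c.Failure != nil {
				fmt.Printf("  FAIL %s (%s): %s\n", c.Name, c.Failure.Type, c.Failure.Contents)
			}
		}
	}
}
```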
4 changes: 2 additions & 2 deletions pkg/chartverifier/report/types.go
@@ -9,8 +9,8 @@ import (
)

type (
ReportFormat string
OutcomeType string
ReportFormat = string
OutcomeType = string
)

type ShaValue struct{}
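
Switching `ReportFormat` and `OutcomeType` from defined string types to type aliases makes their values interchangeable with plain `string`, which is presumably what lets the new junitxml code index a `map[string][]report.CheckReport` directly with `result.Outcome` and the outcome constants without conversions. A quick standalone illustration of the difference (not project code):

```go
package main

import "fmt"

// Defined is a distinct type: its values need an explicit conversion to be used as string.
type Defined string

// Aliased is just another name for string: its values are interchangeable with string.
type Aliased = string

func main() {
	byOutcome := map[string]int{}

	var d Defined = "PASS"
	var a Aliased = "PASS"

	byOutcome[string(d)]++ // defined type: conversion required to index a map[string]
	byOutcome[a]++         // alias: no conversion needed

	fmt.Println(byOutcome) // map[PASS:2]
}
```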