diff --git a/go.mod b/go.mod index 0f2f1a8ba..473da7f49 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.21.0 require ( github.com/envoyproxy/go-control-plane v0.12.0 github.com/golang/protobuf v1.5.4 - github.com/open-policy-agent/opa v0.65.0 + github.com/open-policy-agent/opa v0.66.0 github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.19.1 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 @@ -23,6 +23,7 @@ require ( ) require ( + github.com/containerd/errdefs v0.1.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect @@ -52,7 +53,7 @@ require ( github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/cncf/xds/go v0.0.0-20240318125728-8a4994d93e50 // indirect - github.com/containerd/containerd v1.7.17 // indirect + github.com/containerd/containerd v1.7.18 // indirect github.com/containerd/log v0.1.0 // indirect github.com/dgraph-io/badger/v3 v3.2103.5 // indirect github.com/dgraph-io/ristretto v0.1.1 // indirect @@ -87,7 +88,7 @@ require ( github.com/rivo/uniseg v0.2.0 // indirect github.com/sergi/go-diff v1.3.1 // indirect github.com/sirupsen/logrus v1.9.3 // indirect - github.com/spf13/cobra v1.8.0 // indirect + github.com/spf13/cobra v1.8.1 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/tchap/go-patricia/v2 v2.3.1 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect diff --git a/go.sum b/go.sum index 5a71b6e7e..e4c1aba95 100644 --- a/go.sum +++ b/go.sum @@ -37,17 +37,19 @@ github.com/cncf/xds/go v0.0.0-20240318125728-8a4994d93e50 h1:DBmgJDC9dTfkVyGgipa github.com/cncf/xds/go v0.0.0-20240318125728-8a4994d93e50/go.mod h1:5e1+Vvlzido69INQaVO6d87Qn543Xr6nooe9Kz7oBFM= github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM= github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= -github.com/containerd/containerd v1.7.17 h1:KjNnn0+tAVQHAoaWRjmdak9WlvnFR/8rU1CHHy8Rm2A= -github.com/containerd/containerd v1.7.17/go.mod h1:vK+hhT4TIv2uejlcDlbVIc8+h/BqtKLIyNrtCZol8lI= +github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao= +github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4= github.com/containerd/continuity v0.4.2 h1:v3y/4Yz5jwnvqPKJJ+7Wf93fyWoCB3F5EclWG023MDM= github.com/containerd/continuity v0.4.2/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= +github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM= +github.com/containerd/errdefs v0.1.0/go.mod h1:YgWiiHtLmSeBrvpw+UfPijzbLaB77mEG1WwJTDETIV0= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/davecgh/go-spew 
v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= @@ -171,8 +173,8 @@ github.com/moby/sys/mountinfo v0.6.2 h1:BzJjoreD5BMFNmD9Rus6gdd1pLuecOFPt8wC+Vyg github.com/moby/sys/mountinfo v0.6.2/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/open-policy-agent/opa v0.65.0 h1:wnEU0pEk80YjFi3yoDbFTMluyNssgPI4VJNJetD9a4U= -github.com/open-policy-agent/opa v0.65.0/go.mod h1:CNoLL44LuCH1Yot/zoeZXRKFylQtCJV+oGFiP2TeeEc= +github.com/open-policy-agent/opa v0.66.0 h1:DbrvfJQja0FBRcPOB3Z/BOckocN+M4ApNWyNhSRJt0w= +github.com/open-policy-agent/opa v0.66.0/go.mod h1:EIgNnJcol7AvQR/IcWLwL13k64gHVbNAVG46b2G+/EY= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= @@ -226,8 +228,8 @@ github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkU github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= diff --git a/vendor/github.com/containerd/containerd/archive/compression/compression.go b/vendor/github.com/containerd/containerd/archive/compression/compression.go index 31bbe4124..23ddfab1a 100644 --- a/vendor/github.com/containerd/containerd/archive/compression/compression.go +++ b/vendor/github.com/containerd/containerd/archive/compression/compression.go @@ -29,7 +29,7 @@ import ( "strconv" "sync" - "github.com/containerd/containerd/log" + "github.com/containerd/log" "github.com/klauspost/compress/zstd" ) diff --git a/vendor/github.com/containerd/containerd/content/helpers.go b/vendor/github.com/containerd/containerd/content/helpers.go index d3a82cb6f..f4763847d 100644 --- a/vendor/github.com/containerd/containerd/content/helpers.go +++ b/vendor/github.com/containerd/containerd/content/helpers.go @@ -24,9 +24,9 @@ import ( "sync" "time" - "github.com/containerd/containerd/errdefs" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/pkg/randutil" + "github.com/containerd/errdefs" + "github.com/containerd/log" "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/content/local/locks.go b/vendor/github.com/containerd/containerd/content/local/locks.go index 1e59f39b3..4caffcc02 
100644 --- a/vendor/github.com/containerd/containerd/content/local/locks.go +++ b/vendor/github.com/containerd/containerd/content/local/locks.go @@ -21,7 +21,7 @@ import ( "sync" "time" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) // Handles locking references diff --git a/vendor/github.com/containerd/containerd/content/local/readerat.go b/vendor/github.com/containerd/containerd/content/local/readerat.go index 899e85c0b..7918844b1 100644 --- a/vendor/github.com/containerd/containerd/content/local/readerat.go +++ b/vendor/github.com/containerd/containerd/content/local/readerat.go @@ -22,7 +22,7 @@ import ( "os" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) // readerat implements io.ReaderAt in a completely stateless manner by opening diff --git a/vendor/github.com/containerd/containerd/content/local/store.go b/vendor/github.com/containerd/containerd/content/local/store.go index baae3565b..feecec79f 100644 --- a/vendor/github.com/containerd/containerd/content/local/store.go +++ b/vendor/github.com/containerd/containerd/content/local/store.go @@ -28,10 +28,10 @@ import ( "time" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/filters" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/pkg/randutil" + "github.com/containerd/errdefs" + "github.com/containerd/log" "github.com/sirupsen/logrus" "github.com/opencontainers/go-digest" diff --git a/vendor/github.com/containerd/containerd/content/local/writer.go b/vendor/github.com/containerd/containerd/content/local/writer.go index b187e524c..f82b131e1 100644 --- a/vendor/github.com/containerd/containerd/content/local/writer.go +++ b/vendor/github.com/containerd/containerd/content/local/writer.go @@ -27,8 +27,8 @@ import ( "time" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" - "github.com/containerd/containerd/log" + "github.com/containerd/errdefs" + "github.com/containerd/log" "github.com/opencontainers/go-digest" ) diff --git a/vendor/github.com/containerd/containerd/errdefs/errdefs_deprecated.go b/vendor/github.com/containerd/containerd/errdefs/errdefs_deprecated.go new file mode 100644 index 000000000..c6a0d843e --- /dev/null +++ b/vendor/github.com/containerd/containerd/errdefs/errdefs_deprecated.go @@ -0,0 +1,116 @@ +/* + Copyright The containerd Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package errdefs defines the common errors used throughout containerd +// packages. +// +// Use with fmt.Errorf to add context to an error. +// +// To detect an error class, use the IsXXX functions to tell whether an error +// is of a certain type. +// +// The functions ToGRPC and FromGRPC can be used to map server-side and +// client-side errors to the correct types. +package errdefs + +import ( + "github.com/containerd/errdefs" +) + +// Definitions of common error types used throughout containerd. 
All containerd +// errors returned by most packages will map into one of these errors classes. +// Packages should return errors of these types when they want to instruct a +// client to take a particular action. +// +// For the most part, we just try to provide local grpc errors. Most conditions +// map very well to those defined by grpc. +var ( + ErrUnknown = errdefs.ErrUnknown + ErrInvalidArgument = errdefs.ErrInvalidArgument + ErrNotFound = errdefs.ErrNotFound + ErrAlreadyExists = errdefs.ErrAlreadyExists + ErrFailedPrecondition = errdefs.ErrFailedPrecondition + ErrUnavailable = errdefs.ErrUnavailable + ErrNotImplemented = errdefs.ErrNotImplemented +) + +// IsInvalidArgument returns true if the error is due to an invalid argument +func IsInvalidArgument(err error) bool { + return errdefs.IsInvalidArgument(err) +} + +// IsNotFound returns true if the error is due to a missing object +func IsNotFound(err error) bool { + return errdefs.IsNotFound(err) +} + +// IsAlreadyExists returns true if the error is due to an already existing +// metadata item +func IsAlreadyExists(err error) bool { + return errdefs.IsAlreadyExists(err) +} + +// IsFailedPrecondition returns true if an operation could not proceed to the +// lack of a particular condition +func IsFailedPrecondition(err error) bool { + return errdefs.IsFailedPrecondition(err) +} + +// IsUnavailable returns true if the error is due to a resource being unavailable +func IsUnavailable(err error) bool { + return errdefs.IsUnavailable(err) +} + +// IsNotImplemented returns true if the error is due to not being implemented +func IsNotImplemented(err error) bool { + return errdefs.IsNotImplemented(err) +} + +// IsCanceled returns true if the error is due to `context.Canceled`. +func IsCanceled(err error) bool { + return errdefs.IsCanceled(err) +} + +// IsDeadlineExceeded returns true if the error is due to +// `context.DeadlineExceeded`. +func IsDeadlineExceeded(err error) bool { + return errdefs.IsDeadlineExceeded(err) +} + +// ToGRPC will attempt to map the backend containerd error into a grpc error, +// using the original error message as a description. +// +// Further information may be extracted from certain errors depending on their +// type. +// +// If the error is unmapped, the original error will be returned to be handled +// by the regular grpc error handling stack. +func ToGRPC(err error) error { + return errdefs.ToGRPC(err) +} + +// ToGRPCf maps the error to grpc error codes, assembling the formatting string +// and combining it with the target error string. +// +// This is equivalent to errdefs.ToGRPC(fmt.Errorf("%s: %w", fmt.Sprintf(format, args...), err)) +func ToGRPCf(err error, format string, args ...interface{}) error { + return errdefs.ToGRPCf(err, format, args...) 
+} + +// FromGRPC returns the underlying error from a grpc service based on the grpc error code +func FromGRPC(err error) error { + return errdefs.FromGRPC(err) +} diff --git a/vendor/github.com/containerd/containerd/filters/filter.go b/vendor/github.com/containerd/containerd/filters/filter.go index e13f2625c..dcc569a4b 100644 --- a/vendor/github.com/containerd/containerd/filters/filter.go +++ b/vendor/github.com/containerd/containerd/filters/filter.go @@ -70,7 +70,7 @@ package filters import ( "regexp" - "github.com/containerd/containerd/log" + "github.com/containerd/log" ) // Filter matches specific resources based the provided filter diff --git a/vendor/github.com/containerd/containerd/filters/parser.go b/vendor/github.com/containerd/containerd/filters/parser.go index 32767909b..f07fd33bd 100644 --- a/vendor/github.com/containerd/containerd/filters/parser.go +++ b/vendor/github.com/containerd/containerd/filters/parser.go @@ -20,7 +20,7 @@ import ( "fmt" "io" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) /* diff --git a/vendor/github.com/containerd/containerd/images/handlers.go b/vendor/github.com/containerd/containerd/images/handlers.go index 077d88e78..162e87a86 100644 --- a/vendor/github.com/containerd/containerd/images/handlers.go +++ b/vendor/github.com/containerd/containerd/images/handlers.go @@ -23,8 +23,8 @@ import ( "sort" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/platforms" + "github.com/containerd/errdefs" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "golang.org/x/sync/errgroup" "golang.org/x/sync/semaphore" diff --git a/vendor/github.com/containerd/containerd/images/image.go b/vendor/github.com/containerd/containerd/images/image.go index 2d2e36a9a..3e2abc75f 100644 --- a/vendor/github.com/containerd/containerd/images/image.go +++ b/vendor/github.com/containerd/containerd/images/image.go @@ -24,9 +24,9 @@ import ( "time" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/platforms" + "github.com/containerd/errdefs" + "github.com/containerd/log" digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) @@ -268,6 +268,9 @@ func Platforms(ctx context.Context, provider content.Provider, image ocispec.Des var platformSpecs []ocispec.Platform return platformSpecs, Walk(ctx, Handlers(HandlerFunc(func(ctx context.Context, desc ocispec.Descriptor) ([]ocispec.Descriptor, error) { if desc.Platform != nil { + if desc.Platform.OS == "unknown" || desc.Platform.Architecture == "unknown" { + return nil, ErrSkipDesc + } platformSpecs = append(platformSpecs, *desc.Platform) return nil, ErrSkipDesc } diff --git a/vendor/github.com/containerd/containerd/images/mediatypes.go b/vendor/github.com/containerd/containerd/images/mediatypes.go index d3b28d42d..cd51aa5eb 100644 --- a/vendor/github.com/containerd/containerd/images/mediatypes.go +++ b/vendor/github.com/containerd/containerd/images/mediatypes.go @@ -22,7 +22,7 @@ import ( "sort" "strings" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/labels/validate.go b/vendor/github.com/containerd/containerd/labels/validate.go index f83b5dde2..6f23cdd7c 100644 --- a/vendor/github.com/containerd/containerd/labels/validate.go +++ 
b/vendor/github.com/containerd/containerd/labels/validate.go @@ -19,7 +19,7 @@ package labels import ( "fmt" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) const ( diff --git a/vendor/github.com/containerd/containerd/log/context_deprecated.go b/vendor/github.com/containerd/containerd/log/context_deprecated.go deleted file mode 100644 index 9e9e8b491..000000000 --- a/vendor/github.com/containerd/containerd/log/context_deprecated.go +++ /dev/null @@ -1,149 +0,0 @@ -/* - Copyright The containerd Authors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package log - -import ( - "context" - - "github.com/containerd/log" -) - -// G is a shorthand for [GetLogger]. -// -// Deprecated: use [log.G]. -var G = log.G - -// L is an alias for the standard logger. -// -// Deprecated: use [log.L]. -var L = log.L - -// Fields type to pass to "WithFields". -// -// Deprecated: use [log.Fields]. -type Fields = log.Fields - -// Entry is a logging entry. -// -// Deprecated: use [log.Entry]. -type Entry = log.Entry - -// RFC3339NanoFixed is [time.RFC3339Nano] with nanoseconds padded using -// zeros to ensure the formatted time is always the same number of -// characters. -// -// Deprecated: use [log.RFC3339NanoFixed]. -const RFC3339NanoFixed = log.RFC3339NanoFixed - -// Level is a logging level. -// -// Deprecated: use [log.Level]. -type Level = log.Level - -// Supported log levels. -const ( - // TraceLevel level. - // - // Deprecated: use [log.TraceLevel]. - TraceLevel Level = log.TraceLevel - - // DebugLevel level. - // - // Deprecated: use [log.DebugLevel]. - DebugLevel Level = log.DebugLevel - - // InfoLevel level. - // - // Deprecated: use [log.InfoLevel]. - InfoLevel Level = log.InfoLevel - - // WarnLevel level. - // - // Deprecated: use [log.WarnLevel]. - WarnLevel Level = log.WarnLevel - - // ErrorLevel level - // - // Deprecated: use [log.ErrorLevel]. - ErrorLevel Level = log.ErrorLevel - - // FatalLevel level. - // - // Deprecated: use [log.FatalLevel]. - FatalLevel Level = log.FatalLevel - - // PanicLevel level. - // - // Deprecated: use [log.PanicLevel]. - PanicLevel Level = log.PanicLevel -) - -// SetLevel sets log level globally. It returns an error if the given -// level is not supported. -// -// Deprecated: use [log.SetLevel]. -func SetLevel(level string) error { - return log.SetLevel(level) -} - -// GetLevel returns the current log level. -// -// Deprecated: use [log.GetLevel]. -func GetLevel() log.Level { - return log.GetLevel() -} - -// OutputFormat specifies a log output format. -// -// Deprecated: use [log.OutputFormat]. -type OutputFormat = log.OutputFormat - -// Supported log output formats. -const ( - // TextFormat represents the text logging format. - // - // Deprecated: use [log.TextFormat]. - TextFormat log.OutputFormat = "text" - - // JSONFormat represents the JSON logging format. - // - // Deprecated: use [log.JSONFormat]. - JSONFormat log.OutputFormat = "json" -) - -// SetFormat sets the log output format. -// -// Deprecated: use [log.SetFormat]. 
-func SetFormat(format OutputFormat) error { - return log.SetFormat(format) -} - -// WithLogger returns a new context with the provided logger. Use in -// combination with logger.WithField(s) for great effect. -// -// Deprecated: use [log.WithLogger]. -func WithLogger(ctx context.Context, logger *log.Entry) context.Context { - return log.WithLogger(ctx, logger) -} - -// GetLogger retrieves the current logger from the context. If no logger is -// available, the default logger is returned. -// -// Deprecated: use [log.GetLogger]. -func GetLogger(ctx context.Context) *log.Entry { - return log.GetLogger(ctx) -} diff --git a/vendor/github.com/containerd/containerd/platforms/cpuinfo.go b/vendor/github.com/containerd/containerd/platforms/cpuinfo.go index 8c600fc96..91f50e8c8 100644 --- a/vendor/github.com/containerd/containerd/platforms/cpuinfo.go +++ b/vendor/github.com/containerd/containerd/platforms/cpuinfo.go @@ -20,7 +20,7 @@ import ( "runtime" "sync" - "github.com/containerd/containerd/log" + "github.com/containerd/log" ) // Present the ARM instruction set architecture, eg: v7, v8 diff --git a/vendor/github.com/containerd/containerd/platforms/cpuinfo_linux.go b/vendor/github.com/containerd/containerd/platforms/cpuinfo_linux.go index 722d86c35..e07aa99cc 100644 --- a/vendor/github.com/containerd/containerd/platforms/cpuinfo_linux.go +++ b/vendor/github.com/containerd/containerd/platforms/cpuinfo_linux.go @@ -24,7 +24,7 @@ import ( "runtime" "strings" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" "golang.org/x/sys/unix" ) diff --git a/vendor/github.com/containerd/containerd/platforms/cpuinfo_other.go b/vendor/github.com/containerd/containerd/platforms/cpuinfo_other.go index fa5f19c42..8cbcbb24a 100644 --- a/vendor/github.com/containerd/containerd/platforms/cpuinfo_other.go +++ b/vendor/github.com/containerd/containerd/platforms/cpuinfo_other.go @@ -22,7 +22,7 @@ import ( "fmt" "runtime" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) func getCPUVariant() (string, error) { diff --git a/vendor/github.com/containerd/containerd/platforms/platforms.go b/vendor/github.com/containerd/containerd/platforms/platforms.go index 56613b076..44bc24a5c 100644 --- a/vendor/github.com/containerd/containerd/platforms/platforms.go +++ b/vendor/github.com/containerd/containerd/platforms/platforms.go @@ -116,7 +116,7 @@ import ( specs "github.com/opencontainers/image-spec/specs-go/v1" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" ) var ( diff --git a/vendor/github.com/containerd/containerd/remotes/docker/auth/fetch.go b/vendor/github.com/containerd/containerd/remotes/docker/auth/fetch.go index 64c6a38f9..244e03509 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/auth/fetch.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/auth/fetch.go @@ -26,9 +26,9 @@ import ( "strings" "time" - "github.com/containerd/containerd/log" remoteserrors "github.com/containerd/containerd/remotes/errors" "github.com/containerd/containerd/version" + "github.com/containerd/log" ) var ( diff --git a/vendor/github.com/containerd/containerd/remotes/docker/authorizer.go b/vendor/github.com/containerd/containerd/remotes/docker/authorizer.go index ebd69c16c..2fd1118bc 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/authorizer.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/authorizer.go @@ -25,10 +25,10 @@ import ( "strings" "sync" - "github.com/containerd/containerd/errdefs" - 
"github.com/containerd/containerd/log" "github.com/containerd/containerd/remotes/docker/auth" remoteerrors "github.com/containerd/containerd/remotes/errors" + "github.com/containerd/errdefs" + "github.com/containerd/log" ) type dockerAuthorizer struct { diff --git a/vendor/github.com/containerd/containerd/remotes/docker/converter.go b/vendor/github.com/containerd/containerd/remotes/docker/converter.go index d7dca0d36..95a68d70e 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/converter.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/converter.go @@ -24,8 +24,8 @@ import ( "github.com/containerd/containerd/content" "github.com/containerd/containerd/images" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/remotes" + "github.com/containerd/log" digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/converter_fuzz.go b/vendor/github.com/containerd/containerd/remotes/docker/converter_fuzz.go index 908205392..aa7cf4666 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/converter_fuzz.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/converter_fuzz.go @@ -24,7 +24,7 @@ import ( fuzz "github.com/AdaLogics/go-fuzz-headers" "github.com/containerd/containerd/content/local" - "github.com/containerd/containerd/log" + "github.com/containerd/log" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/sirupsen/logrus" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/fetcher.go b/vendor/github.com/containerd/containerd/remotes/docker/fetcher.go index ecf245933..3589db3ef 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/fetcher.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/fetcher.go @@ -26,9 +26,9 @@ import ( "net/url" "strings" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" - "github.com/containerd/containerd/log" + "github.com/containerd/errdefs" + "github.com/containerd/log" digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/handler.go b/vendor/github.com/containerd/containerd/remotes/docker/handler.go index 27638ccc0..ccec49013 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/handler.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/handler.go @@ -25,8 +25,8 @@ import ( "github.com/containerd/containerd/content" "github.com/containerd/containerd/images" "github.com/containerd/containerd/labels" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/reference" + "github.com/containerd/log" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/httpreadseeker.go b/vendor/github.com/containerd/containerd/remotes/docker/httpreadseeker.go index 824359339..6739e7904 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/httpreadseeker.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/httpreadseeker.go @@ -21,8 +21,8 @@ import ( "fmt" "io" - "github.com/containerd/containerd/errdefs" - "github.com/containerd/containerd/log" + "github.com/containerd/errdefs" + "github.com/containerd/log" ) const maxRetry = 3 diff --git a/vendor/github.com/containerd/containerd/remotes/docker/pusher.go 
b/vendor/github.com/containerd/containerd/remotes/docker/pusher.go index 218a5dd30..a27cda0b5 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/pusher.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/pusher.go @@ -29,11 +29,11 @@ import ( "time" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/remotes" remoteserrors "github.com/containerd/containerd/remotes/errors" + "github.com/containerd/errdefs" + "github.com/containerd/log" digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/resolver.go b/vendor/github.com/containerd/containerd/remotes/docker/resolver.go index ce1f64625..b2b124214 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/resolver.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/resolver.go @@ -28,15 +28,15 @@ import ( "path" "strings" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/reference" "github.com/containerd/containerd/remotes" "github.com/containerd/containerd/remotes/docker/schema1" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. remoteerrors "github.com/containerd/containerd/remotes/errors" "github.com/containerd/containerd/tracing" "github.com/containerd/containerd/version" + "github.com/containerd/errdefs" + "github.com/containerd/log" "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ) diff --git a/vendor/github.com/containerd/containerd/remotes/docker/schema1/converter.go b/vendor/github.com/containerd/containerd/remotes/docker/schema1/converter.go index 8c9e520cd..75bd9875a 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/schema1/converter.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/schema1/converter.go @@ -34,11 +34,11 @@ import ( "github.com/containerd/containerd/archive/compression" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" "github.com/containerd/containerd/labels" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/remotes" + "github.com/containerd/errdefs" + "github.com/containerd/log" digest "github.com/opencontainers/go-digest" specs "github.com/opencontainers/image-spec/specs-go" ocispec "github.com/opencontainers/image-spec/specs-go/v1" diff --git a/vendor/github.com/containerd/containerd/remotes/docker/status.go b/vendor/github.com/containerd/containerd/remotes/docker/status.go index 1a9227725..c7764758f 100644 --- a/vendor/github.com/containerd/containerd/remotes/docker/status.go +++ b/vendor/github.com/containerd/containerd/remotes/docker/status.go @@ -21,7 +21,7 @@ import ( "sync" "github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" + "github.com/containerd/errdefs" "github.com/moby/locker" ) diff --git a/vendor/github.com/containerd/containerd/remotes/handlers.go b/vendor/github.com/containerd/containerd/remotes/handlers.go index f24669dc4..912b85bfe 100644 --- a/vendor/github.com/containerd/containerd/remotes/handlers.go +++ b/vendor/github.com/containerd/containerd/remotes/handlers.go @@ -26,11 +26,11 @@ import ( "sync" 
"github.com/containerd/containerd/content" - "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" "github.com/containerd/containerd/labels" - "github.com/containerd/containerd/log" "github.com/containerd/containerd/platforms" + "github.com/containerd/errdefs" + "github.com/containerd/log" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "golang.org/x/sync/semaphore" ) diff --git a/vendor/github.com/containerd/containerd/version/version.go b/vendor/github.com/containerd/containerd/version/version.go index 782771e4d..dfe46c922 100644 --- a/vendor/github.com/containerd/containerd/version/version.go +++ b/vendor/github.com/containerd/containerd/version/version.go @@ -23,7 +23,7 @@ var ( Package = "github.com/containerd/containerd" // Version holds the complete version number. Filled in at linking time. - Version = "1.7.17+unknown" + Version = "1.7.18+unknown" // Revision is filled with the VCS (e.g. git) revision being used to build // the program at linking time. diff --git a/vendor/github.com/containerd/errdefs/LICENSE b/vendor/github.com/containerd/errdefs/LICENSE new file mode 100644 index 000000000..584149b6e --- /dev/null +++ b/vendor/github.com/containerd/errdefs/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright The containerd Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/containerd/errdefs/README.md b/vendor/github.com/containerd/errdefs/README.md new file mode 100644 index 000000000..bd418c63f --- /dev/null +++ b/vendor/github.com/containerd/errdefs/README.md @@ -0,0 +1,13 @@ +# errdefs + +A Go package for defining and checking common containerd errors. + +## Project details + +**errdefs** is a containerd sub-project, licensed under the [Apache 2.0 license](./LICENSE). +As a containerd sub-project, you will find the: + * [Project governance](https://github.com/containerd/project/blob/main/GOVERNANCE.md), + * [Maintainers](https://github.com/containerd/project/blob/main/MAINTAINERS), + * and [Contributing guidelines](https://github.com/containerd/project/blob/main/CONTRIBUTING.md) + +information in our [`containerd/project`](https://github.com/containerd/project) repository. 
diff --git a/vendor/github.com/containerd/containerd/errdefs/errors.go b/vendor/github.com/containerd/errdefs/errors.go similarity index 100% rename from vendor/github.com/containerd/containerd/errdefs/errors.go rename to vendor/github.com/containerd/errdefs/errors.go diff --git a/vendor/github.com/containerd/containerd/errdefs/grpc.go b/vendor/github.com/containerd/errdefs/grpc.go similarity index 100% rename from vendor/github.com/containerd/containerd/errdefs/grpc.go rename to vendor/github.com/containerd/errdefs/grpc.go diff --git a/vendor/github.com/open-policy-agent/opa/ast/annotations.go b/vendor/github.com/open-policy-agent/opa/ast/annotations.go index f7a5c78f7..9663b0cc6 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/annotations.go +++ b/vendor/github.com/open-policy-agent/opa/ast/annotations.go @@ -627,9 +627,8 @@ func (a *AuthorAnnotation) String() string { return a.Name } else if len(a.Name) == 0 { return fmt.Sprintf("<%s>", a.Email) - } else { - return fmt.Sprintf("%s <%s>", a.Name, a.Email) } + return fmt.Sprintf("%s <%s>", a.Name, a.Email) } // Copy returns a deep copy of rr. diff --git a/vendor/github.com/open-policy-agent/opa/ast/compile.go b/vendor/github.com/open-policy-agent/opa/ast/compile.go index 6afa648d0..c59cfede6 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/compile.go +++ b/vendor/github.com/open-policy-agent/opa/ast/compile.go @@ -120,34 +120,35 @@ type Compiler struct { // Capabliities required by the modules that were compiled. Required *Capabilities - localvargen *localVarGenerator - moduleLoader ModuleLoader - ruleIndices *util.HashMap - stages []stage - maxErrs int - sorted []string // list of sorted module names - pathExists func([]string) (bool, error) - after map[string][]CompilerStageDefinition - metrics metrics.Metrics - capabilities *Capabilities // user-supplied capabilities - imports map[string][]*Import // saved imports from stripping - builtins map[string]*Builtin // universe of built-in functions - customBuiltins map[string]*Builtin // user-supplied custom built-in functions (deprecated: use capabilities) - unsafeBuiltinsMap map[string]struct{} // user-supplied set of unsafe built-ins functions to block (deprecated: use capabilities) - deprecatedBuiltinsMap map[string]struct{} // set of deprecated, but not removed, built-in functions - enablePrintStatements bool // indicates if print statements should be elided (default) - comprehensionIndices map[*Term]*ComprehensionIndex // comprehension key index - initialized bool // indicates if init() has been called - debug debug.Debug // emits debug information produced during compilation - schemaSet *SchemaSet // user-supplied schemas for input and data documents - inputType types.Type // global input type retrieved from schema set - annotationSet *AnnotationSet // hierarchical set of annotations - strict bool // enforce strict compilation checks - keepModules bool // whether to keep the unprocessed, parse modules (below) - parsedModules map[string]*Module // parsed, but otherwise unprocessed modules, kept track of when keepModules is true - useTypeCheckAnnotations bool // whether to provide annotated information (schemas) to the type checker - allowUndefinedFuncCalls bool // don't error on calls to unknown functions. 
- evalMode CompilerEvalMode + localvargen *localVarGenerator + moduleLoader ModuleLoader + ruleIndices *util.HashMap + stages []stage + maxErrs int + sorted []string // list of sorted module names + pathExists func([]string) (bool, error) + after map[string][]CompilerStageDefinition + metrics metrics.Metrics + capabilities *Capabilities // user-supplied capabilities + imports map[string][]*Import // saved imports from stripping + builtins map[string]*Builtin // universe of built-in functions + customBuiltins map[string]*Builtin // user-supplied custom built-in functions (deprecated: use capabilities) + unsafeBuiltinsMap map[string]struct{} // user-supplied set of unsafe built-ins functions to block (deprecated: use capabilities) + deprecatedBuiltinsMap map[string]struct{} // set of deprecated, but not removed, built-in functions + enablePrintStatements bool // indicates if print statements should be elided (default) + comprehensionIndices map[*Term]*ComprehensionIndex // comprehension key index + initialized bool // indicates if init() has been called + debug debug.Debug // emits debug information produced during compilation + schemaSet *SchemaSet // user-supplied schemas for input and data documents + inputType types.Type // global input type retrieved from schema set + annotationSet *AnnotationSet // hierarchical set of annotations + strict bool // enforce strict compilation checks + keepModules bool // whether to keep the unprocessed, parse modules (below) + parsedModules map[string]*Module // parsed, but otherwise unprocessed modules, kept track of when keepModules is true + useTypeCheckAnnotations bool // whether to provide annotated information (schemas) to the type checker + allowUndefinedFuncCalls bool // don't error on calls to unknown functions. + evalMode CompilerEvalMode // + rewriteTestRulesForTracing bool // rewrite test rules to capture dynamic values for tracing. } // CompilerStage defines the interface for stages in the compiler. @@ -346,6 +347,7 @@ func NewCompiler() *Compiler { {"CheckSafetyRuleBodies", "compile_stage_check_safety_rule_bodies", c.checkSafetyRuleBodies}, {"RewriteEquals", "compile_stage_rewrite_equals", c.rewriteEquals}, {"RewriteDynamicTerms", "compile_stage_rewrite_dynamic_terms", c.rewriteDynamicTerms}, + {"RewriteTestRulesForTracing", "compile_stage_rewrite_test_rules_for_tracing", c.rewriteTestRuleEqualities}, // must run after RewriteDynamicTerms {"CheckRecursion", "compile_stage_check_recursion", c.checkRecursion}, {"CheckTypes", "compile_stage_check_types", c.checkTypes}, // must be run after CheckRecursion {"CheckUnsafeBuiltins", "compile_state_check_unsafe_builtins", c.checkUnsafeBuiltins}, @@ -469,6 +471,13 @@ func (c *Compiler) WithEvalMode(e CompilerEvalMode) *Compiler { return c } +// WithRewriteTestRules enables rewriting test rules to capture dynamic values in local variables, +// so they can be accessed by tracing. +func (c *Compiler) WithRewriteTestRules(rewrite bool) *Compiler { + c.rewriteTestRulesForTracing = rewrite + return c +} + // ParsedModules returns the parsed, unprocessed modules from the compiler. // It is `nil` if keeping modules wasn't enabled via `WithKeepModules(true)`. // The map includes all modules loaded via the ModuleLoader, if one was used. 
@@ -2167,6 +2176,43 @@ func (c *Compiler) rewriteDynamicTerms() { } } +// rewriteTestRuleEqualities rewrites equality expressions in test rule bodies to create local vars for statements that would otherwise +// not have their values captured through tracing, such as refs and comprehensions not unified/assigned to a local var. +// For example, given the following module: +// +// package test +// +// p.q contains v if { +// some v in numbers.range(1, 3) +// } +// +// p.r := "foo" +// +// test_rule { +// p == { +// "q": {4, 5, 6} +// } +// } +// +// `p` in `test_rule` resolves to `data.test.p`, which won't be an entry in the virtual-cache and must therefore be calculated after-the-fact. +// If `p` isn't captured in a local var, there is no trivial way to retrieve its value for test reporting. +func (c *Compiler) rewriteTestRuleEqualities() { + if !c.rewriteTestRulesForTracing { + return + } + + f := newEqualityFactory(c.localvargen) + for _, name := range c.sorted { + mod := c.Modules[name] + WalkRules(mod, func(rule *Rule) bool { + if strings.HasPrefix(string(rule.Head.Name), "test_") { + rule.Body = rewriteTestEqualities(f, rule.Body) + } + return false + }) + } +} + func (c *Compiler) parseMetadataBlocks() { // Only parse annotations if rego.metadata built-ins are called regoMetadataCalled := false @@ -4194,7 +4240,7 @@ func resolveRefsInRule(globals map[Var]*usedRef, rule *Rule) error { // Object keys cannot be pattern matched so only walk values. case *object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(_, v *Term) { vis.Walk(v) }) @@ -4517,6 +4563,41 @@ func rewriteEquals(x interface{}) (modified bool) { return modified } +func rewriteTestEqualities(f *equalityFactory, body Body) Body { + result := make(Body, 0, len(body)) + for _, expr := range body { + // We can't rewrite negated expressions; if the extracted term is undefined, evaluation would fail before + // reaching the negation check. + if !expr.Negated && !expr.Generated { + switch { + case expr.IsEquality(): + terms := expr.Terms.([]*Term) + result, terms[1] = rewriteDynamicsShallow(expr, f, terms[1], result) + result, terms[2] = rewriteDynamicsShallow(expr, f, terms[2], result) + case expr.IsEvery(): + // We rewrite equalities inside of every-bodies as a fail here will be the cause of the test-rule fail. + // Failures inside other expressions with closures, such as comprehensions, won't cause the test-rule to fail, so we skip those. + every := expr.Terms.(*Every) + every.Body = rewriteTestEqualities(f, every.Body) + } + } + result = appendExpr(result, expr) + } + return result +} + +func rewriteDynamicsShallow(original *Expr, f *equalityFactory, term *Term, result Body) (Body, *Term) { + switch term.Value.(type) { + case Ref, *ArrayComprehension, *SetComprehension, *ObjectComprehension: + generated := f.Generate(term) + generated.With = original.With + result.Append(generated) + connectGeneratedExprs(original, generated) + return result, result[len(result)-1].Operand(0) + } + return result, term +} + // rewriteDynamics will rewrite the body so that dynamic terms (i.e., refs and // comprehensions) are bound to vars earlier in the query. This translation // results in eager evaluation. 
@@ -4608,6 +4689,7 @@ func rewriteDynamicsOne(original *Expr, f *equalityFactory, term *Term, result B generated := f.Generate(term) generated.With = original.With result.Append(generated) + connectGeneratedExprs(original, generated) return result, result[len(result)-1].Operand(0) case *Array: for i := 0; i < v.Len(); i++ { @@ -4636,16 +4718,19 @@ func rewriteDynamicsOne(original *Expr, f *equalityFactory, term *Term, result B var extra *Expr v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term) result.Append(extra) + connectGeneratedExprs(original, extra) return result, result[len(result)-1].Operand(0) case *SetComprehension: var extra *Expr v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term) result.Append(extra) + connectGeneratedExprs(original, extra) return result, result[len(result)-1].Operand(0) case *ObjectComprehension: var extra *Expr v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term) result.Append(extra) + connectGeneratedExprs(original, extra) return result, result[len(result)-1].Operand(0) } return result, term @@ -4713,6 +4798,7 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) { for i := 1; i < len(terms); i++ { var extras []*Expr extras, terms[i] = expandExprTerm(gen, terms[i]) + connectGeneratedExprs(expr, extras...) if len(expr.With) > 0 { for i := range extras { extras[i].With = expr.With @@ -4723,16 +4809,14 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) { result = append(result, expr) case *Every: var extras []*Expr - if _, ok := terms.Domain.Value.(Call); ok { - extras, terms.Domain = expandExprTerm(gen, terms.Domain) - } else { - term := NewTerm(gen.Generate()).SetLocation(terms.Domain.Location) - eq := Equality.Expr(term, terms.Domain).SetLocation(terms.Domain.Location) - eq.Generated = true - eq.With = expr.With - extras = append(extras, eq) - terms.Domain = term - } + + term := NewTerm(gen.Generate()).SetLocation(terms.Domain.Location) + eq := Equality.Expr(term, terms.Domain).SetLocation(terms.Domain.Location) + eq.Generated = true + eq.With = expr.With + extras = expandExpr(gen, eq) + terms.Domain = term + terms.Body = rewriteExprTermsInBody(gen, terms.Body) result = append(result, extras...) 
result = append(result, expr) @@ -4740,6 +4824,13 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) { return } +func connectGeneratedExprs(parent *Expr, children ...*Expr) { + for _, child := range children { + child.generatedFrom = parent + parent.generates = append(parent.generates, child) + } +} + func expandExprTerm(gen *localVarGenerator, term *Term) (support []*Expr, output *Term) { output = term switch v := term.Value.(type) { @@ -5494,26 +5585,34 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr return false, err } + isAllowedUnknownFuncCall := false + if c.allowUndefinedFuncCalls { + switch target.Value.(type) { + case Ref, Var: + isAllowedUnknownFuncCall = true + } + } + switch { case isDataRef(target): ref := target.Value.(Ref) - node := c.RuleTree + targetNode := c.RuleTree for i := 0; i < len(ref)-1; i++ { - child := node.Child(ref[i].Value) + child := targetNode.Child(ref[i].Value) if child == nil { break } else if len(child.Values) > 0 { return false, NewError(CompileErr, target.Loc(), "with keyword cannot partially replace virtual document(s)") } - node = child + targetNode = child } - if node != nil { + if targetNode != nil { // NOTE(sr): at this point in the compiler stages, we don't have a fully-populated // TypeEnv yet -- so we have to make do with this check to see if the replacement // target is a function. It's probably wrong for arity-0 functions, but those are // and edge case anyways. - if child := node.Child(ref[len(ref)-1].Value); child != nil { + if child := targetNode.Child(ref[len(ref)-1].Value); child != nil { for _, v := range child.Values { if len(v.(*Rule).Head.Args) > 0 { if ok, err := validateWithFunctionValue(c.builtins, unsafeBuiltinsMap, c.RuleTree, value); err != nil || ok { @@ -5523,6 +5622,18 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr } } } + + // If the with-value is a ref to a function, but not a call, we can't rewrite it + if r, ok := value.Value.(Ref); ok { + // TODO: check that target ref doesn't exist? + if valueNode := c.RuleTree.Find(r); valueNode != nil { + for _, v := range valueNode.Values { + if len(v.(*Rule).Head.Args) > 0 { + return false, nil + } + } + } + } case isInputRef(target): // ok, valid case isBuiltinRefOrVar: @@ -5541,6 +5652,9 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr if ok, err := validateWithFunctionValue(c.builtins, unsafeBuiltinsMap, c.RuleTree, value); err != nil || ok { return false, err // err may be nil } + case isAllowedUnknownFuncCall: + // The target isn't a ref to the input doc, data doc, or a known built-in, but it might be a ref to an unknown built-in. 
+ return false, nil default: return false, NewError(TypeErr, target.Location, "with keyword target must reference existing %v, %v, or a function", InputRootDocument, DefaultRootDocument) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/env.go b/vendor/github.com/open-policy-agent/opa/ast/env.go index 784a34c04..c767aafef 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/env.go +++ b/vendor/github.com/open-policy-agent/opa/ast/env.go @@ -472,7 +472,7 @@ func insertIntoObject(o *types.Object, path Ref, tpe types.Type, env *TypeEnv) ( func (n *typeTreeNode) Leafs() map[*Ref]types.Type { leafs := map[*Ref]types.Type{} - n.children.Iter(func(k, v util.T) bool { + n.children.Iter(func(_, v util.T) bool { collectLeafs(v.(*typeTreeNode), nil, leafs) return false }) @@ -485,7 +485,7 @@ func collectLeafs(n *typeTreeNode, path Ref, leafs map[*Ref]types.Type) { leafs[&nPath] = n.Value() return } - n.children.Iter(func(k, v util.T) bool { + n.children.Iter(func(_, v util.T) bool { collectLeafs(v.(*typeTreeNode), nPath, leafs) return false }) diff --git a/vendor/github.com/open-policy-agent/opa/ast/index.go b/vendor/github.com/open-policy-agent/opa/ast/index.go index e14bb72ee..8cad71f1e 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/index.go +++ b/vendor/github.com/open-policy-agent/opa/ast/index.go @@ -164,7 +164,7 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) { return result, nil } -func (i *baseDocEqIndex) AllRules(resolver ValueResolver) (*IndexResult, error) { +func (i *baseDocEqIndex) AllRules(_ ValueResolver) (*IndexResult, error) { tr := newTrieTraversalResult() // Walk over the rule trie and accumulate _all_ rules diff --git a/vendor/github.com/open-policy-agent/opa/ast/internal/scanner/scanner.go b/vendor/github.com/open-policy-agent/opa/ast/internal/scanner/scanner.go index 97ee8bde0..a0200ac18 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/internal/scanner/scanner.go +++ b/vendor/github.com/open-policy-agent/opa/ast/internal/scanner/scanner.go @@ -26,6 +26,7 @@ type Scanner struct { width int errors []Error keywords map[string]tokens.Token + tabs []int regoV1Compatible bool } @@ -37,10 +38,11 @@ type Error struct { // Position represents a point in the scanned source code. type Position struct { - Offset int // start offset in bytes - End int // end offset in bytes - Row int // line number computed in bytes - Col int // column number computed in bytes + Offset int // start offset in bytes + End int // end offset in bytes + Row int // line number computed in bytes + Col int // column number computed in bytes + Tabs []int // positions of any tabs preceding Col } // New returns an initialized scanner that will scan @@ -60,6 +62,7 @@ func New(r io.Reader) (*Scanner, error) { curr: -1, width: 0, keywords: tokens.Keywords(), + tabs: []int{}, } s.next() @@ -156,7 +159,7 @@ func (s *Scanner) WithoutKeywords(kws map[string]tokens.Token) (*Scanner, map[st // for any errors before using the other values. 
func (s *Scanner) Scan() (tokens.Token, Position, string, []Error) { - pos := Position{Offset: s.offset - s.width, Row: s.row, Col: s.col} + pos := Position{Offset: s.offset - s.width, Row: s.row, Col: s.col, Tabs: s.tabs} var tok tokens.Token var lit string @@ -410,8 +413,12 @@ func (s *Scanner) next() { if s.curr == '\n' { s.row++ s.col = 0 + s.tabs = []int{} } else { s.col++ + if s.curr == '\t' { + s.tabs = append(s.tabs, s.col) + } } } diff --git a/vendor/github.com/open-policy-agent/opa/ast/location/location.go b/vendor/github.com/open-policy-agent/opa/ast/location/location.go index 309351a1e..92226df3f 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/location/location.go +++ b/vendor/github.com/open-policy-agent/opa/ast/location/location.go @@ -20,6 +20,8 @@ type Location struct { // JSONOptions specifies options for marshaling and unmarshalling of locations JSONOptions astJSON.Options + + Tabs []int `json:"-"` // The column offsets of tabs in the source. } // NewLocation returns a new Location object. diff --git a/vendor/github.com/open-policy-agent/opa/ast/parser.go b/vendor/github.com/open-policy-agent/opa/ast/parser.go index 10479caff..388e5e592 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/parser.go +++ b/vendor/github.com/open-policy-agent/opa/ast/parser.go @@ -943,12 +943,10 @@ func (p *Parser) parseHead(defaultRule bool) (*Head, bool) { p.illegal("expected rule value term (e.g., %s[%s] = { ... })", name, head.Key) } case tokens.Assign: - s := p.save() p.scan() head.Assign = true head.Value = p.parseTermInfixCall() if head.Value == nil { - p.restore(s) switch { case len(head.Args) > 0: p.illegal("expected function value term (e.g., %s(...) := { ... })", name) @@ -2132,6 +2130,7 @@ func (p *Parser) doScan(skipws bool) { p.s.loc.Col = pos.Col p.s.loc.Offset = pos.Offset p.s.loc.Text = p.s.Text(pos.Offset, pos.End) + p.s.loc.Tabs = pos.Tabs for _, err := range errs { p.error(p.s.Loc(), err.Message) @@ -2308,6 +2307,11 @@ func (b *metadataParser) Parse() (*Annotations, error) { b.loc = comment.Location } } + + if match == nil && len(b.comments) > 0 { + b.loc = b.comments[0].Location + } + return nil, augmentYamlError(err, b.comments) } @@ -2375,6 +2379,21 @@ func (b *metadataParser) Parse() (*Annotations, error) { } result.Location = b.loc + + // recreate original text of entire metadata block for location text attribute + sb := strings.Builder{} + sb.WriteString("# METADATA\n") + + lines := bytes.Split(b.buf.Bytes(), []byte{'\n'}) + + for _, line := range lines[:len(lines)-1] { + sb.WriteString("# ") + sb.Write(line) + sb.WriteByte('\n') + } + + result.Location.Text = []byte(strings.TrimSuffix(sb.String(), "\n")) + return &result, nil } @@ -2412,10 +2431,11 @@ func augmentYamlError(err error, comments []*Comment) error { return err } -func unwrapPair(pair map[string]interface{}) (k string, v interface{}) { - for k, v = range pair { +func unwrapPair(pair map[string]interface{}) (string, interface{}) { + for k, v := range pair { + return k, v } - return + return "", nil } var errInvalidSchemaRef = fmt.Errorf("invalid schema reference") diff --git a/vendor/github.com/open-policy-agent/opa/ast/policy.go b/vendor/github.com/open-policy-agent/opa/ast/policy.go index 505b7abd7..d8e6fa3bc 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/policy.go +++ b/vendor/github.com/open-policy-agent/opa/ast/policy.go @@ -233,7 +233,9 @@ type ( Negated bool `json:"negated,omitempty"` Location *Location `json:"location,omitempty"` - jsonOptions astJSON.Options + jsonOptions 
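The scanner and location hunks above make the lexer record the column of every tab on the current line (the slice is reset on each newline) and carry it through Position.Tabs into Location.Tabs. The diff does not show a consumer, but a plausible use is converting a byte-based column into a display column when tabs render wider than one cell. A hypothetical helper, assuming a fixed tab width; this is an illustration, not code from the OPA repository:

package main

import "fmt"

// displayCol converts a 1-based byte column into a 1-based display column,
// assuming each tab at the recorded positions expands to tabWidth cells.
func displayCol(col int, tabs []int, tabWidth int) int {
	out := col
	for _, t := range tabs {
		if t <= col {
			out += tabWidth - 1
		}
	}
	return out
}

func main() {
	// A line starting with two tabs: byte column 3 is the first code character.
	fmt.Println(displayCol(3, []int{1, 2}, 4)) // 9
}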
astJSON.Options + generatedFrom *Expr + generates []*Expr } // SomeDecl represents a variable declaration statement. The symbols are variables. @@ -1593,6 +1595,46 @@ func NewBuiltinExpr(terms ...*Term) *Expr { return &Expr{Terms: terms} } +func (expr *Expr) CogeneratedExprs() []*Expr { + visited := map[*Expr]struct{}{} + visitCogeneratedExprs(expr, func(e *Expr) bool { + if expr.Equal(e) { + return true + } + if _, ok := visited[e]; ok { + return true + } + visited[e] = struct{}{} + return false + }) + + result := make([]*Expr, 0, len(visited)) + for e := range visited { + result = append(result, e) + } + return result +} + +func (expr *Expr) BaseCogeneratedExpr() *Expr { + if expr.generatedFrom == nil { + return expr + } + return expr.generatedFrom.BaseCogeneratedExpr() +} + +func visitCogeneratedExprs(expr *Expr, f func(*Expr) bool) { + if parent := expr.generatedFrom; parent != nil { + if stop := f(parent); !stop { + visitCogeneratedExprs(parent, f) + } + } + for _, child := range expr.generates { + if stop := f(child); !stop { + visitCogeneratedExprs(child, f) + } + } +} + func (d *SomeDecl) String() string { if call, ok := d.Symbols[0].Value.(Call); ok { if len(call) == 4 { diff --git a/vendor/github.com/open-policy-agent/opa/ast/term.go b/vendor/github.com/open-policy-agent/opa/ast/term.go index bb39c18e3..4664bc5da 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/term.go +++ b/vendor/github.com/open-policy-agent/opa/ast/term.go @@ -2633,7 +2633,7 @@ func filterObject(o Value, filter Value) (Value, error) { other = v } - err := iterObj.Iter(func(key *Term, value *Term) error { + err := iterObj.Iter(func(key *Term, _ *Term) error { if other.Get(key) != nil { filteredValue, err := filterObject(v.Get(key).Value, filteredObj.Get(key).Value) if err != nil { @@ -3091,12 +3091,12 @@ func unmarshalTermSlice(s []interface{}) ([]*Term, error) { buf := []*Term{} for _, x := range s { if m, ok := x.(map[string]interface{}); ok { - if t, err := unmarshalTerm(m); err == nil { + t, err := unmarshalTerm(m) + if err == nil { buf = append(buf, t) continue - } else { - return nil, err } + return nil, err } return nil, fmt.Errorf("ast: unable to unmarshal term") } diff --git a/vendor/github.com/open-policy-agent/opa/ast/visit.go b/vendor/github.com/open-policy-agent/opa/ast/visit.go index 7b0d3b08e..d83c31149 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/visit.go +++ b/vendor/github.com/open-policy-agent/opa/ast/visit.go @@ -352,12 +352,12 @@ func (vis *GenericVisitor) Walk(x interface{}) { vis.Walk(x[i]) } case *object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(k, _ *Term) { vis.Walk(k) vis.Walk(x.Get(k)) }) case Object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(k, _ *Term) { vis.Walk(k) vis.Walk(x.Get(k)) }) @@ -492,12 +492,12 @@ func (vis *BeforeAfterVisitor) Walk(x interface{}) { vis.Walk(x[i]) } case *object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(k, _ *Term) { vis.Walk(k) vis.Walk(x.Get(k)) }) case Object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(k, _ *Term) { vis.Walk(k) vis.Walk(x.Get(k)) }) @@ -579,7 +579,7 @@ func (vis *VarVisitor) Vars() VarSet { func (vis *VarVisitor) visit(v interface{}) bool { if vis.params.SkipObjectKeys { if o, ok := v.(Object); ok { - o.Foreach(func(k, v *Term) { + o.Foreach(func(_, v *Term) { vis.Walk(v) }) return true @@ -741,7 +741,7 @@ func (vis *VarVisitor) Walk(x interface{}) { vis.Walk(x[i]) } case *object: - x.Foreach(func(k, v *Term) { + x.Foreach(func(k, _ *Term) { vis.Walk(k) vis.Walk(x.Get(k)) }) diff --git 
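The new generatedFrom/generates links on Expr surface through two exported helpers: BaseCogeneratedExpr walks the parent links back to the original, user-authored expression, and CogeneratedExprs collects every expression connected to the same origin. A sketch of how a caller might map compiled expressions back to their source; the example module and field accesses are assumptions for illustration, which expressions end up marked as generated depends on the rewriting stages, and error handling is minimal:

package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	// Compile a module whose body the compiler will rewrite into
	// additional generated expressions.
	compiler, err := ast.CompileModules(map[string]string{
		"example.rego": `package example

p { input.users[_].name == "alice" }
`,
	})
	if err != nil {
		panic(err)
	}

	rule := compiler.Modules["example.rego"].Rules[0]
	for _, expr := range rule.Body {
		// For generated expressions, BaseCogeneratedExpr returns the
		// expression they were derived from; for originals it returns
		// the expression itself.
		base := expr.BaseCogeneratedExpr()
		fmt.Println(expr.Loc(), "=>", base.Loc())
	}
}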
a/vendor/github.com/open-policy-agent/opa/bundle/bundle.go b/vendor/github.com/open-policy-agent/opa/bundle/bundle.go index 9c02568b5..a68c3c712 100644 --- a/vendor/github.com/open-policy-agent/opa/bundle/bundle.go +++ b/vendor/github.com/open-policy-agent/opa/bundle/bundle.go @@ -15,6 +15,7 @@ import ( "fmt" "io" "net/url" + "os" "path" "path/filepath" "reflect" @@ -1190,7 +1191,8 @@ func (b *Bundle) SetRegoVersion(v ast.RegoVersion) { // If there is no defined version for the given path, the default version def is returned. // If the version does not correspond to ast.RegoV0 or ast.RegoV1, an error is returned. func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoVersion, error) { - if version, err := b.Manifest.numericRegoVersionForFile(path); err != nil { + version, err := b.Manifest.numericRegoVersionForFile(path) + if err != nil { return def, err } else if version == nil { return def, nil @@ -1198,9 +1200,8 @@ func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoV return ast.RegoV0, nil } else if *version == 1 { return ast.RegoV1, nil - } else { - return def, fmt.Errorf("unknown bundle rego-version %d for file '%s'", *version, path) } + return def, fmt.Errorf("unknown bundle rego-version %d for file '%s'", *version, path) } func (m *Manifest) numericRegoVersionForFile(path string) (*int, error) { @@ -1667,6 +1668,7 @@ func preProcessBundle(loader DirectoryLoader, skipVerify bool, sizeLimitBytes in } func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) { + // Case for pre-loaded byte buffers, like those from the tarballLoader. if bb, ok := f.reader.(*bytes.Buffer); ok { _ = f.Close() // always close, even on error @@ -1678,6 +1680,37 @@ func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) { return *bb, nil } + // Case for *lazyFile readers: + if lf, ok := f.reader.(*lazyFile); ok { + var buf bytes.Buffer + if lf.file == nil { + var err error + if lf.file, err = os.Open(lf.path); err != nil { + return buf, fmt.Errorf("failed to open file %s: %w", f.path, err) + } + } + // Bail out if we can't read the whole file-- there's nothing useful we can do at that point! + fileSize, _ := fstatFileSize(lf.file) + if fileSize > sizeLimitBytes { + return buf, fmt.Errorf(maxSizeLimitBytesErrMsg, strings.TrimPrefix(f.Path(), "/"), fileSize, sizeLimitBytes-1) + } + // Prealloc the buffer for the file read. + buffer := make([]byte, fileSize) + _, err := io.ReadFull(lf.file, buffer) + if err != nil { + return buf, err + } + _ = lf.file.Close() // always close, even on error + + // Note(philipc): Replace the lazyFile reader in the *Descriptor with a + // pointer to the wrapping bytes.Buffer, so that we don't re-read the + // file on disk again by accident. + buf = *bytes.NewBuffer(buffer) + f.reader = &buf + return buf, nil + } + + // Fallback case: var buf bytes.Buffer n, err := f.Read(&buf, sizeLimitBytes) _ = f.Close() // always close, even on error @@ -1691,6 +1724,17 @@ func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) { return buf, nil } +// Takes an already open file handle and invokes the os.Stat system call on it +// to determine the file's size. Passes any errors from *File.Stat on up to the +// caller. 
+func fstatFileSize(f *os.File) (int64, error) { + fileInfo, err := f.Stat() + if err != nil { + return 0, err + } + return fileInfo.Size(), nil +} + func normalizePath(p string) string { return filepath.ToSlash(p) } diff --git a/vendor/github.com/open-policy-agent/opa/bundle/file.go b/vendor/github.com/open-policy-agent/opa/bundle/file.go index 62ffeeff5..c2c5a6b84 100644 --- a/vendor/github.com/open-policy-agent/opa/bundle/file.go +++ b/vendor/github.com/open-policy-agent/opa/bundle/file.go @@ -211,7 +211,7 @@ func (d *dirLoader) NextFile() (*Descriptor, error) { // build a list of all files we will iterate over and read, but only one time if d.files == nil { d.files = []string{} - err := filepath.Walk(d.root, func(path string, info os.FileInfo, err error) error { + err := filepath.Walk(d.root, func(path string, info os.FileInfo, _ error) error { if info != nil && info.Mode().IsRegular() { if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, false)) { return nil @@ -370,12 +370,13 @@ func (t *tarballLoader) NextFile() (*Descriptor, error) { f := file{name: header.Name} - var buf bytes.Buffer - if _, err := io.Copy(&buf, t.tr); err != nil { + // Note(philipc): We rely on the previous size check in this loop for safety. + buf := bytes.NewBuffer(make([]byte, 0, header.Size)) + if _, err := io.Copy(buf, t.tr); err != nil { return nil, fmt.Errorf("failed to copy file %s: %w", header.Name, err) } - f.reader = &buf + f.reader = buf t.files = append(t.files, f) } else if header.Typeflag == tar.TypeDir { diff --git a/vendor/github.com/open-policy-agent/opa/capabilities/v0.66.0.json b/vendor/github.com/open-policy-agent/opa/capabilities/v0.66.0.json new file mode 100644 index 000000000..06c04773c --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/capabilities/v0.66.0.json @@ -0,0 +1,4826 @@ +{ + "builtins": [ + { + "name": "abs", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "all", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "and", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "\u0026" + }, + { + "name": "any", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "array.concat", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.reverse", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.slice", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": 
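Both bundle-loading hunks above follow the same pattern: learn the payload size up front (via (*os.File).Stat for lazy files, via the tar header for tarball entries), preallocate a buffer of that size, and read into it instead of letting a bytes.Buffer grow repeatedly. A self-contained sketch of the stat-then-io.ReadFull pattern, written as a generic illustration rather than the bundle package's implementation:

package main

import (
	"fmt"
	"io"
	"os"
)

// readAllPrealloc reads a whole file into a buffer sized from Stat, refusing
// files larger than limit. It mirrors the readFile fast path above.
func readAllPrealloc(path string, limit int64) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		return nil, err
	}
	if info.Size() > limit {
		return nil, fmt.Errorf("%s exceeds size limit (%d > %d)", path, info.Size(), limit)
	}

	buf := make([]byte, info.Size())
	if _, err := io.ReadFull(f, buf); err != nil {
		return nil, err
	}
	return buf, nil
}

func main() {
	data, err := readAllPrealloc("/etc/hostname", 1<<20)
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	fmt.Printf("read %d bytes\n", len(data))
}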
{ + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "assign", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": ":=" + }, + { + "name": "base64.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "base64url.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode_no_pad", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "bits.and", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.lsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.negate", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.or", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.rsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.xor", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "cast_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "cast_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "cast_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "null" + }, + "type": "function" + } + }, + { + "name": "cast_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "cast_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "cast_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "ceil", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "concat", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "dynamic": { + 
"type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "count", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.equal", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.md5", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha1", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.md5", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.parse_private_keys", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.sha1", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.sha256", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates_with_options", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificate_request", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + 
}, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_keypair", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_rsa_private_key", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "div", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "/" + }, + { + "name": "endswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "eq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "=" + }, + { + "name": "equal", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "==" + }, + { + "name": "floor", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "format_int", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "glob.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "type": "null" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + } + ], + "type": "any" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "glob.quote_meta", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "graph.reachable", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graph.reachable_paths", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "dynamic": { + "type": 
"any" + }, + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graphql.is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "graphql.parse", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_and_verify", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_query", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_schema", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.schema_is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "gt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e" + }, + { + "name": "gte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e=" + }, + { + "name": "hex.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "hex.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": 
"function" + } + }, + { + "name": "http.send", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "indexof", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "indexof_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "internal.member_2", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.member_3", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.print", + "decl": { + "args": [ + { + "dynamic": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "array" + } + ], + "type": "function" + } + }, + { + "name": "intersection", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode_verify", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign_raw", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.verify_es256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" 
+ } + }, + { + "name": "io.jwt.verify_es384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_number", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": 
{ + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.marshal_with_options", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "indent", + "value": { + "type": "string" + } + }, + { + "key": "prefix", + "value": { + "type": "string" + } + }, + { + "key": "pretty", + "value": { + "type": "boolean" + } + } + ], + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.match_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "static": [ + { + "key": "desc", + "value": { + "type": "string" + } + }, + { + "key": "error", + "value": { + "type": "string" + } + }, + { + "key": "field", + "value": { + "type": "string" + } + }, + { + "key": "type", + "value": { + "type": "string" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "json.patch", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "op", + "value": { + "type": "string" + } + }, + { + "key": "path", + "value": { + "type": "any" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.verify_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "of": [ + { + "type": "null" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "lower", + "decl": { + "args": [ + { + 
"type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "lt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c" + }, + { + "name": "lte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c=" + }, + { + "name": "max", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "min", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "minus", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + "type": "function" + }, + "infix": "-" + }, + { + "name": "mul", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "*" + }, + { + "name": "neq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "!=" + }, + { + "name": "net.cidr_contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_contains_matches", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "static": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": 
"net.cidr_expand", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_intersects", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_merge", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_overlap", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.lookup_ip_addr", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "numbers.range", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "numbers.range_step", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "object.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.get", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.keys", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "object.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.subset", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, 
+ "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union_n", + "decl": { + "args": [ + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "opa.runtime", + "decl": { + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "or", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "|" + }, + { + "name": "plus", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "+" + }, + { + "name": "print", + "decl": { + "type": "function", + "variadic": { + "type": "any" + } + } + }, + { + "name": "product", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "providers.aws.sign_req", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rand.intn", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "re_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.find_all_string_submatch_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.find_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": 
"function" + } + }, + { + "name": "regex.globs_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "regex.split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.template_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.chain", + "decl": { + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.rule", + "decl": { + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "rego.parse_module", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rem", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "%" + }, + { + "name": "replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "round", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.compare", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.is_valid", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "set_diff", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "sort", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "sprintf", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + 
"type": "array" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "startswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_prefix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_suffix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.render_template", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.replace_n", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.reverse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "substring", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "sum", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.add_date", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.clock", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.date", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + 
"name": "time.diff", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.format", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "time.now_ns", + "decl": { + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "time.parse_duration_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_ns", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_rfc3339_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.weekday", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "to_number", + "decl": { + "args": [ + { + "of": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "trace", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "trim", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_left", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_prefix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_right", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_space", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_suffix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "type_name", + "decl": { + "args": [ + { + "type": "any" + } + ], + 
"result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "union", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "units.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "units.parse_bytes", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "upper", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode_object", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "dynamic": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode_object", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "uuid.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "uuid.rfc4122", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "walk", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + }, + "relation": true + }, + { + "name": "yaml.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "yaml.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "yaml.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + } + ], + "future_keywords": [ + "contains", + "every", + "if", + "in" + ], + "wasm_abi_versions": [ + { + "version": 1, + "minor_version": 1 + }, + { + "version": 1, + "minor_version": 2 + } + ], + "features": [ + "rule_head_ref_string_prefixes", + "rule_head_refs", + "rego_v1_import" + ] +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/build.go b/vendor/github.com/open-policy-agent/opa/cmd/build.go index 824f1b8f6..fffb8f2a3 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/build.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/build.go @@ -222,7 +222,7 @@ against OPA v0.22.0: } return 
env.CmdFlags.CheckEnvironmentVariables(Cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { if err := dobuild(buildParams, args); err != nil { fmt.Println("error:", err) os.Exit(1) diff --git a/vendor/github.com/open-policy-agent/opa/cmd/capabilities.go b/vendor/github.com/open-policy-agent/opa/cmd/capabilities.go index bf2f2589b..4fdc3685a 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/capabilities.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/capabilities.go @@ -70,7 +70,7 @@ Print the capabilities of a capabilities file } `, - PreRunE: func(cmd *cobra.Command, args []string) error { + PreRunE: func(cmd *cobra.Command, _ []string) error { return env.CmdFlags.CheckEnvironmentVariables(cmd) }, RunE: func(*cobra.Command, []string) error { diff --git a/vendor/github.com/open-policy-agent/opa/cmd/deps.go b/vendor/github.com/open-policy-agent/opa/cmd/deps.go index 1ef22a180..823758faa 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/deps.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/deps.go @@ -96,7 +96,7 @@ data.policy.is_admin. } return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { if err := deps(args, params, os.Stdout); err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) diff --git a/vendor/github.com/open-policy-agent/opa/cmd/eval.go b/vendor/github.com/open-policy-agent/opa/cmd/eval.go index 4dba949d9..b10bf6459 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/eval.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/eval.go @@ -72,6 +72,7 @@ type evalCommandParams struct { entrypoints repeatedStringFlag strict bool v1Compatible bool + traceVarValues bool } func newEvalCommandParams() evalCommandParams { @@ -276,7 +277,7 @@ access. } return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { defined, err := eval(args, params, os.Stdout) if err != nil { @@ -307,9 +308,9 @@ access. 
evalCommand.Flags().VarP(&params.prettyLimit, "pretty-limit", "", "set limit after which pretty output gets truncated") evalCommand.Flags().BoolVarP(&params.failDefined, "fail-defined", "", false, "exits with non-zero exit code on defined/non-empty result and errors") evalCommand.Flags().DurationVar(&params.timeout, "timeout", 0, "set eval timeout (default unlimited)") - evalCommand.Flags().IntVarP(&params.optimizationLevel, "optimize", "O", 0, "set optimization level") evalCommand.Flags().VarP(&params.entrypoints, "entrypoint", "e", "set slash separated entrypoint path") + evalCommand.Flags().BoolVar(&params.traceVarValues, "var-values", false, "show local variable values in pretty trace output") // Shared flags addCapabilitiesFlag(evalCommand.Flags(), params.capabilities) @@ -398,7 +399,12 @@ func eval(args []string, params evalCommandParams, w io.Writer) (bool, error) { case evalValuesOutput: err = pr.Values(w, result) case evalPrettyOutput: - err = pr.Pretty(w, result) + err = pr.PrettyWithOptions(w, result, pr.PrettyOptions{ + TraceOpts: topdown.PrettyTraceOptions{ + Locations: true, + ExprVariables: ectx.params.traceVarValues, + }, + }) case evalSourceOutput: err = pr.Source(w, result) case evalRawOutput: diff --git a/vendor/github.com/open-policy-agent/opa/cmd/exec.go b/vendor/github.com/open-policy-agent/opa/cmd/exec.go index f8fad7514..c59e36421 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/exec.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/exec.go @@ -4,6 +4,7 @@ import ( "bytes" "context" "encoding/json" + "errors" "fmt" "os" "path/filepath" @@ -50,13 +51,15 @@ After: Decision Logs By default, the 'exec' command executes the "default decision" (specified in the OPA configuration) against each input file. This can be overridden by specifying the --decision argument and pointing at a specific policy decision, -e.g., opa exec --decision /foo/bar/baz ...`, +e.g., opa exec --decision /foo/bar/baz ... - Args: cobra.MinimumNArgs(1), - PreRunE: func(cmd *cobra.Command, args []string) error { +Alternative Usage: + ` + RootCommand.Use + ` exec [ [...]] --stdin-input [flags]`, + + PreRunE: func(cmd *cobra.Command, _ []string) error { return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { params.Paths = args params.BundlePaths = bundlePaths.v if err := runExec(params); err != nil { @@ -78,6 +81,7 @@ e.g., opa exec --decision /foo/bar/baz ...`, cmd.Flags().VarP(params.LogLevel, "log-level", "l", "set log level") cmd.Flags().Var(params.LogFormat, "log-format", "set log format") cmd.Flags().StringVar(&params.LogTimestampFormat, "log-timestamp-format", "", "set log timestamp format (OPA_LOG_TIMESTAMP_FORMAT environment variable)") + cmd.Flags().BoolVarP(&params.StdIn, "stdin-input", "I", false, "read input document from stdin rather than a static file") cmd.Flags().DurationVar(&params.Timeout, "timeout", 0, "set exec timeout with a Go-style duration, such as '5m 30s'.
(default unlimited)") addV1CompatibleFlag(cmd.Flags(), ¶ms.V1Compatible, false) @@ -95,6 +99,10 @@ func runExec(params *exec.Params) error { } func runExecWithContext(ctx context.Context, params *exec.Params) error { + if minimumInputErr := validateMinimumInput(params); minimumInputErr != nil { + return minimumInputErr + } + stdLogger, consoleLogger, err := setupLogging(params.LogLevel.String(), params.LogFormat.String(), params.LogTimestampFormat) if err != nil { return fmt.Errorf("config error: %w", err) @@ -258,3 +266,10 @@ func injectExplicitBundles(root map[string]interface{}, paths []string) error { return nil } + +func validateMinimumInput(params *exec.Params) error { + if !params.StdIn && len(params.Paths) == 0 { + return errors.New("requires at least 1 path arg, or the --stdin-input flag") + } + return nil +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/fmt.go b/vendor/github.com/open-policy-agent/opa/cmd/fmt.go index 48616102d..df0622aad 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/fmt.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/fmt.go @@ -64,10 +64,10 @@ to stdout from the 'fmt' command. If the '--fail' option is supplied, the 'fmt' command will return a non zero exit code if a file would be reformatted.`, - PreRunE: func(cmd *cobra.Command, args []string) error { + PreRunE: func(cmd *cobra.Command, _ []string) error { return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { os.Exit(opaFmt(args)) }, } diff --git a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/exec.go b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/exec.go index edd1d88c4..8748b4dda 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/exec.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/exec.go @@ -2,61 +2,25 @@ package exec import ( "context" - "encoding/json" "errors" - "fmt" - "io" "os" "path" "path/filepath" - "time" + "strings" - "github.com/open-policy-agent/opa/logging" "github.com/open-policy-agent/opa/sdk" - "github.com/open-policy-agent/opa/util" ) -type Params struct { - Paths []string // file paths to execute against - Output io.Writer // output stream to write normal output to - ConfigFile string // OPA configuration file path - ConfigOverrides []string // OPA configuration overrides (--set arguments) - ConfigOverrideFiles []string // OPA configuration overrides (--set-file arguments) - OutputFormat *util.EnumFlag // output format (default: pretty) - LogLevel *util.EnumFlag // log level for plugins - LogFormat *util.EnumFlag // log format for plugins - LogTimestampFormat string // log timestamp format for plugins - BundlePaths []string // explicit paths of bundles to inject into the configuration - Decision string // decision to evaluate (overrides default decision set by configuration) - Fail bool // exits with non-zero exit code on undefined policy decision or empty policy decision result or other errors - FailDefined bool // exits with non-zero exit code on 'not undefined policy decisiondefined' or 'not empty policy decision result' or other errors - FailNonEmpty bool // exits with non-zero exit code on non-empty set (array) results - Timeout time.Duration // timeout to prevent infinite hangs. If set to 0, the command will never time out - V1Compatible bool // use OPA 1.0 compatibility mode - Logger logging.Logger // Logger override. If set to nil, the default logger is used. 
-} - -func NewParams(w io.Writer) *Params { - return &Params{ - Output: w, - OutputFormat: util.NewEnumFlag("pretty", []string{"pretty", "json"}), - LogLevel: util.NewEnumFlag("error", []string{"debug", "info", "error"}), - LogFormat: util.NewEnumFlag("json", []string{"text", "json", "json-pretty"}), +var ( + r *jsonReporter + parsers = map[string]parser{ + ".json": utilParser{}, + ".yaml": utilParser{}, + ".yml": utilParser{}, } -} +) -func (p *Params) validateParams() error { - if p.Fail && p.FailDefined { - return errors.New("specify --fail or --fail-defined but not both") - } - if p.FailNonEmpty && p.Fail { - return errors.New("specify --fail-non-empty or --fail but not both") - } - if p.FailNonEmpty && p.FailDefined { - return errors.New("specify --fail-non-empty or --fail-defined but not both") - } - return nil -} +const stdInPath = "--stdin-input" // Exec executes OPA against the supplied files and outputs each result. // @@ -66,18 +30,49 @@ func (p *Params) validateParams() error { // - exit codes set by convention or policy (e.g,. non-empty set => error) // - support for new input file formats beyond JSON and YAML func Exec(ctx context.Context, opa *sdk.OPA, params *Params) error { + if err := params.validateParams(); err != nil { + return err + } - err := params.validateParams() - if err != nil { + r = &jsonReporter{w: params.Output, buf: make([]result, 0), ctx: &ctx, opa: opa, params: params, decisionFunc: opa.Decision} + + if params.StdIn { + if err := execOnStdIn(); err != nil { + return err + } + } + + if err := execOnInputFiles(params); err != nil { return err } - now := time.Now() - r := &jsonReporter{w: params.Output, buf: make([]result, 0)} + if err := r.Close(); err != nil { + return err + } - failCount := 0 - errorCount := 0 + return r.ReportFailure() +} +func execOnStdIn() error { + sr := stdInReader{Reader: os.Stdin} + p := utilParser{} + raw := sr.ReadInput() + input, err := p.Parse(strings.NewReader(raw)) + if err != nil { + return err + } else if input == nil { + return errors.New("cannot execute on empty input; please enter valid json or yaml when using the --stdin-input flag") + } + r.StoreDecision(&input, stdInPath) + return nil +} + +type fileListItem struct { + Path string + Error error +} + +func execOnInputFiles(params *Params) error { for item := range listAllPaths(params.Paths) { if item.Error != nil { @@ -87,96 +82,19 @@ func Exec(ctx context.Context, opa *sdk.OPA, params *Params) error { input, err := parse(item.Path) if err != nil { - if err2 := r.Report(result{Path: item.Path, Error: err}); err2 != nil { - return err2 - } + r.Report(result{Path: item.Path, Error: err}) if params.FailDefined || params.Fail || params.FailNonEmpty { - errorCount++ + r.errorCount++ } continue } else if input == nil { continue } - - rs, err := opa.Decision(ctx, sdk.DecisionOptions{ - Path: params.Decision, - Now: now, - Input: input, - }) - if err != nil { - if err2 := r.Report(result{Path: item.Path, Error: err}); err2 != nil { - return err2 - } - if (params.FailDefined && !sdk.IsUndefinedErr(err)) || (params.Fail && sdk.IsUndefinedErr(err)) || (params.FailNonEmpty && !sdk.IsUndefinedErr(err)) { - errorCount++ - } - continue - } - - if err := r.Report(result{Path: item.Path, Result: &rs.Result}); err != nil { - return err - } - - if (params.FailDefined && rs.Result != nil) || (params.Fail && rs.Result == nil) { - failCount++ - } - - if params.FailNonEmpty && rs.Result != nil { - // Check if rs.Result is an array and has one or more members - resultArray, isArray := 
rs.Result.([]interface{}) - if (!isArray) || (isArray && (len(resultArray) > 0)) { - failCount++ - } - } - } - if err := r.Close(); err != nil { - return err - } - - if (params.Fail || params.FailDefined || params.FailNonEmpty) && (failCount > 0 || errorCount > 0) { - if params.Fail { - return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail is set", failCount, errorCount) - } - if params.FailDefined { - return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-defined is set", failCount, errorCount) - } - return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-non-empty is set", failCount, errorCount) + r.StoreDecision(input, item.Path) } - - return nil -} - -type result struct { - Path string `json:"path"` - Error error `json:"error,omitempty"` - Result *interface{} `json:"result,omitempty"` -} - -type jsonReporter struct { - w io.Writer - buf []result -} - -func (jr *jsonReporter) Report(r result) error { - jr.buf = append(jr.buf, r) return nil } -func (jr *jsonReporter) Close() error { - enc := json.NewEncoder(jr.w) - enc.SetIndent("", " ") - return enc.Encode(struct { - Result []result `json:"result"` - }{ - Result: jr.buf, - }) -} - -type fileListItem struct { - Path string - Error error -} - func listAllPaths(roots []string) chan fileListItem { ch := make(chan fileListItem) go func() { @@ -200,31 +118,8 @@ func listAllPaths(roots []string) chan fileListItem { return ch } -var parsers = map[string]parser{ - ".json": utilParser{}, - ".yaml": utilParser{}, - ".yml": utilParser{}, -} - -type parser interface { - Parse(io.Reader) (interface{}, error) -} - -type utilParser struct { -} - -func (utilParser) Parse(r io.Reader) (interface{}, error) { - bs, err := io.ReadAll(r) - if err != nil { - return nil, err - } - var x interface{} - return x, util.Unmarshal(bs, &x) -} - func parse(p string) (*interface{}, error) { - - parser, ok := parsers[path.Ext(p)] + selectedParser, ok := parsers[path.Ext(p)] if !ok { return nil, nil } @@ -236,7 +131,7 @@ func parse(p string) (*interface{}, error) { defer f.Close() - val, err := parser.Parse(f) + val, err := selectedParser.Parse(f) if err != nil { return nil, err } diff --git a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/json_reporter.go b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/json_reporter.go new file mode 100644 index 000000000..c0b23dbe7 --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/json_reporter.go @@ -0,0 +1,85 @@ +package exec + +import ( + "context" + "encoding/json" + "fmt" + "io" + "time" + + "github.com/open-policy-agent/opa/sdk" +) + +type result struct { + Path string `json:"path"` + Error error `json:"error,omitempty"` + Result *interface{} `json:"result,omitempty"` +} + +type jsonReporter struct { + w io.Writer + buf []result + ctx *context.Context + opa *sdk.OPA + params *Params + errorCount int + failCount int + decisionFunc func(ctx context.Context, options sdk.DecisionOptions) (*sdk.DecisionResult, error) +} + +func (jr *jsonReporter) Report(r result) { + jr.buf = append(jr.buf, r) +} + +func (jr *jsonReporter) Close() error { + enc := json.NewEncoder(jr.w) + enc.SetIndent("", " ") + return enc.Encode(struct { + Result []result `json:"result"` + }{ + Result: jr.buf, + }) +} + +func (jr *jsonReporter) StoreDecision(input *interface{}, itemPath string) { + rs, err := jr.decisionFunc(*jr.ctx, sdk.DecisionOptions{ + Path: jr.params.Decision, + Now: 
time.Now(), + Input: input, + }) + if err != nil { + jr.Report(result{Path: itemPath, Error: err}) + if (jr.params.FailDefined && !sdk.IsUndefinedErr(err)) || (jr.params.Fail && sdk.IsUndefinedErr(err)) || (jr.params.FailNonEmpty && !sdk.IsUndefinedErr(err)) { + jr.errorCount++ + } + return + } + + jr.Report(result{Path: itemPath, Result: &rs.Result}) + + if (jr.params.FailDefined && rs.Result != nil) || (jr.params.Fail && rs.Result == nil) { + jr.failCount++ + } + + if jr.params.FailNonEmpty && rs.Result != nil { + // Check if rs.Result is an array and has one or more members + resultArray, isArray := rs.Result.([]interface{}) + if (!isArray) || (isArray && (len(resultArray) > 0)) { + jr.failCount++ + } + } +} + +func (jr *jsonReporter) ReportFailure() error { + if (jr.params.Fail || jr.params.FailDefined || jr.params.FailNonEmpty) && (jr.failCount > 0 || jr.errorCount > 0) { + if jr.params.Fail { + return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail is set", jr.failCount, jr.errorCount) + } + if jr.params.FailDefined { + return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-defined is set", jr.failCount, jr.errorCount) + } + return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-non-empty is set", jr.failCount, jr.errorCount) + } + + return nil +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/params.go b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/params.go new file mode 100644 index 000000000..bf9c0a139 --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/params.go @@ -0,0 +1,53 @@ +package exec + +import ( + "errors" + "io" + "time" + + "github.com/open-policy-agent/opa/logging" + "github.com/open-policy-agent/opa/util" +) + +type Params struct { + Paths []string // file paths to execute against + Output io.Writer // output stream to write normal output to + ConfigFile string // OPA configuration file path + ConfigOverrides []string // OPA configuration overrides (--set arguments) + ConfigOverrideFiles []string // OPA configuration overrides (--set-file arguments) + OutputFormat *util.EnumFlag // output format (default: pretty) + LogLevel *util.EnumFlag // log level for plugins + LogFormat *util.EnumFlag // log format for plugins + LogTimestampFormat string // log timestamp format for plugins + BundlePaths []string // explicit paths of bundles to inject into the configuration + Decision string // decision to evaluate (overrides default decision set by configuration) + Fail bool // exits with non-zero exit code on undefined policy decision or empty policy decision result or other errors + FailDefined bool // exits with non-zero exit code on 'not undefined policy decisiondefined' or 'not empty policy decision result' or other errors + FailNonEmpty bool // exits with non-zero exit code on non-empty set (array) results + StdIn bool // pull input from std-in, rather than input files + Timeout time.Duration // timeout to prevent infinite hangs. If set to 0, the command will never time out + V1Compatible bool // use OPA 1.0 compatibility mode + Logger logging.Logger // Logger override. If set to nil, the default logger is used. 
+} + +func NewParams(w io.Writer) *Params { + return &Params{ + Output: w, + OutputFormat: util.NewEnumFlag("pretty", []string{"pretty", "json"}), + LogLevel: util.NewEnumFlag("error", []string{"debug", "info", "error"}), + LogFormat: util.NewEnumFlag("json", []string{"text", "json", "json-pretty"}), + } +} + +func (p *Params) validateParams() error { + if p.Fail && p.FailDefined { + return errors.New("specify --fail or --fail-defined but not both") + } + if p.FailNonEmpty && p.Fail { + return errors.New("specify --fail-non-empty or --fail but not both") + } + if p.FailNonEmpty && p.FailDefined { + return errors.New("specify --fail-non-empty or --fail-defined but not both") + } + return nil +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/parser.go b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/parser.go new file mode 100644 index 000000000..46ebf08dc --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/parser.go @@ -0,0 +1,23 @@ +package exec + +import ( + "io" + + "github.com/open-policy-agent/opa/util" +) + +type parser interface { + Parse(io.Reader) (interface{}, error) +} + +type utilParser struct { +} + +func (utilParser) Parse(r io.Reader) (interface{}, error) { + bs, err := io.ReadAll(r) + if err != nil { + return nil, err + } + var x interface{} + return x, util.Unmarshal(bs, &x) +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/std_in_reader.go b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/std_in_reader.go new file mode 100644 index 000000000..85db3ee1c --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/cmd/internal/exec/std_in_reader.go @@ -0,0 +1,25 @@ +package exec + +import ( + "bufio" + "io" + "strings" +) + +type stdInReader struct { + Reader io.Reader +} + +func (sr *stdInReader) ReadInput() string { + var lines []string + in := bufio.NewScanner(sr.Reader) + for { + in.Scan() + line := in.Text() + if len(line) == 0 { + break + } + lines = append(lines, line) + } + return strings.Join(lines, "\n") +} diff --git a/vendor/github.com/open-policy-agent/opa/cmd/oracle.go b/vendor/github.com/open-policy-agent/opa/cmd/oracle.go index 5ffea13e5..d688aa0b6 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/oracle.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/oracle.go @@ -82,7 +82,7 @@ by the input location.`, } return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { if err := dofindDefinition(findDefinitionParams, os.Stdin, os.Stdout, args); err != nil { fmt.Fprintln(os.Stderr, "error:", err) os.Exit(1) @@ -111,7 +111,7 @@ func dofindDefinition(params findDefinitionParams, stdin io.Reader, stdout io.Wr } b, err = loader.NewFileLoader(). WithSkipBundleVerification(true). 
- WithFilter(func(abspath string, info os.FileInfo, depth int) bool { + WithFilter(func(_ string, info os.FileInfo, _ int) bool { // While directories may contain other things of interest for OPA (json, yaml..), // only .rego will work reliably for the purpose of finding definitions return strings.HasPrefix(info.Name(), ".rego") diff --git a/vendor/github.com/open-policy-agent/opa/cmd/refactor.go b/vendor/github.com/open-policy-agent/opa/cmd/refactor.go index 0786a3e5b..4cad94e9c 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/refactor.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/refactor.go @@ -76,7 +76,7 @@ The 'move' command outputs the below policy to stdout with the package name rewr } return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { if err := doMove(moveCommandParams, args, os.Stdout); err != nil { fmt.Fprintln(os.Stderr, "error:", err) os.Exit(1) diff --git a/vendor/github.com/open-policy-agent/opa/cmd/run.go b/vendor/github.com/open-policy-agent/opa/cmd/run.go index 18291e026..28ac319e7 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/run.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/run.go @@ -195,7 +195,7 @@ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1 See https://godoc.org/crypto/tls#pkg-constants for more information. `, - PreRunE: func(cmd *cobra.Command, args []string) error { + PreRunE: func(cmd *cobra.Command, _ []string) error { return env.CmdFlags.CheckEnvironmentVariables(cmd) }, Run: func(cmd *cobra.Command, args []string) { @@ -391,7 +391,7 @@ func verifyCipherSuites(cipherSuites []string) (*[]uint16, error) { cipherSuitesMap[c.Name] = c } - cipherSuitesIds := []uint16{} + cipherSuitesIDs := []uint16{} for _, c := range cipherSuites { val, ok := cipherSuitesMap[c] if !ok { @@ -405,10 +405,10 @@ func verifyCipherSuites(cipherSuites []string) (*[]uint16, error) { } } - cipherSuitesIds = append(cipherSuitesIds, val.ID) + cipherSuitesIDs = append(cipherSuitesIDs, val.ID) } - return &cipherSuitesIds, nil + return &cipherSuitesIDs, nil } func historyPath() string { diff --git a/vendor/github.com/open-policy-agent/opa/cmd/sign.go b/vendor/github.com/open-policy-agent/opa/cmd/sign.go index a59e57658..1580b13ad 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/sign.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/sign.go @@ -138,7 +138,7 @@ https://www.openpolicyagent.org/docs/latest/management-bundles/#signature-format return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { if err := doSign(args, cmdParams); err != nil { fmt.Println("error:", err) os.Exit(1) diff --git a/vendor/github.com/open-policy-agent/opa/cmd/test.go b/vendor/github.com/open-policy-agent/opa/cmd/test.go index 09fa2092c..be266ab29 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/test.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/test.go @@ -62,6 +62,7 @@ type testCommandParams struct { output io.Writer errOutput io.Writer v1Compatible bool + varValues bool } func newTestCommandParams() testCommandParams { @@ -348,7 +349,8 @@ func compileAndSetupTests(ctx context.Context, testParams testCommandParams, sto WithEnablePrintStatements(!testParams.benchmark). WithCapabilities(capabilities). WithSchemas(schemaSet). - WithUseTypeCheckAnnotations(true) + WithUseTypeCheckAnnotations(true). 
+ WithRewriteTestRules(testParams.varValues) info, err := runtime.Term(runtime.Params{}) if err != nil { @@ -384,7 +386,7 @@ func compileAndSetupTests(ctx context.Context, testParams testCommandParams, sto SetCompiler(compiler). SetStore(store). CapturePrintOutput(true). - EnableTracing(testParams.verbose). + EnableTracing(testParams.verbose || testParams.varValues). SetCoverageQueryTracer(coverTracer). SetRuntime(info). SetModules(modules). @@ -413,6 +415,8 @@ func compileAndSetupTests(ctx context.Context, testParams testCommandParams, sto BenchmarkResults: testParams.benchmark, BenchMarkShowAllocations: testParams.benchMem, BenchMarkGoBenchFormat: goBench, + FailureLine: testParams.varValues, + LocalVars: testParams.varValues, } } } else { @@ -520,7 +524,7 @@ recommended as some updates might cause them to be dropped by OPA. return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, args []string) { exitCode, _ := opaTest(args, testParams) os.Exit(exitCode) }, @@ -536,6 +540,7 @@ recommended as some updates might cause them to be dropped by OPA. testCommand.Flags().BoolVar(&testParams.benchmark, "bench", false, "benchmark the unit tests") testCommand.Flags().StringVarP(&testParams.runRegex, "run", "r", "", "run only test cases matching the regular expression.") testCommand.Flags().BoolVarP(&testParams.watch, "watch", "w", false, "watch command line files for changes") + testCommand.Flags().BoolVar(&testParams.varValues, "var-values", false, "show local variable values in test output") // Shared flags addBundleModeFlag(testCommand.Flags(), &testParams.bundleMode, false) diff --git a/vendor/github.com/open-policy-agent/opa/cmd/version.go b/vendor/github.com/open-policy-agent/opa/cmd/version.go index 2501ad117..3d3b0de4a 100644 --- a/vendor/github.com/open-policy-agent/opa/cmd/version.go +++ b/vendor/github.com/open-policy-agent/opa/cmd/version.go @@ -26,10 +26,10 @@ func init() { Use: "version", Short: "Print the version of OPA", Long: "Show version and build information for OPA.", - PreRunE: func(cmd *cobra.Command, args []string) error { + PreRunE: func(cmd *cobra.Command, _ []string) error { return env.CmdFlags.CheckEnvironmentVariables(cmd) }, - Run: func(cmd *cobra.Command, args []string) { + Run: func(_ *cobra.Command, _ []string) { generateCmdOutput(os.Stdout, check) }, } diff --git a/vendor/github.com/open-policy-agent/opa/format/format.go b/vendor/github.com/open-policy-agent/opa/format/format.go index 6d327296d..02b6f73d8 100644 --- a/vendor/github.com/open-policy-agent/opa/format/format.go +++ b/vendor/github.com/open-policy-agent/opa/format/format.go @@ -1340,7 +1340,10 @@ func closingLoc(skipOpen, skipClose, open, close byte, loc *ast.Location) *ast.L i, offset = skipPast(skipOpen, skipClose, loc) } - for ; i < len(loc.Text) && loc.Text[i] != open; i++ { + for ; i < len(loc.Text); i++ { + if loc.Text[i] == open { + break + } } if i >= len(loc.Text) { @@ -1369,7 +1372,10 @@ func closingLoc(skipOpen, skipClose, open, close byte, loc *ast.Location) *ast.L func skipPast(open, close byte, loc *ast.Location) (int, int) { i := 0 - for ; i < len(loc.Text) && loc.Text[i] != open; i++ { + for ; i < len(loc.Text); i++ { + if loc.Text[i] == open { + break + } } state := 1 diff --git a/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go b/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go index 89f3e2a6f..064649733 100644 --- 
a/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go +++ b/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go @@ -95,7 +95,8 @@ func LoadBundleFromDisk(path, name string, bvc *bundle.VerificationConfig) (*bun func LoadBundleFromDiskForRegoVersion(regoVersion ast.RegoVersion, path, name string, bvc *bundle.VerificationConfig) (*bundle.Bundle, error) { bundlePath := filepath.Join(path, name, "bundle.tar.gz") - if _, err := os.Stat(bundlePath); err == nil { + _, err := os.Stat(bundlePath) + if err == nil { f, err := os.Open(filepath.Join(bundlePath)) if err != nil { return nil, err @@ -116,9 +117,9 @@ func LoadBundleFromDiskForRegoVersion(regoVersion ast.RegoVersion, path, name st return &b, nil } else if os.IsNotExist(err) { return nil, nil - } else { - return nil, err } + + return nil, err } // SaveBundleToDisk saves the given raw bytes representing the bundle's content to disk diff --git a/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go b/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go index b827ebb91..9a5cebec5 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go +++ b/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go @@ -1332,7 +1332,7 @@ func (c *Compiler) compileWithStmt(with *ir.WithStmt, result *[]instruction.Inst return nil } -func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, loc ir.Location, instrs []instruction.Instruction) []instruction.Instruction { +func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, _ ir.Location, instrs []instruction.Instruction) []instruction.Instruction { lcopy := c.genLocal() // holds copy of local instrs = append(instrs, instruction.GetLocal{Index: c.local(local)}) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go index 5bdfada5d..8e035013c 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go @@ -788,7 +788,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) return nil } -func (d *Schema) parseReference(documentNode interface{}, currentSchema *SubSchema) error { +func (d *Schema) parseReference(_ interface{}, currentSchema *SubSchema) error { var ( refdDocumentNode interface{} dsp *schemaPoolDocument diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go index 58d10c1f7..7c86e3724 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go @@ -556,10 +556,10 @@ func (v *SubSchema) validateArray(currentSubSchema *SubSchema, value []interface if validationResult.Valid() { validatedOne = true break - } else { - if bestValidationResult == nil || validationResult.score > bestValidationResult.score { - bestValidationResult = validationResult - } + } + + if bestValidationResult == nil || validationResult.score > bestValidationResult.score { + bestValidationResult = validationResult } } if !validatedOne { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go 
b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go index 1af9c4f96..66bd348c4 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go @@ -25,7 +25,7 @@ func init() { ) }) - observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go index d9aa8873e..36b2d057c 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go @@ -10,7 +10,7 @@ import ( func init() { AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) { // A GraphQL field is only valid if all supplied arguments are defined by that field. - observers.OnField(func(walker *Walker, field *ast.Field) { + observers.OnField(func(_ *Walker, field *ast.Field) { if field.Definition == nil || field.ObjectDefinition == nil { return } @@ -33,7 +33,7 @@ func init() { } }) - observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + observers.OnDirective(func(_ *Walker, directive *ast.Directive) { if directive.Definition == nil { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go index 016541e82..9855291e3 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go @@ -15,7 +15,7 @@ func init() { Column int } var seen = map[mayNotBeUsedDirective]bool{} - observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + observers.OnDirective(func(_ *Walker, directive *ast.Directive) { if directive.Definition == nil { addError( Message(`Unknown directive "@%s".`, directive.Name), diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go index 94583dac3..8ae1fc33f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) { - observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) { if fragmentSpread.Definition == nil { addError( Message(`Unknown fragment "%s".`, fragmentSpread.Name), diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go 
b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go index f82cfe9f9..f6ba046a1 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go @@ -13,13 +13,13 @@ func init() { inFragmentDefinition := false fragmentNameUsed := make(map[string]bool) - observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) { if !inFragmentDefinition { fragmentNameUsed[fragmentSpread.Name] = true } }) - observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { inFragmentDefinition = true if !fragmentNameUsed[fragment.Name] { addError( diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go index c019ae42e..163ac895b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { for _, varDef := range operation.VariableDefinitions { if varDef.Used { continue diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go index 20364f8ba..d6d12c4fd 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) { - observers.OnField(func(walker *Walker, field *ast.Field) { + observers.OnField(func(_ *Walker, field *ast.Field) { if field.Definition == nil { return } @@ -35,7 +35,7 @@ func init() { } }) - observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + observers.OnDirective(func(_ *Walker, directive *ast.Directive) { if directive.Definition == nil { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go index 4466d5672..7458c5f6c 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go @@ -9,11 +9,11 @@ import ( func init() { AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) { - observers.OnField(func(walker *Walker, field *ast.Field) { + observers.OnField(func(_ *Walker, field *ast.Field) { checkUniqueArgs(field.Arguments, addError) }) - observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + 
observers.OnDirective(func(_ *Walker, directive *ast.Directive) { checkUniqueArgs(directive.Arguments, addError) }) }) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go index 09968a741..ecf5a0a82 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) { - observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) { + observers.OnDirectiveList(func(_ *Walker, directives []*ast.Directive) { seen := map[string]bool{} for _, dir := range directives { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go index 3da46467e..c94f3ad27 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go @@ -11,7 +11,7 @@ func init() { AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) { seenFragments := map[string]bool{} - observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { if seenFragments[fragment.Name] { addError( Message(`There can be only one fragment named "%s".`, fragment.Name), diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go index f9f69051b..a93d63bd1 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) { - observers.OnValue(func(walker *Walker, value *ast.Value) { + observers.OnValue(func(_ *Walker, value *ast.Value) { if value.Kind != ast.ObjectValue { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go index dc937795b..dcd404dad 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go @@ -11,7 +11,7 @@ func init() { AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) { seen := map[string]bool{} - observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { if seen[operation.Name] { addError( Message(`There can be only one operation named "%s".`, operation.Name), diff --git 
a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go index 94dab3cf1..7a214dbe4 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { seen := map[string]int{} for _, def := range operation.VariableDefinitions { // add the same error only once per a variable. diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go index 88d8f53c4..8858023d4 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go @@ -13,7 +13,7 @@ import ( func init() { AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) { - observers.OnValue(func(walker *Walker, value *ast.Value) { + observers.OnValue(func(_ *Walker, value *ast.Value) { if value.Definition == nil || value.ExpectedType == nil { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go index 51cb77ca3..ea4dfcc5a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go @@ -9,7 +9,7 @@ import ( func init() { AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { for _, def := range operation.VariableDefinitions { if def.Definition == nil { continue diff --git a/vendor/github.com/open-policy-agent/opa/internal/oracle/oracle.go b/vendor/github.com/open-policy-agent/opa/internal/oracle/oracle.go index e3eb3afa1..baa74fae7 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/oracle/oracle.go +++ b/vendor/github.com/open-policy-agent/opa/internal/oracle/oracle.go @@ -142,7 +142,7 @@ func compileUpto(stage string, modules map[string]*ast.Module, bs []byte, filena if stage != "" { compiler = compiler.WithStageAfter(stage, ast.CompilerStageDefinition{ Name: "halt", - Stage: func(c *ast.Compiler) *ast.Error { + Stage: func(_ *ast.Compiler) *ast.Error { return &ast.Error{ Code: "halt", } diff --git a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go index df1d66b85..b75d26dda 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go +++ b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go @@ -1523,7 +1523,7 @@ func (p *Planner) planValue(t ast.Value, loc *ast.Location, iter planiter) error 
} } -func (p *Planner) planNull(null ast.Null, iter planiter) error { +func (p *Planner) planNull(_ ast.Null, iter planiter) error { target := p.newLocal() diff --git a/vendor/github.com/open-policy-agent/opa/internal/presentation/presentation.go b/vendor/github.com/open-policy-agent/opa/internal/presentation/presentation.go index 42fef2460..b69f93a27 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/presentation/presentation.go +++ b/vendor/github.com/open-policy-agent/opa/internal/presentation/presentation.go @@ -282,8 +282,21 @@ func Values(w io.Writer, r Output) error { // Pretty prints all of r to w in a human-readable format. func Pretty(w io.Writer, r Output) error { + return PrettyWithOptions(w, r, PrettyOptions{ + TraceOpts: topdown.PrettyTraceOptions{ + Locations: true, + }, + }) +} + +type PrettyOptions struct { + TraceOpts topdown.PrettyTraceOptions +} + +// PrettyWithOptions prints all of r to w in a human-readable format. +func PrettyWithOptions(w io.Writer, r Output, opts PrettyOptions) error { if len(r.Explanation) > 0 { - if err := prettyExplanation(w, r.Explanation); err != nil { + if err := prettyExplanation(w, r.Explanation, opts.TraceOpts); err != nil { return err } } @@ -554,8 +567,8 @@ func prettyAggregatedProfile(w io.Writer, profile []profiler.ExprStatsAggregated return nil } -func prettyExplanation(w io.Writer, explanation []*topdown.Event) error { - topdown.PrettyTraceWithLocation(w, explanation) +func prettyExplanation(w io.Writer, explanation []*topdown.Event, opts topdown.PrettyTraceOptions) error { + topdown.PrettyTraceWithOpts(w, explanation, opts) return nil } diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go index 12bc38ec8..929f2006e 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go +++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go @@ -174,7 +174,7 @@ type httpSigner struct { PayloadHash string } -func (s *httpSigner) setRequiredSigningFields(headers http.Header, query url.Values) { +func (s *httpSigner) setRequiredSigningFields(headers http.Header, _ url.Values) { amzDate := s.Time.Format(timeFormat) headers.Set(AmzRegionSetKey, strings.Join(s.RegionSet, ",")) diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go index 3dcb7d209..e033da746 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go +++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go @@ -8,7 +8,7 @@ import ( "github.com/open-policy-agent/opa/logging" ) -// DoRequestWithClient is a convenience function to get the body of a http response with +// DoRequestWithClient is a convenience function to get the body of an HTTP response with // appropriate error-handling boilerplate and logging. 
func DoRequestWithClient(req *http.Request, client *http.Client, desc string, logger logging.Logger) ([]byte, error) { resp, err := client.Do(req) diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/v4/util.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/v4/util.go index 0cb9cffaf..c8e7fb1ba 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/v4/util.go +++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/v4/util.go @@ -11,14 +11,9 @@ const doubleSpace = " " // contain multiple side-by-side spaces. func StripExcessSpaces(str string) string { var j, k, l, m, spaces int - // Trim trailing spaces - for j = len(str) - 1; j >= 0 && str[j] == ' '; j-- { - } - // Trim leading spaces - for k = 0; k < j && str[k] == ' '; k++ { - } - str = str[k : j+1] + // Trim leading and trailing spaces + str = strings.Trim(str, " ") // Strip multiple spaces. j = strings.Index(str, doubleSpace) diff --git a/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go b/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go index 76fb9ee8c..08f3bf918 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go +++ b/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go @@ -63,6 +63,14 @@ func TruncateFilePaths(maxIdealWidth, maxWidth int, path ...string) (map[string] return result, longestLocation } +func Truncate(str string, maxWidth int) string { + if len(str) <= maxWidth { + return str + } + + return str[:maxWidth-3] + "..." +} + func getPathFromFirstSeparator(path string) string { s := filepath.Dir(path) s = strings.TrimPrefix(s, string(filepath.Separator)) diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go index d9c1bda3d..35e6059c7 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go @@ -375,10 +375,10 @@ func readTableSection(r io.Reader, s *module.TableSection) error { return err } else if elem != constant.ElementTypeAnyFunc { return fmt.Errorf("illegal element type") - } else { - table.Type = types.Anyfunc } + table.Type = types.Anyfunc + if err := readLimits(r, &table.Lim); err != nil { return err } diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/pool.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/pool.go index bfc211240..40970be81 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/pool.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/pool.go @@ -220,7 +220,7 @@ func (p *Pool) SetPolicyData(ctx context.Context, policy []byte, data []byte) er func (p *Pool) SetDataPath(ctx context.Context, path []string, value interface{}) error { p.dataMtx.Lock() defer p.dataMtx.Unlock() - return p.updateVMs(func(vm *VM, opts vmOpts) error { + return p.updateVMs(func(vm *VM, _ vmOpts) error { return vm.SetDataPath(ctx, path, value) }) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/vm.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/vm.go index 40453682c..b089a2042 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/vm.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/sdk/internal/wasm/vm.go @@ -487,11 +487,11 @@ func (i *VM) 
SetPolicyData(ctx context.Context, opts vmOpts) error { } } mem := i.memory.UnsafeData(i.store) - len := int32(len(opts.parsedData)) - copy(mem[i.baseHeapPtr:i.baseHeapPtr+len], opts.parsedData) + length := int32(len(opts.parsedData)) + copy(mem[i.baseHeapPtr:i.baseHeapPtr+length], opts.parsedData) i.dataAddr = opts.parsedDataAddr - i.evalHeapPtr = i.baseHeapPtr + len + i.evalHeapPtr = i.baseHeapPtr + length err := i.setHeapState(ctx, i.evalHeapPtr) if err != nil { return err diff --git a/vendor/github.com/open-policy-agent/opa/ir/pretty.go b/vendor/github.com/open-policy-agent/opa/ir/pretty.go index a0f2df3bc..6102c5a91 100644 --- a/vendor/github.com/open-policy-agent/opa/ir/pretty.go +++ b/vendor/github.com/open-policy-agent/opa/ir/pretty.go @@ -25,11 +25,11 @@ type prettyPrinter struct { w io.Writer } -func (pp *prettyPrinter) Before(x interface{}) { +func (pp *prettyPrinter) Before(_ interface{}) { pp.depth++ } -func (pp *prettyPrinter) After(x interface{}) { +func (pp *prettyPrinter) After(_ interface{}) { pp.depth-- } diff --git a/vendor/github.com/open-policy-agent/opa/loader/errors.go b/vendor/github.com/open-policy-agent/opa/loader/errors.go index d6da40a06..b8aafb142 100644 --- a/vendor/github.com/open-policy-agent/opa/loader/errors.go +++ b/vendor/github.com/open-policy-agent/opa/loader/errors.go @@ -41,7 +41,7 @@ func (e *Errors) add(err error) { type unsupportedDocumentType string func (path unsupportedDocumentType) Error() string { - return string(path) + ": bad document type" + return string(path) + ": document must be of type object" } type unrecognizedFile string diff --git a/vendor/github.com/open-policy-agent/opa/loader/loader.go b/vendor/github.com/open-policy-agent/opa/loader/loader.go index 1ce0e7868..e584bab3c 100644 --- a/vendor/github.com/open-policy-agent/opa/loader/loader.go +++ b/vendor/github.com/open-policy-agent/opa/loader/loader.go @@ -81,7 +81,7 @@ type Filter = filter.LoaderFilter // GlobExcludeName excludes files and directories whose names do not match the // shell style pattern at minDepth or greater. func GlobExcludeName(pattern string, minDepth int) Filter { - return func(abspath string, info fs.FileInfo, depth int) bool { + return func(_ string, info fs.FileInfo, depth int) bool { match, _ := filepath.Match(pattern, info.Name()) return match && depth >= minDepth } @@ -486,7 +486,7 @@ func AsBundle(path string) (*bundle.Bundle, error) { // AllRegos returns a Result object loaded (recursively) with all Rego source // files from the specified paths. 
func AllRegos(paths []string) (*Result, error) { - return NewFileLoader().Filtered(paths, func(_ string, info os.FileInfo, depth int) bool { + return NewFileLoader().Filtered(paths, func(_ string, info os.FileInfo, _ int) bool { return !info.IsDir() && !strings.HasSuffix(info.Name(), bundle.RegoExt) }) } @@ -522,7 +522,7 @@ func Paths(path string, recurse bool) (paths []string, err error) { if err != nil { return nil, err } - err = filepath.Walk(path, func(f string, info os.FileInfo, err error) error { + err = filepath.Walk(path, func(f string, _ os.FileInfo, _ error) error { if !recurse { if path != f && path != filepath.Dir(f) { return filepath.SkipDir diff --git a/vendor/github.com/open-policy-agent/opa/plugins/discovery/config.go b/vendor/github.com/open-policy-agent/opa/plugins/discovery/config.go index a157f7abb..869ae9b4d 100644 --- a/vendor/github.com/open-policy-agent/opa/plugins/discovery/config.go +++ b/vendor/github.com/open-policy-agent/opa/plugins/discovery/config.go @@ -90,19 +90,19 @@ func (c *Config) validateAndInjectDefaults(services []string, confKeys map[strin } // make a copy of the keys map - copy := map[string]*keys.Config{} + cpy := map[string]*keys.Config{} for key, kc := range confKeys { - copy[key] = kc + cpy[key] = kc } if c.Signing != nil { - err := c.Signing.ValidateAndInjectDefaults(copy) + err := c.Signing.ValidateAndInjectDefaults(cpy) if err != nil { return fmt.Errorf("invalid configuration for discovery service: %s", err.Error()) } } else { if len(confKeys) > 0 { - c.Signing = bundle.NewVerificationConfig(copy, "", "", nil) + c.Signing = bundle.NewVerificationConfig(cpy, "", "", nil) } } diff --git a/vendor/github.com/open-policy-agent/opa/plugins/logs/plugin.go b/vendor/github.com/open-policy-agent/opa/plugins/logs/plugin.go index 5a58f827a..64a58a4b3 100644 --- a/vendor/github.com/open-policy-agent/opa/plugins/logs/plugin.go +++ b/vendor/github.com/open-policy-agent/opa/plugins/logs/plugin.go @@ -415,21 +415,42 @@ func (c *Config) validateAndInjectDefaults(services []string, pluginsList []stri // Plugin implements decision log buffering and uploading. 
type Plugin struct { - manager *plugins.Manager - config Config - buffer *logBuffer - enc *chunkEncoder - mtx sync.Mutex - stop chan chan struct{} - reconfig chan reconfigure - mask *rego.PreparedEvalQuery - maskMutex sync.Mutex - drop *rego.PreparedEvalQuery - dropMutex sync.Mutex - limiter *rate.Limiter - metrics metrics.Metrics - logger logging.Logger - status *lstat.Status + manager *plugins.Manager + config Config + buffer *logBuffer + enc *chunkEncoder + mtx sync.Mutex + stop chan chan struct{} + reconfig chan reconfigure + preparedMask prepareOnce + preparedDrop prepareOnce + limiter *rate.Limiter + metrics metrics.Metrics + logger logging.Logger + status *lstat.Status +} + +type prepareOnce struct { + once *sync.Once + preparedQuery *rego.PreparedEvalQuery + err error +} + +func newPrepareOnce() *prepareOnce { + return &prepareOnce{ + once: new(sync.Once), + } +} + +func (po *prepareOnce) drop() { + po.once = new(sync.Once) +} + +func (po *prepareOnce) prepareOnce(f func() (*rego.PreparedEvalQuery, error)) (*rego.PreparedEvalQuery, error) { + po.once.Do(func() { + po.preparedQuery, po.err = f() + }) + return po.preparedQuery, po.err } type reconfigure struct { @@ -513,14 +534,16 @@ func (b *ConfigBuilder) Parse() (*Config, error) { func New(parsedConfig *Config, manager *plugins.Manager) *Plugin { plugin := &Plugin{ - manager: manager, - config: *parsedConfig, - stop: make(chan chan struct{}), - buffer: newLogBuffer(*parsedConfig.Reporting.BufferSizeLimitBytes), - enc: newChunkEncoder(*parsedConfig.Reporting.UploadSizeLimitBytes), - reconfig: make(chan reconfigure), - logger: manager.Logger().WithFields(map[string]interface{}{"plugin": Name}), - status: &lstat.Status{}, + manager: manager, + config: *parsedConfig, + stop: make(chan chan struct{}), + buffer: newLogBuffer(*parsedConfig.Reporting.BufferSizeLimitBytes), + enc: newChunkEncoder(*parsedConfig.Reporting.UploadSizeLimitBytes), + reconfig: make(chan reconfigure), + logger: manager.Logger().WithFields(map[string]interface{}{"plugin": Name}), + status: &lstat.Status{}, + preparedDrop: *newPrepareOnce(), + preparedMask: *newPrepareOnce(), } if parsedConfig.Reporting.MaxDecisionsPerSecond != nil { @@ -554,7 +577,7 @@ func Lookup(manager *plugins.Manager) *Plugin { } // Start starts the plugin. -func (p *Plugin) Start(ctx context.Context) error { +func (p *Plugin) Start(_ context.Context) error { p.logger.Info("Starting decision logger.") go p.loop() p.manager.UpdatePluginStatus(Name, &plugins.Status{State: plugins.StateOK}) @@ -713,13 +736,8 @@ func (p *Plugin) Reconfigure(_ context.Context, config interface{}) { done := make(chan struct{}) p.reconfig <- reconfigure{config: config, done: done} - p.maskMutex.Lock() - defer p.maskMutex.Unlock() - p.mask = nil - - p.dropMutex.Lock() - defer p.dropMutex.Unlock() - p.drop = nil + p.preparedMask.drop() + p.preparedDrop.drop() <-done } @@ -753,13 +771,8 @@ func (p *Plugin) Trigger(ctx context.Context) error { // fires. This indicates a new compiler instance is available. The decision // logger needs to prepare a new masking query. 
func (p *Plugin) compilerUpdated(storage.Transaction) { - p.maskMutex.Lock() - defer p.maskMutex.Unlock() - p.mask = nil - - p.dropMutex.Lock() - defer p.dropMutex.Unlock() - p.drop = nil + p.preparedMask.drop() + p.preparedDrop.drop() } func (p *Plugin) loop() { @@ -975,42 +988,33 @@ func (p *Plugin) bufferChunk(buffer *logBuffer, bs []byte) { } func (p *Plugin) maskEvent(ctx context.Context, txn storage.Transaction, input ast.Value, event *EventV1) error { - - mask, err := func() (rego.PreparedEvalQuery, error) { - - p.maskMutex.Lock() - defer p.maskMutex.Unlock() - - if p.mask == nil { - - query := ast.NewBody(ast.NewExpr(ast.NewTerm(p.config.maskDecisionRef))) - - r := rego.New( - rego.ParsedQuery(query), - rego.Compiler(p.manager.GetCompiler()), - rego.Store(p.manager.Store), - rego.Transaction(txn), - rego.Runtime(p.manager.Info), - rego.EnablePrintStatements(p.manager.EnablePrintStatements()), - rego.PrintHook(p.manager.PrintHook()), - ) - - pq, err := r.PrepareForEval(context.Background()) - if err != nil { - return rego.PreparedEvalQuery{}, err - } - - p.mask = &pq + pq, err := p.preparedMask.prepareOnce(func() (*rego.PreparedEvalQuery, error) { + var pq rego.PreparedEvalQuery + + query := ast.NewBody(ast.NewExpr(ast.NewTerm(p.config.maskDecisionRef))) + + r := rego.New( + rego.ParsedQuery(query), + rego.Compiler(p.manager.GetCompiler()), + rego.Store(p.manager.Store), + rego.Transaction(txn), + rego.Runtime(p.manager.Info), + rego.EnablePrintStatements(p.manager.EnablePrintStatements()), + rego.PrintHook(p.manager.PrintHook()), + ) + + pq, err := r.PrepareForEval(context.Background()) + if err != nil { + return nil, err } - - return *p.mask, nil - }() + return &pq, nil + }) if err != nil { return err } - rs, err := mask.Eval( + rs, err := pq.Eval( ctx, rego.EvalParsedInput(input), rego.EvalTransaction(txn), @@ -1038,40 +1042,34 @@ func (p *Plugin) maskEvent(ctx context.Context, txn storage.Transaction, input a } func (p *Plugin) dropEvent(ctx context.Context, txn storage.Transaction, input ast.Value) (bool, error) { + var err error - drop, err := func() (rego.PreparedEvalQuery, error) { - - p.dropMutex.Lock() - defer p.dropMutex.Unlock() - - if p.drop == nil { - query := ast.NewBody(ast.NewExpr(ast.NewTerm(p.config.dropDecisionRef))) - r := rego.New( - rego.ParsedQuery(query), - rego.Compiler(p.manager.GetCompiler()), - rego.Store(p.manager.Store), - rego.Transaction(txn), - rego.Runtime(p.manager.Info), - rego.EnablePrintStatements(p.manager.EnablePrintStatements()), - rego.PrintHook(p.manager.PrintHook()), - ) - - pq, err := r.PrepareForEval(context.Background()) - if err != nil { - return rego.PreparedEvalQuery{}, err - } - - p.drop = &pq + pq, err := p.preparedDrop.prepareOnce(func() (*rego.PreparedEvalQuery, error) { + var pq rego.PreparedEvalQuery + + query := ast.NewBody(ast.NewExpr(ast.NewTerm(p.config.dropDecisionRef))) + r := rego.New( + rego.ParsedQuery(query), + rego.Compiler(p.manager.GetCompiler()), + rego.Store(p.manager.Store), + rego.Transaction(txn), + rego.Runtime(p.manager.Info), + rego.EnablePrintStatements(p.manager.EnablePrintStatements()), + rego.PrintHook(p.manager.PrintHook()), + ) + + pq, err := r.PrepareForEval(context.Background()) + if err != nil { + return nil, err } - - return *p.drop, nil - }() + return &pq, nil + }) if err != nil { return false, err } - rs, err := drop.Eval( + rs, err := pq.Eval( ctx, rego.EvalParsedInput(input), rego.EvalTransaction(txn), diff --git a/vendor/github.com/open-policy-agent/opa/plugins/plugins.go 
b/vendor/github.com/open-policy-agent/opa/plugins/plugins.go index 085930595..bacdd1507 100644 --- a/vendor/github.com/open-policy-agent/opa/plugins/plugins.go +++ b/vendor/github.com/open-policy-agent/opa/plugins/plugins.go @@ -286,11 +286,10 @@ func ValidateAndInjectDefaultsForTriggerMode(a, b *TriggerMode) (*TriggerMode, e return nil, err } return a, nil - - } else { - t := DefaultTriggerMode - return &t, nil } + + t := DefaultTriggerMode + return &t, nil } type namedplugin struct { diff --git a/vendor/github.com/open-policy-agent/opa/plugins/rest/auth.go b/vendor/github.com/open-policy-agent/opa/plugins/rest/auth.go index 3cae61ebf..c65681733 100644 --- a/vendor/github.com/open-policy-agent/opa/plugins/rest/auth.go +++ b/vendor/github.com/open-policy-agent/opa/plugins/rest/auth.go @@ -294,16 +294,13 @@ type oauth2Token struct { ExpiresAt time.Time } -func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, claims map[string]interface{}, signingKey interface{}) (*string, error) { +func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, extClaims map[string]interface{}, signingKey interface{}) (*string, error) { now := time.Now() - baseClaims := map[string]interface{}{ + claims := map[string]interface{}{ "iat": now.Unix(), "exp": now.Add(10 * time.Minute).Unix(), } - if claims == nil { - claims = make(map[string]interface{}) - } - for k, v := range baseClaims { + for k, v := range extClaims { claims[k] = v } @@ -693,7 +690,7 @@ func (ap *clientTLSAuthPlugin) NewClient(c Config) (*http.Client, error) { return client, nil } -func (ap *clientTLSAuthPlugin) Prepare(req *http.Request) error { +func (ap *clientTLSAuthPlugin) Prepare(_ *http.Request) error { return nil } diff --git a/vendor/github.com/open-policy-agent/opa/plugins/status/plugin.go b/vendor/github.com/open-policy-agent/opa/plugins/status/plugin.go index 295e1d8cc..880752403 100644 --- a/vendor/github.com/open-policy-agent/opa/plugins/status/plugin.go +++ b/vendor/github.com/open-policy-agent/opa/plugins/status/plugin.go @@ -283,7 +283,7 @@ func (p *Plugin) unregisterAll() { } // Stop stops the plugin. 
-func (p *Plugin) Stop(ctx context.Context) { +func (p *Plugin) Stop(_ context.Context) { p.logger.Info("Stopping status reporter.") p.manager.UnregisterPluginStatusListener(Name) done := make(chan struct{}) diff --git a/vendor/github.com/open-policy-agent/opa/rego/rego.go b/vendor/github.com/open-policy-agent/opa/rego/rego.go index cbe2d0c7d..266e6d6ab 100644 --- a/vendor/github.com/open-policy-agent/opa/rego/rego.go +++ b/vendor/github.com/open-policy-agent/opa/rego/rego.go @@ -1496,7 +1496,7 @@ func (r *Rego) Compile(ctx context.Context, opts ...CompileOption) (*CompileResu return r.compileWasm(modules, queries, compileQueryType) // TODO(sr) control flow is funky here } -func (r *Rego) compileWasm(modules []*ast.Module, queries []ast.Body, qType queryType) (*CompileResult, error) { +func (r *Rego) compileWasm(_ []*ast.Module, queries []ast.Body, qType queryType) (*CompileResult, error) { policy, err := r.planQuery(queries, qType) if err != nil { return nil, err @@ -1871,7 +1871,7 @@ func (r *Rego) loadFiles(ctx context.Context, txn storage.Transaction, m metrics return nil } -func (r *Rego) loadBundles(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error { +func (r *Rego) loadBundles(_ context.Context, _ storage.Transaction, m metrics.Metrics) error { if len(r.bundlePaths) == 0 { return nil } @@ -2035,7 +2035,7 @@ func (r *Rego) prepareImports() ([]*ast.Import, error) { return imports, nil } -func (r *Rego) compileQuery(query ast.Body, imports []*ast.Import, m metrics.Metrics, extras []extraStage) (ast.QueryCompiler, ast.Body, error) { +func (r *Rego) compileQuery(query ast.Body, imports []*ast.Import, _ metrics.Metrics, extras []extraStage) (ast.QueryCompiler, ast.Body, error) { var pkg *ast.Package if r.pkg != "" { @@ -2476,7 +2476,7 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries, return pq, nil } -func (r *Rego) rewriteQueryToCaptureValue(qc ast.QueryCompiler, query ast.Body) (ast.Body, error) { +func (r *Rego) rewriteQueryToCaptureValue(_ ast.QueryCompiler, query ast.Body) (ast.Body, error) { checkCapture := iteration(query) || len(query) > 1 @@ -2593,7 +2593,7 @@ type transactionCloser func(ctx context.Context, err error) error // regardless of status. 
func (r *Rego) getTxn(ctx context.Context) (storage.Transaction, transactionCloser, error) { - noopCloser := func(ctx context.Context, err error) error { + noopCloser := func(_ context.Context, _ error) error { return nil // no-op default } diff --git a/vendor/github.com/open-policy-agent/opa/repl/repl.go b/vendor/github.com/open-policy-agent/opa/repl/repl.go index fe101144d..be0f33fbf 100644 --- a/vendor/github.com/open-policy-agent/opa/repl/repl.go +++ b/vendor/github.com/open-policy-agent/opa/repl/repl.go @@ -508,9 +508,8 @@ func (r *REPL) cmdShow(args []string) error { } fmt.Fprintln(r.output, string(b)) return nil - } else { - return fmt.Errorf("unknown option '%v'", args[0]) } + return fmt.Errorf("unknown option '%v'", args[0]) } type replDebugState struct { @@ -688,7 +687,7 @@ func (r *REPL) unsetRule(ctx context.Context, name ast.Var) (bool, error) { return true, nil } -func (r *REPL) unsetPackage(ctx context.Context, pkg *ast.Package) (bool, error) { +func (r *REPL) unsetPackage(_ context.Context, pkg *ast.Package) (bool, error) { path := fmt.Sprintf("%v", pkg.Path) _, ok := r.modules[path] if ok { @@ -748,7 +747,7 @@ func (r *REPL) recompile(ctx context.Context, cpy *ast.Module) error { return nil } -func (r *REPL) compileBody(ctx context.Context, compiler *ast.Compiler, body ast.Body) (ast.Body, *ast.TypeEnv, error) { +func (r *REPL) compileBody(_ context.Context, compiler *ast.Compiler, body ast.Body) (ast.Body, *ast.TypeEnv, error) { r.timerStart(metrics.RegoQueryCompile) defer r.timerStop(metrics.RegoQueryCompile) @@ -1280,7 +1279,7 @@ func (r *REPL) loadModules(ctx context.Context, txn storage.Transaction) (map[st return modules, nil } -func (r *REPL) printTypes(ctx context.Context, typeEnv *ast.TypeEnv, body ast.Body) { +func (r *REPL) printTypes(_ context.Context, typeEnv *ast.TypeEnv, body ast.Body) { ast.WalkRefs(body, func(ref ast.Ref) bool { fmt.Fprintf(r.output, "# %v: %v\n", ref, typeEnv.Get(ref)) diff --git a/vendor/github.com/open-policy-agent/opa/server/authorizer/authorizer.go b/vendor/github.com/open-policy-agent/opa/server/authorizer/authorizer.go index 1f706c56d..10d65ecd8 100644 --- a/vendor/github.com/open-policy-agent/opa/server/authorizer/authorizer.go +++ b/vendor/github.com/open-policy-agent/opa/server/authorizer/authorizer.go @@ -151,7 +151,6 @@ func (h *Basic) ServeHTTP(w http.ResponseWriter, r *http.Request) { } func makeInput(r *http.Request) (*http.Request, interface{}, error) { - path, err := parsePath(r.URL.Path) if err != nil { return r, nil, err @@ -164,7 +163,11 @@ func makeInput(r *http.Request) (*http.Request, interface{}, error) { if expectBody(r.Method, path) { var err error - rawBody, err = io.ReadAll(r.Body) + plaintextBody, err := util.ReadMaybeCompressedBody(r) + if err != nil { + return r, nil, err + } + rawBody, err = io.ReadAll(plaintextBody) if err != nil { return r, nil, err } diff --git a/vendor/github.com/open-policy-agent/opa/server/certs.go b/vendor/github.com/open-policy-agent/opa/server/certs.go index 6c92eca07..a02d74729 100644 --- a/vendor/github.com/open-policy-agent/opa/server/certs.go +++ b/vendor/github.com/open-policy-agent/opa/server/certs.go @@ -21,7 +21,7 @@ import ( "github.com/open-policy-agent/opa/logging" ) -func (s *Server) getCertificate(h *tls.ClientHelloInfo) (*tls.Certificate, error) { +func (s *Server) getCertificate(_ *tls.ClientHelloInfo) (*tls.Certificate, error) { s.tlsConfigMtx.RLock() defer s.tlsConfigMtx.RUnlock() return s.cert, nil @@ -72,7 +72,7 @@ func (s *Server) reloadTLSConfig(logger 
logging.Logger) error { } // reloadCertificatePool loads the CA cert pool from the given file and returns a new pool if the file has changed. -func reloadCertificatePool(certPoolFile string, certPoolFileHash []byte, logger logging.Logger) (*x509.CertPool, []byte, bool, error) { +func reloadCertificatePool(certPoolFile string, certPoolFileHash []byte, _ logging.Logger) (*x509.CertPool, []byte, bool, error) { certPoolHash, err := hash(certPoolFile) if err != nil { return nil, nil, false, fmt.Errorf("failed to hash CA cert pool file: %w", err) diff --git a/vendor/github.com/open-policy-agent/opa/server/server.go b/vendor/github.com/open-policy-agent/opa/server/server.go index 4e0e7930f..398126fac 100644 --- a/vendor/github.com/open-policy-agent/opa/server/server.go +++ b/vendor/github.com/open-policy-agent/opa/server/server.go @@ -6,7 +6,6 @@ package server import ( "bytes" - "compress/gzip" "context" "crypto/tls" "crypto/x509" @@ -647,7 +646,7 @@ func (s *Server) getListenerForHTTPSServer(u *url.URL, h http.Handler, t httpLis // GetConfigForClient is used to ensure that a fresh config is provided containing the latest cert pool. // This is not required, but appears to be how connect time updates config should be done: // https://github.com/golang/go/issues/16066#issuecomment-250606132 - GetConfigForClient: func(info *tls.ClientHelloInfo) (*tls.Config, error) { + GetConfigForClient: func(_ *tls.ClientHelloInfo) (*tls.Config, error) { s.tlsConfigMtx.Lock() defer s.tlsConfigMtx.Unlock() @@ -1337,7 +1336,7 @@ func (s *Server) v1CompilePost(w http.ResponseWriter, r *http.Request) { m.Timer(metrics.RegoQueryParse).Start() // decompress the input if sent as zip - body, err := readPlainBody(r) + body, err := util.ReadMaybeCompressedBody(r) if err != nil { writer.Error(w, http.StatusBadRequest, types.NewErrorV1(types.CodeInvalidParameter, "could not decompress the body")) return @@ -2503,7 +2502,7 @@ func (s *Server) getCompiler() *ast.Compiler { return s.manager.GetCompiler() } -func (s *Server) makeRego(ctx context.Context, +func (s *Server) makeRego(_ context.Context, strictBuiltinErrors bool, txn storage.Transaction, input ast.Value, @@ -2732,7 +2731,7 @@ func readInputV0(r *http.Request) (ast.Value, *interface{}, error) { } // decompress the input if sent as zip - body, err := readPlainBody(r) + body, err := util.ReadMaybeCompressedBody(r) if err != nil { return nil, nil, fmt.Errorf("could not decompress the body: %w", err) } @@ -2785,7 +2784,7 @@ func readInputPostV1(r *http.Request) (ast.Value, *interface{}, error) { var request types.DataRequestV1 // decompress the input if sent as zip - body, err := readPlainBody(r) + body, err := util.ReadMaybeCompressedBody(r) if err != nil { return nil, nil, fmt.Errorf("could not decompress the body: %w", err) } @@ -3023,22 +3022,6 @@ func annotateSpan(ctx context.Context, decisionID string) { SetAttributes(attribute.String(otelDecisionIDAttr, decisionID)) } -func readPlainBody(r *http.Request) (io.ReadCloser, error) { - if strings.Contains(r.Header.Get("Content-Encoding"), "gzip") { - gzReader, err := gzip.NewReader(r.Body) - if err != nil { - return nil, err - } - bytesBody, err := io.ReadAll(gzReader) - if err != nil { - return nil, err - } - defer gzReader.Close() - return io.NopCloser(bytes.NewReader(bytesBody)), err - } - return r.Body, nil -} - func pretty(r *http.Request) bool { return getBoolParam(r.URL, types.ParamPrettyV1, true) } diff --git a/vendor/github.com/open-policy-agent/opa/server/writer/writer.go 
b/vendor/github.com/open-policy-agent/opa/server/writer/writer.go index 4e8f36c68..378eb17ef 100644 --- a/vendor/github.com/open-policy-agent/opa/server/writer/writer.go +++ b/vendor/github.com/open-policy-agent/opa/server/writer/writer.go @@ -17,7 +17,7 @@ import ( // HTTPStatus is used to set a specific status code // Adapted from https://stackoverflow.com/questions/27711154/what-response-code-to-return-on-a-non-supported-http-method-on-rest func HTTPStatus(code int) http.HandlerFunc { - return func(w http.ResponseWriter, req *http.Request) { + return func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(code) } } diff --git a/vendor/github.com/open-policy-agent/opa/storage/disk/disk.go b/vendor/github.com/open-policy-agent/opa/storage/disk/disk.go index 71b06c476..0ebd980ff 100644 --- a/vendor/github.com/open-policy-agent/opa/storage/disk/disk.go +++ b/vendor/github.com/open-policy-agent/opa/storage/disk/disk.go @@ -229,7 +229,7 @@ func (w *wrap) Warningf(f string, as ...interface{}) { w.debugDo(w.l.Warn, f, as func (w *wrap) Errorf(f string, as ...interface{}) { w.debugDo(w.l.Error, f, as...) } // NewTransaction implements the storage.Store interface. -func (db *Store) NewTransaction(ctx context.Context, params ...storage.TransactionParams) (storage.Transaction, error) { +func (db *Store) NewTransaction(_ context.Context, params ...storage.TransactionParams) (storage.Transaction, error) { var write bool var context *storage.Context @@ -655,7 +655,7 @@ type handle struct { cb func(context.Context, storage.Transaction, storage.TriggerEvent) } -func (h *handle) Unregister(ctx context.Context, txn storage.Transaction) { +func (h *handle) Unregister(_ context.Context, txn storage.Transaction) { underlying, err := h.db.underlying(txn) if err != nil { panic(err) @@ -750,7 +750,7 @@ func (db *Store) init(ctx context.Context, txn *badger.Txn, partitions []storage }) } -func (db *Store) validatePartitions(ctx context.Context, txn *badger.Txn, existing metadata, partitions []storage.Path) error { +func (db *Store) validatePartitions(_ context.Context, txn *badger.Txn, existing metadata, partitions []storage.Path) error { oldPathSet := pathSet(existing.Partitions) newPathSet := pathSet(partitions) @@ -812,7 +812,7 @@ func (db *Store) validatePartitions(ctx context.Context, txn *badger.Txn, existi // // Here, we only check if it's a write transaction, for consistency with // other implementations, and do nothing. 
-func (db *Store) MakeDir(_ context.Context, txn storage.Transaction, path storage.Path) error { +func (db *Store) MakeDir(_ context.Context, txn storage.Transaction, _ storage.Path) error { underlying, err := db.underlying(txn) if err != nil { return err diff --git a/vendor/github.com/open-policy-agent/opa/tester/reporter.go b/vendor/github.com/open-policy-agent/opa/tester/reporter.go index 81774d618..c7ab9cbc9 100644 --- a/vendor/github.com/open-policy-agent/opa/tester/reporter.go +++ b/vendor/github.com/open-policy-agent/opa/tester/reporter.go @@ -28,6 +28,7 @@ type PrettyReporter struct { Output io.Writer Verbose bool FailureLine bool + LocalVars bool BenchmarkResults bool BenchMarkShowAllocations bool BenchMarkGoBenchFormat bool @@ -55,14 +56,41 @@ func (r PrettyReporter) Report(ch chan *Result) error { results = append(results, tr) } - if fail > 0 && r.Verbose { + if fail > 0 && (r.Verbose || r.FailureLine) { fmt.Fprintln(r.Output, "FAILURES") r.hl() for _, failure := range failures { fmt.Fprintln(r.Output, failure) - fmt.Fprintln(r.Output) - topdown.PrettyTraceWithLocation(newIndentingWriter(r.Output), failure.Trace) + if r.Verbose { + fmt.Fprintln(r.Output) + topdown.PrettyTraceWithOpts(newIndentingWriter(r.Output), failure.Trace, topdown.PrettyTraceOptions{ + Locations: true, + ExprVariables: r.LocalVars, + }) + } + + if r.FailureLine { + fmt.Fprintln(r.Output) + for i := len(failure.Trace) - 1; i >= 0; i-- { + e := failure.Trace[i] + if e.Op == topdown.FailOp && e.Location != nil && e.QueryID != 0 { + if expr, isExpr := e.Node.(*ast.Expr); isExpr { + if _, isEvery := expr.Terms.(*ast.Every); isEvery { + // We're interested in the failing expression inside the every body. + continue + } + } + _, _ = fmt.Fprintf(newIndentingWriter(r.Output), "%s:%d:\n", e.Location.File, e.Location.Row) + if err := topdown.PrettyEvent(newIndentingWriter(r.Output, 4), e, topdown.PrettyEventOpts{PrettyVars: r.LocalVars}); err != nil { + return err + } + _, _ = fmt.Fprintln(r.Output) + break + } + } + } + fmt.Fprintln(r.Output) } @@ -216,12 +244,18 @@ func (r JSONCoverageReporter) Report(ch chan *Result) error { } type indentingWriter struct { - w io.Writer + w io.Writer + indent int } -func newIndentingWriter(w io.Writer) indentingWriter { +func newIndentingWriter(w io.Writer, indent ...int) indentingWriter { + i := 2 + if len(indent) > 0 { + i = indent[0] + } return indentingWriter{ - w: w, + w: w, + indent: i, } } @@ -231,7 +265,7 @@ func (w indentingWriter) Write(bs []byte) (int, error) { indent := true for _, b := range bs { if indent { - wrote, err := w.w.Write([]byte(" ")) + wrote, err := w.w.Write([]byte(strings.Repeat(" ", w.indent))) if err != nil { return written, err } diff --git a/vendor/github.com/open-policy-agent/opa/tester/runner.go b/vendor/github.com/open-policy-agent/opa/tester/runner.go index b489cea03..8fccea82c 100644 --- a/vendor/github.com/open-policy-agent/opa/tester/runner.go +++ b/vendor/github.com/open-policy-agent/opa/tester/runner.go @@ -39,7 +39,7 @@ func Run(ctx context.Context, paths ...string) ([]*Result, error) { // RunWithFilter executes all test cases found under files in path. The filter // will be applied to exclude files that should not be included. 
-func RunWithFilter(ctx context.Context, filter loader.Filter, paths ...string) ([]*Result, error) { +func RunWithFilter(ctx context.Context, _ loader.Filter, paths ...string) ([]*Result, error) { modules, store, err := Load(paths, nil) if err != nil { return nil, err diff --git a/vendor/github.com/open-policy-agent/opa/topdown/bindings.go b/vendor/github.com/open-policy-agent/opa/topdown/bindings.go index 7621dac52..30a8ac5ec 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/bindings.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/bindings.go @@ -43,7 +43,7 @@ func (u *bindings) Iter(caller *bindings, iter func(*ast.Term, *ast.Term) error) var err error - u.values.Iter(func(k *ast.Term, v value) bool { + u.values.Iter(func(k *ast.Term, _ value) bool { if err != nil { return true } diff --git a/vendor/github.com/open-policy-agent/opa/topdown/copypropagation/copypropagation.go b/vendor/github.com/open-policy-agent/opa/topdown/copypropagation/copypropagation.go index 4c6b8c42c..8824d19bd 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/copypropagation/copypropagation.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/copypropagation/copypropagation.go @@ -222,11 +222,11 @@ func (p *CopyPropagator) plugBindings(pctx *plugContext, expr *ast.Expr) *ast.Ex // errors unreachable. x, err := ast.Transform(xform, expr.Copy()) - if expr, ok := x.(*ast.Expr); !ok || err != nil { + expr, ok := x.(*ast.Expr) + if !ok || err != nil { panic("unreachable") - } else { - return expr } + return expr } type bindingPlugTransform struct { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/eval.go b/vendor/github.com/open-policy-agent/opa/topdown/eval.go index f0ce3b2a6..6263efba6 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/eval.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/eval.go @@ -237,6 +237,10 @@ func (e *eval) traceWasm(x ast.Node, target *ast.Ref) { e.traceEvent(WasmOp, x, "", target) } +func (e *eval) traceUnify(a, b *ast.Term) { + e.traceEvent(UnifyOp, ast.Equality.Expr(a, b), "", nil) +} + func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) { if !e.traceEnabled { @@ -275,6 +279,7 @@ func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) { evt.Locals = ast.NewValueMap() evt.LocalMetadata = map[ast.Var]VarMetadata{} + evt.localVirtualCacheSnapshot = ast.NewValueMap() _ = e.bindings.Iter(nil, func(k, v *ast.Term) error { original := k.Value.(ast.Var) @@ -290,15 +295,21 @@ func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) { }) // cannot return error ast.WalkTerms(x, func(term *ast.Term) bool { - if v, ok := term.Value.(ast.Var); ok { - if _, ok := evt.LocalMetadata[v]; !ok { - if rewritten, ok := e.rewrittenVar(v); ok { - evt.LocalMetadata[v] = VarMetadata{ + switch x := term.Value.(type) { + case ast.Var: + if _, ok := evt.LocalMetadata[x]; !ok { + if rewritten, ok := e.rewrittenVar(x); ok { + evt.LocalMetadata[x] = VarMetadata{ Name: rewritten, Location: term.Loc(), } } } + case ast.Ref: + groundRef := x.GroundPrefix() + if v, _ := e.virtualCache.Get(groundRef); v != nil { + evt.localVirtualCacheSnapshot.Put(groundRef, v.Value) + } } return false }) @@ -858,7 +869,7 @@ func (e *eval) biunify(a, b *ast.Term, b1, b2 *bindings, iter unifyIterator) err a, b1 = b1.apply(a) b, b2 = b2.apply(b) if e.traceEnabled { - e.traceEvent(UnifyOp, ast.Equality.Expr(a, b), "", nil) + e.traceUnify(a, b) } switch vA := a.Value.(type) { case ast.Var, ast.Ref, *ast.ArrayComprehension, 
*ast.SetComprehension, *ast.ObjectComprehension: @@ -1096,10 +1107,10 @@ func (e *eval) biunifyComprehension(a, b *ast.Term, b1, b2 *bindings, swap bool, return err } else if value != nil { return e.biunify(value, b, b1, b2, iter) - } else { - e.instr.counterIncr(evalOpComprehensionCacheMiss) } + e.instr.counterIncr(evalOpComprehensionCacheMiss) + switch a := a.Value.(type) { case *ast.ArrayComprehension: return e.biunifyComprehensionArray(a, b, b1, b2, iter) @@ -2560,7 +2571,7 @@ func (e evalVirtualPartial) evalOneRulePreUnify(iter unifyIterator, rule *ast.Ru } // Walk the dynamic portion of rule ref and key to unify vars - err := child.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(pos int) error { + err := child.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(_ int) error { defined = true return child.eval(func(child *eval) error { @@ -2648,7 +2659,7 @@ func (e evalVirtualPartial) evalOneRulePostUnify(iter unifyIterator, rule *ast.R err := child.eval(func(child *eval) error { defined = true - return e.e.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(pos int) error { + return e.e.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(_ int) error { return e.evalOneRuleContinue(iter, rule, child) }) }) @@ -2724,7 +2735,7 @@ func (e evalVirtualPartial) partialEvalSupport(iter unifyIterator) error { return e.e.saveUnify(term, e.rterm, e.bindings, e.rbindings, iter) } -func (e evalVirtualPartial) partialEvalSupportRule(rule *ast.Rule, path ast.Ref) (bool, error) { +func (e evalVirtualPartial) partialEvalSupportRule(rule *ast.Rule, _ ast.Ref) (bool, error) { child := e.e.child(rule.Body) child.traceEnter(rule) diff --git a/vendor/github.com/open-policy-agent/opa/topdown/http.go b/vendor/github.com/open-policy-agent/opa/topdown/http.go index 22e6843d4..9d01bc14b 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/http.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/http.go @@ -68,6 +68,7 @@ var allowedKeyNames = [...]string{ "raise_error", "caching_mode", "max_retry_attempts", + "cache_ignored_headers", } // ref: https://www.rfc-editor.org/rfc/rfc7231#section-6.1 @@ -168,12 +169,17 @@ func getHTTPResponse(bctx BuiltinContext, req ast.Object) (*ast.Term, error) { bctx.Metrics.Timer(httpSendLatencyMetricKey).Start() - reqExecutor, err := newHTTPRequestExecutor(bctx, req) + key, err := getKeyFromRequest(req) if err != nil { return nil, err } + reqExecutor, err := newHTTPRequestExecutor(bctx, req, key) + if err != nil { + return nil, err + } // Check if cache already has a response for this query + // set headers to exclude cache_ignored_headers resp, err := reqExecutor.CheckCache() if err != nil { return nil, err @@ -198,6 +204,43 @@ func getHTTPResponse(bctx BuiltinContext, req ast.Object) (*ast.Term, error) { return ast.NewTerm(resp), nil } +// getKeyFromRequest returns a key to be used for caching HTTP responses +// deletes headers from request object mentioned in cache_ignored_headers +func getKeyFromRequest(req ast.Object) (ast.Object, error) { + // deep copy so changes to key do not reflect in the request object + key := req.Copy() + cacheIgnoredHeadersTerm := req.Get(ast.StringTerm("cache_ignored_headers")) + allHeadersTerm := req.Get(ast.StringTerm("headers")) + // skip because no headers to delete + if cacheIgnoredHeadersTerm == nil || allHeadersTerm == nil { + // need to explicitly set cache_ignored_headers to null + // equivalent requests might have different sets of exclusion lists 
+ key.Insert(ast.StringTerm("cache_ignored_headers"), ast.NullTerm()) + return key, nil + } + var cacheIgnoredHeaders []string + var allHeaders map[string]interface{} + err := ast.As(cacheIgnoredHeadersTerm.Value, &cacheIgnoredHeaders) + if err != nil { + return nil, err + } + err = ast.As(allHeadersTerm.Value, &allHeaders) + if err != nil { + return nil, err + } + for _, header := range cacheIgnoredHeaders { + delete(allHeaders, header) + } + val, err := ast.InterfaceToValue(allHeaders) + if err != nil { + return nil, err + } + key.Insert(ast.StringTerm("headers"), ast.NewTerm(val)) + // remove cache_ignored_headers key + key.Insert(ast.StringTerm("cache_ignored_headers"), ast.NullTerm()) + return key, nil +} + func init() { createAllowedKeys() createCacheableHTTPStatusCodes() @@ -303,7 +346,7 @@ func useSocket(rawURL string, tlsConfig *tls.Config) (bool, string, *http.Transp u.RawQuery = v.Encode() tr := http.DefaultTransport.(*http.Transport).Clone() - tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) { + tr.DialContext = func(ctx context.Context, _, _ string) (net.Conn, error) { return http.DefaultTransport.(*http.Transport).DialContext(ctx, "unix", socket) } tr.TLSClientConfig = tlsConfig @@ -482,7 +525,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt case "cache", "caching_mode", "force_cache", "force_cache_duration_seconds", "force_json_decode", "force_yaml_decode", - "raise_error", "max_retry_attempts": // no-op + "raise_error", "max_retry_attempts", "cache_ignored_headers": // no-op default: return nil, nil, fmt.Errorf("invalid parameter %q", key) } @@ -729,13 +772,13 @@ func newHTTPSendCache() *httpSendCache { } func valueHash(v util.T) int { - return v.(ast.Value).Hash() + return ast.StringTerm(v.(ast.Value).String()).Hash() } func valueEq(a, b util.T) bool { av := a.(ast.Value) bv := b.(ast.Value) - return av.Compare(bv) == 0 + return av.String() == bv.String() } func (cache *httpSendCache) get(k ast.Value) *httpSendCacheEntry { @@ -1382,20 +1425,21 @@ type httpRequestExecutor interface { // newHTTPRequestExecutor returns a new HTTP request executor that wraps either an inter-query or // intra-query cache implementation -func newHTTPRequestExecutor(bctx BuiltinContext, key ast.Object) (httpRequestExecutor, error) { - useInterQueryCache, forceCacheParams, err := useInterQueryCache(key) +func newHTTPRequestExecutor(bctx BuiltinContext, req ast.Object, key ast.Object) (httpRequestExecutor, error) { + useInterQueryCache, forceCacheParams, err := useInterQueryCache(req) if err != nil { return nil, handleHTTPSendErr(bctx, err) } if useInterQueryCache && bctx.InterQueryBuiltinCache != nil { - return newInterQueryCache(bctx, key, forceCacheParams) + return newInterQueryCache(bctx, req, key, forceCacheParams) } - return newIntraQueryCache(bctx, key) + return newIntraQueryCache(bctx, req, key) } type interQueryCache struct { bctx BuiltinContext + req ast.Object key ast.Object httpReq *http.Request httpClient *http.Client @@ -1404,8 +1448,8 @@ type interQueryCache struct { forceCacheParams *forceCacheParams } -func newInterQueryCache(bctx BuiltinContext, key ast.Object, forceCacheParams *forceCacheParams) (*interQueryCache, error) { - return &interQueryCache{bctx: bctx, key: key, forceCacheParams: forceCacheParams}, nil +func newInterQueryCache(bctx BuiltinContext, req ast.Object, key ast.Object, forceCacheParams *forceCacheParams) (*interQueryCache, error) { + return &interQueryCache{bctx: bctx, req: req, key: key, 
forceCacheParams: forceCacheParams}, nil } // CheckCache checks the cache for the value of the key set on this object @@ -1464,21 +1508,22 @@ func (c *interQueryCache) InsertErrorIntoCache(err error) { // ExecuteHTTPRequest executes a HTTP request func (c *interQueryCache) ExecuteHTTPRequest() (*http.Response, error) { var err error - c.httpReq, c.httpClient, err = createHTTPRequest(c.bctx, c.key) + c.httpReq, c.httpClient, err = createHTTPRequest(c.bctx, c.req) if err != nil { return nil, handleHTTPSendErr(c.bctx, err) } - return executeHTTPRequest(c.httpReq, c.httpClient, c.key) + return executeHTTPRequest(c.httpReq, c.httpClient, c.req) } type intraQueryCache struct { bctx BuiltinContext + req ast.Object key ast.Object } -func newIntraQueryCache(bctx BuiltinContext, key ast.Object) (*intraQueryCache, error) { - return &intraQueryCache{bctx: bctx, key: key}, nil +func newIntraQueryCache(bctx BuiltinContext, req ast.Object, key ast.Object) (*intraQueryCache, error) { + return &intraQueryCache{bctx: bctx, req: req, key: key}, nil } // CheckCache checks the cache for the value of the key set on this object @@ -1515,11 +1560,11 @@ func (c *intraQueryCache) InsertErrorIntoCache(err error) { // ExecuteHTTPRequest executes a HTTP request func (c *intraQueryCache) ExecuteHTTPRequest() (*http.Response, error) { - httpReq, httpClient, err := createHTTPRequest(c.bctx, c.key) + httpReq, httpClient, err := createHTTPRequest(c.bctx, c.req) if err != nil { return nil, handleHTTPSendErr(c.bctx, err) } - return executeHTTPRequest(httpReq, httpClient, c.key) + return executeHTTPRequest(httpReq, httpClient, c.req) } func useInterQueryCache(req ast.Object) (bool, *forceCacheParams, error) { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/lineage/lineage.go b/vendor/github.com/open-policy-agent/opa/topdown/lineage/lineage.go index c4631d9e0..2c0ec7287 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/lineage/lineage.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/lineage/lineage.go @@ -13,7 +13,7 @@ func Debug(trace []*topdown.Event) []*topdown.Event { return trace } -// Full returns a filtered trace that contains everything except Uninfy ops +// Full returns a filtered trace that contains everything except Unify ops func Full(trace []*topdown.Event) (result []*topdown.Event) { // Do not use Filter since this event will only occur at the leaf positions. 
for _, event := range trace { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go b/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go index 9d8fe5068..0cd4bc193 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go @@ -45,7 +45,7 @@ var ( errBytesValueIncludesSpaces = parseNumBytesError("spaces not allowed in resource strings") ) -func builtinNumBytes(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinNumBytes(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { var m big.Float raw, err := builtins.StringOperand(operands[0].Value, 1) diff --git a/vendor/github.com/open-policy-agent/opa/topdown/providers.go b/vendor/github.com/open-policy-agent/opa/topdown/providers.go index 1affac51c..77db91798 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/providers.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/providers.go @@ -86,7 +86,7 @@ func validateAWSAuthParameters(o ast.Object) error { return nil } -func builtinAWSSigV4SignReq(ctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinAWSSigV4SignReq(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { // Request object. reqObj, err := builtins.ObjectOperand(operands[0].Value, 1) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/reachable.go b/vendor/github.com/open-policy-agent/opa/topdown/reachable.go index 9cb15d51e..8d61018e7 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/reachable.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/reachable.go @@ -31,7 +31,7 @@ func numberOfEdges(collection *ast.Term) int { return 0 } -func builtinReachable(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinReachable(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { // Error on wrong types for args. graph, err := builtins.ObjectOperand(operands[0].Value, 1) if err != nil { @@ -109,7 +109,7 @@ func pathBuilder(graph ast.Object, root *ast.Term, path []*ast.Term, edgeRslt as } -func builtinReachablePaths(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinReachablePaths(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { var traceResult = ast.NewSet() // Error on wrong types for args. 
graph, err := builtins.ObjectOperand(operands[0].Value, 1) diff --git a/vendor/github.com/open-policy-agent/opa/topdown/semver.go b/vendor/github.com/open-policy-agent/opa/topdown/semver.go index ab5aed3ee..7bb7b9c18 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/semver.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/semver.go @@ -12,7 +12,7 @@ import ( "github.com/open-policy-agent/opa/topdown/builtins" ) -func builtinSemVerCompare(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinSemVerCompare(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { versionStringA, err := builtins.StringOperand(operands[0].Value, 1) if err != nil { return err @@ -37,7 +37,7 @@ func builtinSemVerCompare(bctx BuiltinContext, operands []*ast.Term, iter func(* return iter(ast.IntNumberTerm(result)) } -func builtinSemVerIsValid(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinSemVerIsValid(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { versionString, err := builtins.StringOperand(operands[0].Value, 1) if err != nil { return iter(ast.BooleanTerm(false)) diff --git a/vendor/github.com/open-policy-agent/opa/topdown/strings.go b/vendor/github.com/open-policy-agent/opa/topdown/strings.go index d5baa3197..57f8eab9c 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/strings.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/strings.go @@ -16,7 +16,7 @@ import ( "github.com/open-policy-agent/opa/topdown/builtins" ) -func builtinAnyPrefixMatch(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinAnyPrefixMatch(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { a, b := operands[0].Value, operands[1].Value var strs []string @@ -50,7 +50,7 @@ func builtinAnyPrefixMatch(bctx BuiltinContext, operands []*ast.Term, iter func( return iter(ast.BooleanTerm(anyStartsWithAny(strs, prefixes))) } -func builtinAnySuffixMatch(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinAnySuffixMatch(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { a, b := operands[0].Value, operands[1].Value var strsReversed []string @@ -384,12 +384,12 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) return err } - new, err := builtins.StringOperand(operands[2].Value, 3) + n, err := builtins.StringOperand(operands[2].Value, 3) if err != nil { return err } - return iter(ast.StringTerm(strings.Replace(string(s), string(old), string(new), -1))) + return iter(ast.StringTerm(strings.Replace(string(s), string(old), string(n), -1))) } func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/tokens.go b/vendor/github.com/open-policy-agent/opa/topdown/tokens.go index b69f3f2d2..4d5a520f2 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/tokens.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/tokens.go @@ -233,7 +233,7 @@ func builtinJWTVerifyRSA(a ast.Value, b ast.Value, hasher func() hash.Hash, veri } // Implements ES256 JWT signature verification. 
-func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyES256(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha256.New, verifyES) if err == nil { return iter(ast.NewTerm(result)) @@ -242,7 +242,7 @@ func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func( } // Implements ES384 JWT signature verification -func builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyES384(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha512.New384, verifyES) if err == nil { return iter(ast.NewTerm(result)) @@ -251,7 +251,7 @@ func builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func( } // Implements ES512 JWT signature verification -func builtinJWTVerifyES512(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyES512(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha512.New, verifyES) if err == nil { return iter(ast.NewTerm(result)) @@ -413,7 +413,7 @@ func builtinJWTVerify(a ast.Value, b ast.Value, hasher func() hash.Hash, verify } // Implements HS256 (secret) JWT signature verification -func builtinJWTVerifyHS256(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyHS256(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { // Decode the JSON Web Token token, err := decodeJWT(operands[0].Value) if err != nil { @@ -442,7 +442,7 @@ func builtinJWTVerifyHS256(bctx BuiltinContext, operands []*ast.Term, iter func( } // Implements HS384 JWT signature verification -func builtinJWTVerifyHS384(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyHS384(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { // Decode the JSON Web Token token, err := decodeJWT(operands[0].Value) if err != nil { @@ -471,7 +471,7 @@ func builtinJWTVerifyHS384(bctx BuiltinContext, operands []*ast.Term, iter func( } // Implements HS512 JWT signature verification -func builtinJWTVerifyHS512(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinJWTVerifyHS512(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { // Decode the JSON Web Token token, err := decodeJWT(operands[0].Value) if err != nil { @@ -793,7 +793,7 @@ func verifyRSAPSS(key interface{}, hash crypto.Hash, digest []byte, signature [] return nil } -func verifyECDSA(key interface{}, hash crypto.Hash, digest []byte, signature []byte) (err error) { +func verifyECDSA(key interface{}, _ crypto.Hash, digest []byte, signature []byte) (err error) { defer func() { if r := recover(); r != nil { err = fmt.Errorf("ECDSA signature verification error: %v", r) @@ -1048,10 +1048,9 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func // Nested JWT, go round again with payload as first argument a = p.Value continue - } else { - // Non-nested JWT (or we've reached the bottom of the nesting). - break } + // Non-nested JWT (or we've reached the bottom of the nesting). 
+ break } payload, err := extractJSONObject(string(p.Value.(ast.String))) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/topdown/trace.go b/vendor/github.com/open-policy-agent/opa/topdown/trace.go index 727938d9c..e77713821 100644 --- a/vendor/github.com/open-policy-agent/opa/topdown/trace.go +++ b/vendor/github.com/open-policy-agent/opa/topdown/trace.go @@ -5,8 +5,10 @@ package topdown import ( + "bytes" "fmt" "io" + "slices" "strings" iStrs "github.com/open-policy-agent/opa/internal/strings" @@ -18,7 +20,9 @@ import ( const ( minLocationWidth = 5 // len("query") maxIdealLocationWidth = 64 - locationPadding = 4 + columnPadding = 4 + maxExprVarWidth = 32 + maxPrettyExprVarWidth = 64 ) // Op defines the types of tracing events. @@ -62,7 +66,8 @@ const ( // UnifyOp is emitted when two terms are unified. Node will be set to an // equality expression with the two terms. This Node will not have location // info. - UnifyOp Op = "Unify" + UnifyOp Op = "Unify" + FailedAssertionOp Op = "FailedAssertion" ) // VarMetadata provides some user facing information about @@ -84,8 +89,9 @@ type Event struct { Message string // Contains message for Note events. Ref *ast.Ref // Identifies the subject ref for the event. Only applies to Index and Wasm operations. - input *ast.Term - bindings *bindings + input *ast.Term + bindings *bindings + localVirtualCacheSnapshot *ast.ValueMap } // HasRule returns true if the Event contains an ast.Rule. @@ -236,31 +242,162 @@ func (b *BufferTracer) Config() TraceConfig { // PrettyTrace pretty prints the trace to the writer. func PrettyTrace(w io.Writer, trace []*Event) { - prettyTraceWith(w, trace, false) + PrettyTraceWithOpts(w, trace, PrettyTraceOptions{}) } // PrettyTraceWithLocation prints the trace to the writer and includes location information func PrettyTraceWithLocation(w io.Writer, trace []*Event) { - prettyTraceWith(w, trace, true) + PrettyTraceWithOpts(w, trace, PrettyTraceOptions{Locations: true}) } -func prettyTraceWith(w io.Writer, trace []*Event, locations bool) { +type PrettyTraceOptions struct { + Locations bool // Include location information + ExprVariables bool // Include variables found in the expression + LocalVariables bool // Include all local variables +} + +type traceRow []string + +func (r *traceRow) add(s string) { + *r = append(*r, s) +} + +type traceTable struct { + rows []traceRow + maxWidths []int +} + +func (t *traceTable) add(row traceRow) { + t.rows = append(t.rows, row) + for i := range row { + if i >= len(t.maxWidths) { + t.maxWidths = append(t.maxWidths, len(row[i])) + } else if len(row[i]) > t.maxWidths[i] { + t.maxWidths[i] = len(row[i]) + } + } +} + +func (t *traceTable) write(w io.Writer, padding int) { + for _, row := range t.rows { + for i, cell := range row { + width := t.maxWidths[i] + padding + if i < len(row)-1 { + _, _ = fmt.Fprintf(w, "%-*s ", width, cell) + } else { + _, _ = fmt.Fprintf(w, "%s", cell) + } + } + _, _ = fmt.Fprintln(w) + } +} + +func PrettyTraceWithOpts(w io.Writer, trace []*Event, opts PrettyTraceOptions) { depths := depths{} - filePathAliases, longest := getShortenedFileNames(trace) + // FIXME: Can we shorten each location as we process each trace event instead of beforehand? 
+ filePathAliases, _ := getShortenedFileNames(trace) - // Always include some padding between the trace and location - locationWidth := longest + locationPadding + table := traceTable{} for _, event := range trace { depth := depths.GetOrSet(event.QueryID, event.ParentID) - if locations { + row := traceRow{} + + if opts.Locations { location := formatLocation(event, filePathAliases) - fmt.Fprintf(w, "%-*s %s\n", locationWidth, location, formatEvent(event, depth)) - } else { - fmt.Fprintln(w, formatEvent(event, depth)) + row.add(location) + } + + row.add(formatEvent(event, depth)) + + if opts.ExprVariables { + vars := exprLocalVars(event) + keys := sortedKeys(vars) + + buf := new(bytes.Buffer) + buf.WriteString("{") + for i, k := range keys { + if i > 0 { + buf.WriteString(", ") + } + _, _ = fmt.Fprintf(buf, "%v: %s", k, iStrs.Truncate(vars.Get(k).String(), maxExprVarWidth)) + } + buf.WriteString("}") + row.add(buf.String()) + } + + if opts.LocalVariables { + if locals := event.Locals; locals != nil { + keys := sortedKeys(locals) + + buf := new(bytes.Buffer) + buf.WriteString("{") + for i, k := range keys { + if i > 0 { + buf.WriteString(", ") + } + _, _ = fmt.Fprintf(buf, "%v: %s", k, iStrs.Truncate(locals.Get(k).String(), maxExprVarWidth)) + } + buf.WriteString("}") + row.add(buf.String()) + } else { + row.add("{}") + } + } + + table.add(row) + } + + table.write(w, columnPadding) +} + +func sortedKeys(vm *ast.ValueMap) []ast.Value { + keys := make([]ast.Value, 0, vm.Len()) + vm.Iter(func(k, _ ast.Value) bool { + keys = append(keys, k) + return false + }) + slices.SortFunc(keys, func(a, b ast.Value) int { + return strings.Compare(a.String(), b.String()) + }) + return keys +} + +func exprLocalVars(e *Event) *ast.ValueMap { + vars := ast.NewValueMap() + + findVars := func(term *ast.Term) bool { + //if r, ok := term.Value.(ast.Ref); ok { + // fmt.Printf("ref: %v\n", r) + // //return true + //} + if name, ok := term.Value.(ast.Var); ok { + if meta, ok := e.LocalMetadata[name]; ok { + if val := e.Locals.Get(name); val != nil { + vars.Put(meta.Name, val) + } + } } + return false } + + if r, ok := e.Node.(*ast.Rule); ok { + // We're only interested in vars in the head, not the body + ast.WalkTerms(r.Head, findVars) + return vars + } + + // The local cache snapshot only contains a snapshot for those refs present in the event node, + // so they can all be added to the vars map. 
+ e.localVirtualCacheSnapshot.Iter(func(k, v ast.Value) bool { + vars.Put(k, v) + return false + }) + + ast.WalkTerms(e.Node, findVars) + + return vars } func formatEvent(event *Event, depth int) string { @@ -451,6 +588,310 @@ func rewrite(event *Event) *Event { return &cpy } +type varInfo struct { + VarMetadata + val ast.Value + exprLoc *ast.Location + col int // 0-indexed column +} + +func (v varInfo) Value() string { + if v.val != nil { + return v.val.String() + } + return "undefined" +} + +func (v varInfo) Title() string { + if v.exprLoc != nil && v.exprLoc.Text != nil { + return string(v.exprLoc.Text) + } + return string(v.Name) +} + +func padLocationText(loc *ast.Location) string { + if loc == nil { + return "" + } + + text := string(loc.Text) + + if loc.Col == 0 { + return text + } + + buf := new(bytes.Buffer) + j := 0 + for i := 1; i < loc.Col; i++ { + if len(loc.Tabs) > 0 && j < len(loc.Tabs) && loc.Tabs[j] == i { + buf.WriteString("\t") + j++ + } else { + buf.WriteString(" ") + } + } + + buf.WriteString(text) + return buf.String() +} + +type PrettyEventOpts struct { + PrettyVars bool +} + +func walkTestTerms(x interface{}, f func(*ast.Term) bool) { + var vis *ast.GenericVisitor + vis = ast.NewGenericVisitor(func(x interface{}) bool { + switch x := x.(type) { + case ast.Call: + for _, t := range x[1:] { + vis.Walk(t) + } + return true + case *ast.Expr: + if x.IsCall() { + for _, o := range x.Operands() { + vis.Walk(o) + } + for i := range x.With { + vis.Walk(x.With[i]) + } + return true + } + case *ast.Term: + return f(x) + case *ast.With: + vis.Walk(x.Value) + return true + } + return false + }) + vis.Walk(x) +} + +func PrettyEvent(w io.Writer, e *Event, opts PrettyEventOpts) error { + if !opts.PrettyVars { + _, _ = fmt.Fprintln(w, padLocationText(e.Location)) + return nil + } + + buf := new(bytes.Buffer) + exprVars := map[string]varInfo{} + + findVars := func(unknownAreUndefined bool) func(term *ast.Term) bool { + return func(term *ast.Term) bool { + if term.Location == nil { + return false + } + + switch v := term.Value.(type) { + case *ast.ArrayComprehension, *ast.SetComprehension, *ast.ObjectComprehension: + // we don't report on the internals of a comprehension, as it's already evaluated, and we won't have the local vars. 
+				return true
+			case ast.Var:
+				var info *varInfo
+				if meta, ok := e.LocalMetadata[v]; ok {
+					info = &varInfo{
+						VarMetadata: meta,
+						val:         e.Locals.Get(v),
+						exprLoc:     term.Location,
+					}
+				} else if unknownAreUndefined {
+					info = &varInfo{
+						VarMetadata: VarMetadata{Name: v},
+						exprLoc:     term.Location,
+						col:         term.Location.Col,
+					}
+				}
+
+				if info != nil {
+					if v, exists := exprVars[info.Title()]; !exists || v.val == nil {
+						if term.Location != nil {
+							info.col = term.Location.Col
+						}
+						exprVars[info.Title()] = *info
+					}
+				}
+			}
+			return false
+		}
+	}
+
+	expr, ok := e.Node.(*ast.Expr)
+	if !ok || expr == nil {
+		return nil
+	}
+
+	base := expr.BaseCogeneratedExpr()
+	exprText := padLocationText(base.Location)
+	buf.WriteString(exprText)
+
+	e.localVirtualCacheSnapshot.Iter(func(k, v ast.Value) bool {
+		var info *varInfo
+		switch k := k.(type) {
+		case ast.Ref:
+			info = &varInfo{
+				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
+				val:         v,
+				exprLoc:     k[0].Location,
+				col:         k[0].Location.Col,
+			}
+		case *ast.ArrayComprehension:
+			info = &varInfo{
+				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
+				val:         v,
+				exprLoc:     k.Term.Location,
+				col:         k.Term.Location.Col,
+			}
+		case *ast.SetComprehension:
+			info = &varInfo{
+				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
+				val:         v,
+				exprLoc:     k.Term.Location,
+				col:         k.Term.Location.Col,
+			}
+		case *ast.ObjectComprehension:
+			info = &varInfo{
+				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
+				val:         v,
+				exprLoc:     k.Key.Location,
+				col:         k.Key.Location.Col,
+			}
+		}
+
+		if info != nil {
+			exprVars[info.Title()] = *info
+		}
+
+		return false
+	})
+
+	// If the expression is negated, we can't confidently assert that vars with unknown values are 'undefined',
+	// since the compiler might have opted out of the necessary rewrite.
+	walkTestTerms(expr, findVars(!expr.Negated))
+	coExprs := expr.CogeneratedExprs()
+	for _, coExpr := range coExprs {
+		// Only the current "co-expr" can have undefined vars; if we don't know the value for a var in any other co-expr,
+		// it's unknown, not undefined. A var can be unknown if it hasn't been assigned a value yet, because the co-expr
+		// hasn't been evaluated yet (the fail happened before it).
+		walkTestTerms(coExpr, findVars(false))
+	}
+
+	printPrettyVars(buf, exprVars)
+	_, _ = fmt.Fprint(w, buf.String())
+	return nil
+}
+
+func printPrettyVars(w *bytes.Buffer, exprVars map[string]varInfo) {
+	containsTabs := false
+	varRows := make(map[int]interface{})
+	for _, info := range exprVars {
+		if len(info.exprLoc.Tabs) > 0 {
+			containsTabs = true
+		}
+		varRows[info.exprLoc.Row] = nil
+	}
+
+	if containsTabs && len(varRows) > 1 {
+		// We can't (currently) reliably point to var locations when they are on different rows that contain tabs.
+		// So we'll just print them in alphabetical order instead.
+		byName := make([]varInfo, 0, len(exprVars))
+		for _, info := range exprVars {
+			byName = append(byName, info)
+		}
+		slices.SortStableFunc(byName, func(a, b varInfo) int {
+			return strings.Compare(a.Title(), b.Title())
+		})
+
+		w.WriteString("\n\nWhere:\n")
+		for _, info := range byName {
+			w.WriteString(fmt.Sprintf("\n%s: %s", info.Title(), iStrs.Truncate(info.Value(), maxPrettyExprVarWidth)))
+		}
+
+		return
+	}
+
+	byCol := make([]varInfo, 0, len(exprVars))
+	for _, info := range exprVars {
+		byCol = append(byCol, info)
+	}
+	slices.SortFunc(byCol, func(a, b varInfo) int {
+		// sort first by column, then by reverse row (to present vars in the same order they appear in the expr)
+		if a.col == b.col {
+			if a.exprLoc.Row == b.exprLoc.Row {
+				return strings.Compare(a.Title(), b.Title())
+			}
+			return b.exprLoc.Row - a.exprLoc.Row
+		}
+		return a.col - b.col
+	})
+
+	if len(byCol) == 0 {
+		return
+	}
+
+	w.WriteString("\n")
+	printArrows(w, byCol, -1)
+	for i := len(byCol) - 1; i >= 0; i-- {
+		w.WriteString("\n")
+		printArrows(w, byCol, i)
+	}
+}
+
+func printArrows(w *bytes.Buffer, l []varInfo, printValueAt int) {
+	prevCol := 0
+	var slice []varInfo
+	if printValueAt >= 0 {
+		slice = l[:printValueAt+1]
+	} else {
+		slice = l
+	}
+	isFirst := true
+	for i, info := range slice {
+
+		isLast := i >= len(slice)-1
+		col := info.col
+
+		if !isLast && col == l[i+1].col {
+			// We're sharing the same column with another, subsequent var
+			continue
+		}
+
+		spaces := col - 1
+		if i > 0 && !isFirst {
+			spaces = (col - prevCol) - 1
+		}
+
+		for j := 0; j < spaces; j++ {
+			tab := false
+			for _, t := range info.exprLoc.Tabs {
+				if t == j+prevCol+1 {
+					w.WriteString("\t")
+					tab = true
+					break
+				}
+			}
+			if !tab {
+				w.WriteString(" ")
+			}
+		}
+
+		if isLast && printValueAt >= 0 {
+			valueStr := iStrs.Truncate(info.Value(), maxPrettyExprVarWidth)
+			if (i > 0 && col == l[i-1].col) || (i < len(l)-1 && col == l[i+1].col) {
+				// There is another var on this column, so we need to include the name to differentiate them.
+				w.WriteString(fmt.Sprintf("%s: %s", info.Title(), valueStr))
+			} else {
+				w.WriteString(valueStr)
+			}
+		} else {
+			w.WriteString("|")
+		}
+		prevCol = col
+		isFirst = false
+	}
+}
+
 func init() {
 	RegisterBuiltinFunc(ast.Trace.Name, builtinTrace)
 }
diff --git a/vendor/github.com/open-policy-agent/opa/types/decode.go b/vendor/github.com/open-policy-agent/opa/types/decode.go
index 4e123384a..a6bd9ea03 100644
--- a/vendor/github.com/open-policy-agent/opa/types/decode.go
+++ b/vendor/github.com/open-policy-agent/opa/types/decode.go
@@ -77,10 +77,10 @@ func Unmarshal(bs []byte) (result Type, err error) {
 			}
 		}
 	case typeAny:
-		var any rawunion
-		if err = util.UnmarshalJSON(bs, &any); err == nil {
+		var union rawunion
+		if err = util.UnmarshalJSON(bs, &union); err == nil {
 			var of []Type
-			if of, err = unmarshalSlice(any.Of); err == nil {
+			if of, err = unmarshalSlice(union.Of); err == nil {
 				result = NewAny(of...)
 			}
 		}
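
The decode.go change is only a rename of the local variable any (now a predeclared identifier alias in Go) to union; behaviour is unchanged. For orientation, a hedged sketch of the code path it sits on. The JSON shape used here ({"type": "any", "of": [...]}) is an assumption inferred from the typeAny case and the rawunion Of field shown above, and the printed form is approximate:

package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/types"
)

func main() {
	// Decode a type declaration back into a types.Type; the "any" case is the
	// branch touched by the rename above.
	raw := []byte(`{"type": "any", "of": [{"type": "string"}, {"type": "number"}]}`)

	t, err := types.Unmarshal(raw)
	if err != nil {
		panic(err)
	}
	fmt.Println(t) // expected to print something like: any<string, number>
}
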
diff --git a/vendor/github.com/open-policy-agent/opa/types/types.go b/vendor/github.com/open-policy-agent/opa/types/types.go
index a4b87cd55..2a050927d 100644
--- a/vendor/github.com/open-policy-agent/opa/types/types.go
+++ b/vendor/github.com/open-policy-agent/opa/types/types.go
@@ -938,8 +938,8 @@ func Compare(a, b Type) int {
 // Contains returns true if a is a superset or equal to b.
 func Contains(a, b Type) bool {
-	if any, ok := unwrap(a).(Any); ok {
-		return any.Contains(b)
+	if x, ok := unwrap(a).(Any); ok {
+		return x.Contains(b)
 	}
 	return Compare(a, b) == 0
 }
@@ -994,8 +994,8 @@ func Select(a Type, x interface{}) Type {
 		if Compare(a.of, tpe) == 0 {
 			return a.of
 		}
-		if any, ok := a.of.(Any); ok {
-			if any.Contains(tpe) {
+		if x, ok := a.of.(Any); ok {
+			if x.Contains(tpe) {
 				return tpe
 			}
 		}
diff --git a/vendor/github.com/open-policy-agent/opa/util/read_gzip_body.go b/vendor/github.com/open-policy-agent/opa/util/read_gzip_body.go
new file mode 100644
index 000000000..2e33cae5f
--- /dev/null
+++ b/vendor/github.com/open-policy-agent/opa/util/read_gzip_body.go
@@ -0,0 +1,26 @@
+package util
+
+import (
+	"bytes"
+	"compress/gzip"
+	"io"
+	"net/http"
+	"strings"
+)
+
+// Note(philipc): Originally taken from server/server.go
+func ReadMaybeCompressedBody(r *http.Request) (io.ReadCloser, error) {
+	if strings.Contains(r.Header.Get("Content-Encoding"), "gzip") {
+		gzReader, err := gzip.NewReader(r.Body)
+		if err != nil {
+			return nil, err
+		}
+		defer gzReader.Close()
+		bytesBody, err := io.ReadAll(gzReader)
+		if err != nil {
+			return nil, err
+		}
+		return io.NopCloser(bytes.NewReader(bytesBody)), err
+	}
+	return r.Body, nil
+}
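
util.ReadMaybeCompressedBody, added in the new file above, transparently inflates a gzip-encoded request body and otherwise returns the body untouched. A minimal sketch of how a caller might use it; the handler name, route, and responses are illustrative only:

package main

import (
	"io"
	"net/http"

	"github.com/open-policy-agent/opa/util"
)

func handlePush(w http.ResponseWriter, r *http.Request) {
	// Accept both plain and gzip-compressed payloads.
	body, err := util.ReadMaybeCompressedBody(r)
	if err != nil {
		http.Error(w, "invalid gzip payload", http.StatusBadRequest)
		return
	}
	defer body.Close()

	payload, err := io.ReadAll(body)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.WriteHeader(http.StatusOK)
	_, _ = w.Write(payload)
}

func main() {
	http.HandleFunc("/push", handlePush)
	_ = http.ListenAndServe(":8080", nil)
}
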
diff --git a/vendor/github.com/open-policy-agent/opa/version/version.go b/vendor/github.com/open-policy-agent/opa/version/version.go
index d07db6bf1..841a54d84 100644
--- a/vendor/github.com/open-policy-agent/opa/version/version.go
+++ b/vendor/github.com/open-policy-agent/opa/version/version.go
@@ -11,7 +11,7 @@ import (
 )
 
 // Version is the canonical version of OPA.
-var Version = "0.65.0"
+var Version = "0.66.0"
 
 // GoVersion is the version of Go this was built with
 var GoVersion = runtime.Version()
diff --git a/vendor/github.com/spf13/cobra/.golangci.yml b/vendor/github.com/spf13/cobra/.golangci.yml
index a618ec24d..2c8f4808c 100644
--- a/vendor/github.com/spf13/cobra/.golangci.yml
+++ b/vendor/github.com/spf13/cobra/.golangci.yml
@@ -26,33 +26,28 @@ linters:
     - errcheck
     #- exhaustive
     #- funlen
-    - gas
     #- gochecknoinits
     - goconst
-    #- gocritic
+    - gocritic
    #- gocyclo
-    #- gofmt
+    - gofmt
     - goimports
-    - golint
     #- gomnd
     #- goprintffuncname
-    #- gosec
-    #- gosimple
+    - gosec
+    - gosimple
     - govet
     - ineffassign
-    - interfacer
     #- lll
-    - maligned
-    - megacheck
-    #- misspell
+    - misspell
     #- nakedret
     #- noctx
-    #- nolintlint
+    - nolintlint
     #- rowserrcheck
     #- scopelint
-    #- staticcheck
+    - staticcheck
     #- structcheck ! deprecated since v1.49.0; replaced by 'unused'
-    #- stylecheck
+    - stylecheck
     #- typecheck
     - unconvert
     #- unparam
diff --git a/vendor/github.com/spf13/cobra/active_help.go b/vendor/github.com/spf13/cobra/active_help.go
index 5f965e057..25c30e3cc 100644
--- a/vendor/github.com/spf13/cobra/active_help.go
+++ b/vendor/github.com/spf13/cobra/active_help.go
@@ -17,21 +17,17 @@ package cobra
 import (
 	"fmt"
 	"os"
-	"regexp"
-	"strings"
 )
 
 const (
 	activeHelpMarker = "_activeHelp_ "
 	// The below values should not be changed: programs will be using them explicitly
 	// in their user documentation, and users will be using them explicitly.
-	activeHelpEnvVarSuffix  = "_ACTIVE_HELP"
-	activeHelpGlobalEnvVar  = "COBRA_ACTIVE_HELP"
+	activeHelpEnvVarSuffix  = "ACTIVE_HELP"
+	activeHelpGlobalEnvVar  = configEnvVarGlobalPrefix + "_" + activeHelpEnvVarSuffix
 	activeHelpGlobalDisable = "0"
 )
 
-var activeHelpEnvVarPrefixSubstRegexp = regexp.MustCompile(`[^A-Z0-9_]`)
-
 // AppendActiveHelp adds the specified string to the specified array to be used as ActiveHelp.
 // Such strings will be processed by the completion script and will be shown as ActiveHelp
 // to the user.
@@ -60,8 +56,5 @@ func GetActiveHelpConfig(cmd *Command) string {
 // variable. It has the format <PROGRAM>_ACTIVE_HELP where <PROGRAM> is the name of the
 // root command in upper case, with all non-ASCII-alphanumeric characters replaced by `_`.
 func activeHelpEnvVar(name string) string {
-	// This format should not be changed: users will be using it explicitly.
-	activeHelpEnvVar := strings.ToUpper(fmt.Sprintf("%s%s", name, activeHelpEnvVarSuffix))
-	activeHelpEnvVar = activeHelpEnvVarPrefixSubstRegexp.ReplaceAllString(activeHelpEnvVar, "_")
-	return activeHelpEnvVar
+	return configEnvVar(name, activeHelpEnvVarSuffix)
 }
diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go
index e79ec33a8..ed1e70cea 100644
--- a/vendor/github.com/spf13/cobra/args.go
+++ b/vendor/github.com/spf13/cobra/args.go
@@ -52,9 +52,9 @@ func OnlyValidArgs(cmd *Command, args []string) error {
 	if len(cmd.ValidArgs) > 0 {
 		// Remove any description that may be included in ValidArgs.
 		// A description is following a tab character.
-		var validArgs []string
+		validArgs := make([]string, 0, len(cmd.ValidArgs))
 		for _, v := range cmd.ValidArgs {
-			validArgs = append(validArgs, strings.Split(v, "\t")[0])
+			validArgs = append(validArgs, strings.SplitN(v, "\t", 2)[0])
 		}
 		for _, v := range args {
 			if !stringInSlice(v, validArgs) {
diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go
index 8a5315184..f4d198cbc 100644
--- a/vendor/github.com/spf13/cobra/bash_completions.go
+++ b/vendor/github.com/spf13/cobra/bash_completions.go
@@ -597,19 +597,16 @@ func writeRequiredFlag(buf io.StringWriter, cmd *Command) {
 		if nonCompletableFlag(flag) {
 			return
 		}
-		for key := range flag.Annotations {
-			switch key {
-			case BashCompOneRequiredFlag:
-				format := "    must_have_one_flag+=(\"--%s"
-				if flag.Value.Type() != "bool" {
-					format += "="
-				}
-				format += cbn
-				WriteStringAndCheck(buf, fmt.Sprintf(format, flag.Name))
-
-				if len(flag.Shorthand) > 0 {
-					WriteStringAndCheck(buf, fmt.Sprintf("    must_have_one_flag+=(\"-%s"+cbn, flag.Shorthand))
-				}
+		if _, ok := flag.Annotations[BashCompOneRequiredFlag]; ok {
+			format := "    must_have_one_flag+=(\"--%s"
+			if flag.Value.Type() != "bool" {
+				format += "="
+			}
+			format += cbn
+			WriteStringAndCheck(buf, fmt.Sprintf(format, flag.Name))
+
+			if len(flag.Shorthand) > 0 {
+				WriteStringAndCheck(buf, fmt.Sprintf("    must_have_one_flag+=(\"-%s"+cbn, flag.Shorthand))
 			}
 		}
 	})
@@ -621,7 +618,7 @@ func writeRequiredNouns(buf io.StringWriter, cmd *Command) {
 	for _, value := range cmd.ValidArgs {
 		// Remove any description that may be included following a tab character.
 		// Descriptions are not supported by bash completion.
-		value = strings.Split(value, "\t")[0]
+		value = strings.SplitN(value, "\t", 2)[0]
 		WriteStringAndCheck(buf, fmt.Sprintf("    must_have_one_noun+=(%q)\n", value))
 	}
 	if cmd.ValidArgsFunction != nil {
diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go
index a6b160ce5..e0b0947b0 100644
--- a/vendor/github.com/spf13/cobra/cobra.go
+++ b/vendor/github.com/spf13/cobra/cobra.go
@@ -193,8 +193,6 @@ func ld(s, t string, ignoreCase bool) int {
 	d := make([][]int, len(s)+1)
 	for i := range d {
 		d[i] = make([]int, len(t)+1)
-	}
-	for i := range d {
 		d[i][0] = i
 	}
 	for j := range d[0] {
diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go
index 2fbe6c131..54748fc67 100644
--- a/vendor/github.com/spf13/cobra/command.go
+++ b/vendor/github.com/spf13/cobra/command.go
@@ -154,8 +154,10 @@ type Command struct {
 	// pflags contains persistent flags.
 	pflags *flag.FlagSet
 	// lflags contains local flags.
+	// This field does not represent internal state, it's used as a cache to optimise LocalFlags function call
 	lflags *flag.FlagSet
 	// iflags contains inherited flags.
+	// This field does not represent internal state, it's used as a cache to optimise InheritedFlags function call
 	iflags *flag.FlagSet
 	// parentsPflags is all persistent flags of cmd's parents.
 	parentsPflags *flag.FlagSet
@@ -706,7 +708,7 @@ Loop:
 		// This is not a flag or a flag value. Check to see if it matches what we're looking for, and if so,
 		// return the args, excluding the one at this position.
 		if s == x {
-			ret := []string{}
+			ret := make([]string, 0, len(args)-1)
 			ret = append(ret, args[:pos]...)
 			ret = append(ret, args[pos+1:]...)
 			return ret
@@ -754,14 +756,14 @@ func (c *Command) findSuggestions(arg string) string {
 	if c.SuggestionsMinimumDistance <= 0 {
 		c.SuggestionsMinimumDistance = 2
 	}
-	suggestionsString := ""
+	var sb strings.Builder
 	if suggestions := c.SuggestionsFor(arg); len(suggestions) > 0 {
-		suggestionsString += "\n\nDid you mean this?\n"
+		sb.WriteString("\n\nDid you mean this?\n")
 		for _, s := range suggestions {
-			suggestionsString += fmt.Sprintf("\t%v\n", s)
+			_, _ = fmt.Fprintf(&sb, "\t%v\n", s)
 		}
 	}
-	return suggestionsString
+	return sb.String()
 }
 
 func (c *Command) findNext(next string) *Command {
@@ -873,7 +875,7 @@ func (c *Command) ArgsLenAtDash() int {
 func (c *Command) execute(a []string) (err error) {
 	if c == nil {
-		return fmt.Errorf("Called Execute() on a nil Command")
+		return fmt.Errorf("called Execute() on a nil Command")
 	}
 
 	if len(c.Deprecated) > 0 {
@@ -1187,10 +1189,11 @@ func (c *Command) InitDefaultHelpFlag() {
 	c.mergePersistentFlags()
 	if c.Flags().Lookup("help") == nil {
 		usage := "help for "
-		if c.Name() == "" {
+		name := c.displayName()
+		if name == "" {
 			usage += "this command"
 		} else {
-			usage += c.Name()
+			usage += name
 		}
 		c.Flags().BoolP("help", "h", false, usage)
 		_ = c.Flags().SetAnnotation("help", FlagSetByCobraAnnotation, []string{"true"})
@@ -1236,7 +1239,7 @@ func (c *Command) InitDefaultHelpCmd() {
 		Use:   "help [command]",
 		Short: "Help about any command",
 		Long: `Help provides help for any command in the application.
-Simply type ` + c.Name() + ` help [path to command] for full details.`,
+Simply type ` + c.displayName() + ` help [path to command] for full details.`,
 		ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
 			var completions []string
 			cmd, _, e := c.Root().Find(args)
@@ -1427,6 +1430,10 @@ func (c *Command) CommandPath() string {
 	if c.HasParent() {
 		return c.Parent().CommandPath() + " " + c.Name()
 	}
+	return c.displayName()
+}
+
+func (c *Command) displayName() string {
 	if displayName, ok := c.Annotations[CommandDisplayNameAnnotation]; ok {
 		return displayName
 	}
@@ -1436,10 +1443,11 @@ func (c *Command) CommandPath() string {
 // UseLine puts out the full usage for a given command (including parents).
 func (c *Command) UseLine() string {
 	var useline string
+	use := strings.Replace(c.Use, c.Name(), c.displayName(), 1)
 	if c.HasParent() {
-		useline = c.parent.CommandPath() + " " + c.Use
+		useline = c.parent.CommandPath() + " " + use
 	} else {
-		useline = c.Use
+		useline = use
 	}
 	if c.DisableFlagsInUseLine {
 		return useline
@@ -1452,7 +1460,6 @@ func (c *Command) UseLine() string {
 // DebugFlags used to determine which flags have been assigned to which commands
 // and which persist.
-// nolint:goconst
 func (c *Command) DebugFlags() {
 	c.Println("DebugFlags called on", c.Name())
 	var debugflags func(*Command)
@@ -1642,7 +1649,7 @@ func (c *Command) GlobalNormalizationFunc() func(f *flag.FlagSet, name string) flag.NormalizedName {
 // to this command (local and persistent declared here and by all parents).
 func (c *Command) Flags() *flag.FlagSet {
 	if c.flags == nil {
-		c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+		c.flags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 		if c.flagErrorBuf == nil {
 			c.flagErrorBuf = new(bytes.Buffer)
 		}
@@ -1653,10 +1660,11 @@ func (c *Command) Flags() *flag.FlagSet {
 }
 
 // LocalNonPersistentFlags are flags specific to this command which will NOT persist to subcommands.
+// This function does not modify the flags of the current command, its purpose is to return the current state.
 func (c *Command) LocalNonPersistentFlags() *flag.FlagSet {
 	persistentFlags := c.PersistentFlags()
 
-	out := flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+	out := flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 	c.LocalFlags().VisitAll(func(f *flag.Flag) {
 		if persistentFlags.Lookup(f.Name) == nil {
 			out.AddFlag(f)
@@ -1666,11 +1674,12 @@ func (c *Command) LocalNonPersistentFlags() *flag.FlagSet {
 }
 
 // LocalFlags returns the local FlagSet specifically set in the current command.
+// This function does not modify the flags of the current command, its purpose is to return the current state.
 func (c *Command) LocalFlags() *flag.FlagSet {
 	c.mergePersistentFlags()
 
 	if c.lflags == nil {
-		c.lflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+		c.lflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 		if c.flagErrorBuf == nil {
 			c.flagErrorBuf = new(bytes.Buffer)
 		}
@@ -1693,11 +1702,12 @@ func (c *Command) LocalFlags() *flag.FlagSet {
 }
 
 // InheritedFlags returns all flags which were inherited from parent commands.
+// This function does not modify the flags of the current command, its purpose is to return the current state.
 func (c *Command) InheritedFlags() *flag.FlagSet {
 	c.mergePersistentFlags()
 
 	if c.iflags == nil {
-		c.iflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+		c.iflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 		if c.flagErrorBuf == nil {
 			c.flagErrorBuf = new(bytes.Buffer)
 		}
@@ -1718,6 +1728,7 @@ func (c *Command) InheritedFlags() *flag.FlagSet {
 }
 
 // NonInheritedFlags returns all flags which were not inherited from parent commands.
+// This function does not modify the flags of the current command, its purpose is to return the current state.
 func (c *Command) NonInheritedFlags() *flag.FlagSet {
 	return c.LocalFlags()
 }
@@ -1725,7 +1736,7 @@ func (c *Command) NonInheritedFlags() *flag.FlagSet {
 // PersistentFlags returns the persistent FlagSet specifically set in the current command.
 func (c *Command) PersistentFlags() *flag.FlagSet {
 	if c.pflags == nil {
-		c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+		c.pflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 		if c.flagErrorBuf == nil {
 			c.flagErrorBuf = new(bytes.Buffer)
 		}
@@ -1738,9 +1749,9 @@ func (c *Command) NonInheritedFlags() *flag.FlagSet {
 func (c *Command) ResetFlags() {
 	c.flagErrorBuf = new(bytes.Buffer)
 	c.flagErrorBuf.Reset()
-	c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+	c.flags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 	c.flags.SetOutput(c.flagErrorBuf)
-	c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+	c.pflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 	c.pflags.SetOutput(c.flagErrorBuf)
 
 	c.lflags = nil
@@ -1857,7 +1868,7 @@ func (c *Command) mergePersistentFlags() {
 // If c.parentsPflags == nil, it makes new.
 func (c *Command) updateParentsPflags() {
 	if c.parentsPflags == nil {
-		c.parentsPflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError)
+		c.parentsPflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
 		c.parentsPflags.SetOutput(c.flagErrorBuf)
 		c.parentsPflags.SortFlags = false
 	}
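
The command.go changes above route Name() through the new displayName() helper wherever flag sets, help text, and usage lines are built, so a command annotated with CommandDisplayNameAnnotation (referenced in the diff context) is shown under that name. A hedged sketch of how a caller opts in; the "kubectl plugin" name is made up for illustration:

package main

import "github.com/spf13/cobra"

func main() {
	root := &cobra.Command{
		Use: "plugin",
		// With this annotation, help and usage output print "kubectl plugin"
		// instead of the bare executable name.
		Annotations: map[string]string{
			cobra.CommandDisplayNameAnnotation: "kubectl plugin",
		},
		Run: func(cmd *cobra.Command, args []string) {},
	}
	_ = root.Execute()
}
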
diff --git a/vendor/github.com/spf13/cobra/completions.go b/vendor/github.com/spf13/cobra/completions.go
index b60f6b200..c0c08b057 100644
--- a/vendor/github.com/spf13/cobra/completions.go
+++ b/vendor/github.com/spf13/cobra/completions.go
@@ -17,6 +17,8 @@ package cobra
 import (
 	"fmt"
 	"os"
+	"regexp"
+	"strconv"
 	"strings"
 	"sync"
 
@@ -211,24 +213,29 @@ func (c *Command) initCompleteCmd(args []string) {
 			// 2- Even without completions, we need to print the directive
 		}
 
-		noDescriptions := (cmd.CalledAs() == ShellCompNoDescRequestCmd)
+		noDescriptions := cmd.CalledAs() == ShellCompNoDescRequestCmd
+		if !noDescriptions {
+			if doDescriptions, err := strconv.ParseBool(getEnvConfig(cmd, configEnvVarSuffixDescriptions)); err == nil {
+				noDescriptions = !doDescriptions
+			}
+		}
+		noActiveHelp := GetActiveHelpConfig(finalCmd) == activeHelpGlobalDisable
+		out := finalCmd.OutOrStdout()
 		for _, comp := range completions {
-			if GetActiveHelpConfig(finalCmd) == activeHelpGlobalDisable {
-				// Remove all activeHelp entries in this case
-				if strings.HasPrefix(comp, activeHelpMarker) {
-					continue
-				}
+			if noActiveHelp && strings.HasPrefix(comp, activeHelpMarker) {
+				// Remove all activeHelp entries if it's disabled.
+				continue
 			}
 			if noDescriptions {
 				// Remove any description that may be included following a tab character.
-				comp = strings.Split(comp, "\t")[0]
+				comp = strings.SplitN(comp, "\t", 2)[0]
 			}
 			// Make sure we only write the first line to the output.
 			// This is needed if a description contains a linebreak.
 			// Otherwise the shell scripts will interpret the other lines as new flags
 			// and could therefore provide a wrong completion.
-			comp = strings.Split(comp, "\n")[0]
+			comp = strings.SplitN(comp, "\n", 2)[0]
 
 			// Finally trim the completion. This is especially important to get rid
 			// of a trailing tab when there is no description following it.
@@ -237,14 +244,14 @@ func (c *Command) initCompleteCmd(args []string) {
 			// although there is no description).
 			comp = strings.TrimSpace(comp)
 
-			// Print each possible completion to stdout for the completion script to consume.
-			fmt.Fprintln(finalCmd.OutOrStdout(), comp)
+			// Print each possible completion to the output for the completion script to consume.
+			fmt.Fprintln(out, comp)
 		}
 
 		// As the last printout, print the completion directive for the completion script to parse.
 		// The directive integer must be that last character following a single colon (:).
 		// The completion script expects :<directive>
-		fmt.Fprintf(finalCmd.OutOrStdout(), ":%d\n", directive)
+		fmt.Fprintf(out, ":%d\n", directive)
 
 		// Print some helpful info to stderr for the user to understand.
 		// Output from stderr must be ignored by the completion script.
@@ -291,7 +298,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDirective, error) {
 		}
 		if err != nil {
 			// Unable to find the real command. E.g., <program> someInvalidCmd
-			return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Unable to find a command for arguments: %v", trimmedArgs)
+			return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("unable to find a command for arguments: %v", trimmedArgs)
 		}
 		finalCmd.ctx = c.ctx
 
@@ -899,3 +906,34 @@ func CompError(msg string) {
 func CompErrorln(msg string) {
 	CompError(fmt.Sprintf("%s\n", msg))
 }
+
+// These values should not be changed: users will be using them explicitly.
+const (
+	configEnvVarGlobalPrefix       = "COBRA"
+	configEnvVarSuffixDescriptions = "COMPLETION_DESCRIPTIONS"
+)
+
+var configEnvVarPrefixSubstRegexp = regexp.MustCompile(`[^A-Z0-9_]`)
+
+// configEnvVar returns the name of the program-specific configuration environment
+// variable. It has the format <PROGRAM>_<SUFFIX> where <PROGRAM> is the name of the
+// root command in upper case, with all non-ASCII-alphanumeric characters replaced by `_`.
+func configEnvVar(name, suffix string) string {
+	// This format should not be changed: users will be using it explicitly.
+	v := strings.ToUpper(fmt.Sprintf("%s_%s", name, suffix))
+	v = configEnvVarPrefixSubstRegexp.ReplaceAllString(v, "_")
+	return v
+}
+
+// getEnvConfig returns the value of the configuration environment variable
+// <PROGRAM>_<SUFFIX> where <PROGRAM> is the name of the root command in upper
+// case, with all non-ASCII-alphanumeric characters replaced by `_`.
+// If the value is empty or not set, the value of the environment variable
+// COBRA_<SUFFIX> is returned instead.
+func getEnvConfig(cmd *Command, suffix string) string {
+	v := os.Getenv(configEnvVar(cmd.Root().Name(), suffix))
+	if v == "" {
+		v = os.Getenv(configEnvVar(configEnvVarGlobalPrefix, suffix))
+	}
+	return v
+}
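
getEnvConfig above is what lets users toggle completion descriptions per program: it first reads <PROGRAM>_COMPLETION_DESCRIPTIONS and then falls back to COBRA_COMPLETION_DESCRIPTIONS. A rough sketch of the same name derivation; the helper below is illustrative and not part of the cobra API:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// completionDescriptionsEnvVar mirrors cobra's unexported configEnvVar: upper-case
// the root command name plus suffix, then replace anything outside [A-Z0-9_] with '_'.
func completionDescriptionsEnvVar(rootName string) string {
	name := strings.ToUpper(rootName + "_COMPLETION_DESCRIPTIONS")
	return regexp.MustCompile(`[^A-Z0-9_]`).ReplaceAllString(name, "_")
}

func main() {
	fmt.Println(completionDescriptionsEnvVar("my-app")) // MY_APP_COMPLETION_DESCRIPTIONS
	// Setting that variable to "false" or "0" suppresses descriptions in shell
	// completions; COBRA_COMPLETION_DESCRIPTIONS is the global fallback.
}
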
diff --git a/vendor/github.com/spf13/cobra/flag_groups.go b/vendor/github.com/spf13/cobra/flag_groups.go
index 0671ec5f2..560612fd3 100644
--- a/vendor/github.com/spf13/cobra/flag_groups.go
+++ b/vendor/github.com/spf13/cobra/flag_groups.go
@@ -23,9 +23,9 @@ import (
 )
 
 const (
-	requiredAsGroup   = "cobra_annotation_required_if_others_set"
-	oneRequired       = "cobra_annotation_one_required"
-	mutuallyExclusive = "cobra_annotation_mutually_exclusive"
+	requiredAsGroupAnnotation   = "cobra_annotation_required_if_others_set"
+	oneRequiredAnnotation       = "cobra_annotation_one_required"
+	mutuallyExclusiveAnnotation = "cobra_annotation_mutually_exclusive"
 )
 
 // MarkFlagsRequiredTogether marks the given flags with annotations so that Cobra errors
@@ -37,7 +37,7 @@ func (c *Command) MarkFlagsRequiredTogether(flagNames ...string) {
 		if f == nil {
 			panic(fmt.Sprintf("Failed to find flag %q and mark it as being required in a flag group", v))
 		}
-		if err := c.Flags().SetAnnotation(v, requiredAsGroup, append(f.Annotations[requiredAsGroup], strings.Join(flagNames, " "))); err != nil {
+		if err := c.Flags().SetAnnotation(v, requiredAsGroupAnnotation, append(f.Annotations[requiredAsGroupAnnotation], strings.Join(flagNames, " "))); err != nil {
 			// Only errs if the flag isn't found.
 			panic(err)
 		}
@@ -53,7 +53,7 @@ func (c *Command) MarkFlagsOneRequired(flagNames ...string) {
 		if f == nil {
 			panic(fmt.Sprintf("Failed to find flag %q and mark it as being in a one-required flag group", v))
 		}
-		if err := c.Flags().SetAnnotation(v, oneRequired, append(f.Annotations[oneRequired], strings.Join(flagNames, " "))); err != nil {
+		if err := c.Flags().SetAnnotation(v, oneRequiredAnnotation, append(f.Annotations[oneRequiredAnnotation], strings.Join(flagNames, " "))); err != nil {
 			// Only errs if the flag isn't found.
 			panic(err)
 		}
@@ -70,7 +70,7 @@ func (c *Command) MarkFlagsMutuallyExclusive(flagNames ...string) {
 			panic(fmt.Sprintf("Failed to find flag %q and mark it as being in a mutually exclusive flag group", v))
 		}
 		// Each time this is called is a single new entry; this allows it to be a member of multiple groups if needed.
-		if err := c.Flags().SetAnnotation(v, mutuallyExclusive, append(f.Annotations[mutuallyExclusive], strings.Join(flagNames, " "))); err != nil {
+		if err := c.Flags().SetAnnotation(v, mutuallyExclusiveAnnotation, append(f.Annotations[mutuallyExclusiveAnnotation], strings.Join(flagNames, " "))); err != nil {
 			panic(err)
 		}
 	}
@@ -91,9 +91,9 @@ func (c *Command) ValidateFlagGroups() error {
 	oneRequiredGroupStatus := map[string]map[string]bool{}
 	mutuallyExclusiveGroupStatus := map[string]map[string]bool{}
 	flags.VisitAll(func(pflag *flag.Flag) {
-		processFlagForGroupAnnotation(flags, pflag, requiredAsGroup, groupStatus)
-		processFlagForGroupAnnotation(flags, pflag, oneRequired, oneRequiredGroupStatus)
-		processFlagForGroupAnnotation(flags, pflag, mutuallyExclusive, mutuallyExclusiveGroupStatus)
+		processFlagForGroupAnnotation(flags, pflag, requiredAsGroupAnnotation, groupStatus)
+		processFlagForGroupAnnotation(flags, pflag, oneRequiredAnnotation, oneRequiredGroupStatus)
+		processFlagForGroupAnnotation(flags, pflag, mutuallyExclusiveAnnotation, mutuallyExclusiveGroupStatus)
 	})
 
 	if err := validateRequiredFlagGroups(groupStatus); err != nil {
@@ -130,7 +130,7 @@ func processFlagForGroupAnnotation(flags *flag.FlagSet, pflag *flag.Flag, annotation string, groupStatus map[string]map[string]bool) {
 				continue
 			}
 
-			groupStatus[group] = map[string]bool{}
+			groupStatus[group] = make(map[string]bool, len(flagnames))
 			for _, name := range flagnames {
 				groupStatus[group][name] = false
 			}
@@ -232,9 +232,9 @@ func (c *Command) enforceFlagGroupsForCompletion() {
 	oneRequiredGroupStatus := map[string]map[string]bool{}
 	mutuallyExclusiveGroupStatus := map[string]map[string]bool{}
 	c.Flags().VisitAll(func(pflag *flag.Flag) {
-		processFlagForGroupAnnotation(flags, pflag, requiredAsGroup, groupStatus)
-		processFlagForGroupAnnotation(flags, pflag, oneRequired, oneRequiredGroupStatus)
-		processFlagForGroupAnnotation(flags, pflag, mutuallyExclusive, mutuallyExclusiveGroupStatus)
+		processFlagForGroupAnnotation(flags, pflag, requiredAsGroupAnnotation, groupStatus)
+		processFlagForGroupAnnotation(flags, pflag, oneRequiredAnnotation, oneRequiredGroupStatus)
+		processFlagForGroupAnnotation(flags, pflag, mutuallyExclusiveAnnotation, mutuallyExclusiveGroupStatus)
 	})
 
 	// If a flag that is part of a group is present, we make all the other flags
@@ -253,17 +253,17 @@ func (c *Command) enforceFlagGroupsForCompletion() {
 	// If none of the flags of a one-required group are present, we make all the flags
 	// of that group required so that the shell completion suggests them automatically
 	for flagList, flagnameAndStatus := range oneRequiredGroupStatus {
-		set := 0
+		isSet := false
 
-		for _, isSet := range flagnameAndStatus {
+		for _, isSet = range flagnameAndStatus {
 			if isSet {
-				set++
+				break
 			}
 		}
 
 		// None of the flags of the group are set, mark all flags in the group
 		// as required
-		if set == 0 {
+		if !isSet {
 			for _, fName := range strings.Split(flagList, " ") {
 				_ = c.MarkFlagRequired(fName)
 			}
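
The flag_groups.go changes above are internal: the annotation constants gain an ...Annotation suffix and the one-required check becomes a boolean, while the public flag-group API is unchanged. For context, a hedged sketch of that API; command and flag names are made up:

package main

import "github.com/spf13/cobra"

func main() {
	cmd := &cobra.Command{Use: "serve", Run: func(cmd *cobra.Command, args []string) {}}

	cmd.Flags().String("cert", "", "TLS certificate")
	cmd.Flags().String("key", "", "TLS key")
	cmd.Flags().Bool("insecure", false, "disable TLS")

	// --cert and --key must be given together, TLS cannot be combined with
	// --insecure, and at least one of the two modes must be chosen.
	cmd.MarkFlagsRequiredTogether("cert", "key")
	cmd.MarkFlagsMutuallyExclusive("cert", "insecure")
	cmd.MarkFlagsOneRequired("cert", "insecure")

	_ = cmd.Execute()
}
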
diff --git a/vendor/github.com/spf13/cobra/powershell_completions.go b/vendor/github.com/spf13/cobra/powershell_completions.go
index 551951939..a830b7bca 100644
--- a/vendor/github.com/spf13/cobra/powershell_completions.go
+++ b/vendor/github.com/spf13/cobra/powershell_completions.go
@@ -28,8 +28,8 @@ import (
 func genPowerShellComp(buf io.StringWriter, name string, includeDesc bool) {
 	// Variables should not contain a '-' or ':' character
 	nameForVar := name
-	nameForVar = strings.Replace(nameForVar, "-", "_", -1)
-	nameForVar = strings.Replace(nameForVar, ":", "_", -1)
+	nameForVar = strings.ReplaceAll(nameForVar, "-", "_")
+	nameForVar = strings.ReplaceAll(nameForVar, ":", "_")
 
 	compCmd := ShellCompRequestCmd
 	if !includeDesc {
diff --git a/vendor/modules.txt b/vendor/modules.txt
index afaa3a188..46e6e9d94 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -37,7 +37,7 @@ github.com/cespare/xxhash/v2
 github.com/cncf/xds/go/udpa/annotations
 github.com/cncf/xds/go/xds/annotations/v3
 github.com/cncf/xds/go/xds/core/v3
-# github.com/containerd/containerd v1.7.17
+# github.com/containerd/containerd v1.7.18
 ## explicit; go 1.21
 github.com/containerd/containerd/archive/compression
 github.com/containerd/containerd/content
@@ -46,7 +46,6 @@ github.com/containerd/containerd/errdefs
 github.com/containerd/containerd/filters
 github.com/containerd/containerd/images
 github.com/containerd/containerd/labels
-github.com/containerd/containerd/log
 github.com/containerd/containerd/pkg/randutil
 github.com/containerd/containerd/platforms
 github.com/containerd/containerd/reference
@@ -58,6 +57,9 @@ github.com/containerd/containerd/remotes/docker/schema1
 github.com/containerd/containerd/remotes/errors
 github.com/containerd/containerd/tracing
 github.com/containerd/containerd/version
+# github.com/containerd/errdefs v0.1.0
+## explicit; go 1.20
+github.com/containerd/errdefs
 # github.com/containerd/log v0.1.0
 ## explicit; go 1.20
 github.com/containerd/log
@@ -195,7 +197,7 @@ github.com/moby/locker
 # github.com/olekukonko/tablewriter v0.0.5
 ## explicit; go 1.12
 github.com/olekukonko/tablewriter
-# github.com/open-policy-agent/opa v0.65.0
+# github.com/open-policy-agent/opa v0.66.0
 ## explicit; go 1.21
 github.com/open-policy-agent/opa/ast
 github.com/open-policy-agent/opa/ast/internal/scanner
@@ -397,7 +399,7 @@ github.com/spf13/afero/mem
 # github.com/spf13/cast v1.6.0
 ## explicit; go 1.19
 github.com/spf13/cast
-# github.com/spf13/cobra v1.8.0
+# github.com/spf13/cobra v1.8.1
 ## explicit; go 1.15
 github.com/spf13/cobra
 # github.com/spf13/pflag v1.0.5