From 9b0e25bc1f226a8c870b2dbae1e1051ea3bc91e1 Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Mon, 7 Oct 2024 00:08:25 +0000 Subject: [PATCH 01/31] add direct controller for compute firewall policy rule --- .../v1beta1/firewallpolicyrule_types.go | 9 + .../compute/firewallpolicyrule/client.go | 88 +++ .../firewallpolicyrule_controller.go | 313 ++++++++ .../firewallpolicyrule_externalresource.go | 26 + .../direct/compute/firewallpolicyrule/refs.go | 211 +++++ pkg/controller/direct/maputils.go | 5 + ...ject_computefirewallpolicyrule.golden.yaml | 33 + .../computefirewallpolicyrule/_http.log | 721 ++++++++++++++++++ ...firewallpolicyrule-egress-full.golden.yaml | 2 +- .../_http.log | 115 ++- ...irewallpolicyrule-ingress-full.golden.yaml | 2 +- .../_http.log | 22 +- .../_http.log | 17 +- 13 files changed, 1557 insertions(+), 7 deletions(-) create mode 100644 pkg/controller/direct/compute/firewallpolicyrule/client.go create mode 100644 pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go create mode 100644 pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go create mode 100644 pkg/controller/direct/compute/firewallpolicyrule/refs.go create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log diff --git a/apis/compute/v1beta1/firewallpolicyrule_types.go b/apis/compute/v1beta1/firewallpolicyrule_types.go index 6a4cedfcac..da8d6eac6e 100644 --- a/apis/compute/v1beta1/firewallpolicyrule_types.go +++ b/apis/compute/v1beta1/firewallpolicyrule_types.go @@ -20,6 +20,15 @@ import ( refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" commonv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/common/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime/schema" +) + +var ( + ComputeFirewallPolicyRuleGVK = schema.GroupVersionKind{ + Group: SchemeGroupVersion.Group, + Version: SchemeGroupVersion.Version, + Kind: "ComputeFirewallPolicyRule", + } ) // +kcc:proto=google.cloud.compute.v1.FirewallPolicyRuleMatcherLayer4Config diff --git a/pkg/controller/direct/compute/firewallpolicyrule/client.go b/pkg/controller/direct/compute/firewallpolicyrule/client.go new file mode 100644 index 0000000000..cbcc0207ae --- /dev/null +++ b/pkg/controller/direct/compute/firewallpolicyrule/client.go @@ -0,0 +1,88 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
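+
+// Usage sketch (informal; this mirrors how AdapterForObject in
+// firewallpolicyrule_controller.go consumes this file):
+//
+//	gcpClient, err := newGCPClient(ctx, m.config)
+//	if err != nil {
+//		return nil, fmt.Errorf("building gcp client: %w", err)
+//	}
+//	firewallPoliciesClient, err := gcpClient.firewallPoliciesClient(ctx)
+//	if err != nil {
+//		return nil, err
+//	}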
+ +package firewallpolicyrule + +import ( + "context" + "fmt" + "net/http" + + api "cloud.google.com/go/compute/apiv1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" + "google.golang.org/api/option" +) + +type gcpClient struct { + config config.ControllerConfig +} + +func newGCPClient(ctx context.Context, config *config.ControllerConfig) (*gcpClient, error) { + gcpClient := &gcpClient{ + config: *config, + } + return gcpClient, nil +} + +func (m *gcpClient) options() ([]option.ClientOption, error) { + var opts []option.ClientOption + if m.config.UserAgent != "" { + opts = append(opts, option.WithUserAgent(m.config.UserAgent)) + } + if m.config.HTTPClient != nil { + // TODO: Set UserAgent in this scenario (error is: WithHTTPClient is incompatible with gRPC dial options) + + httpClient := &http.Client{} + *httpClient = *m.config.HTTPClient + httpClient.Transport = &optionsRoundTripper{ + config: m.config, + inner: m.config.HTTPClient.Transport, + } + opts = append(opts, option.WithHTTPClient(httpClient)) + } + if m.config.UserProjectOverride && m.config.BillingProject != "" { + opts = append(opts, option.WithQuotaProject(m.config.BillingProject)) + } + + // TODO: support endpoints? + // if m.config.Endpoint != "" { + // opts = append(opts, option.WithEndpoint(m.config.Endpoint)) + // } + + return opts, nil +} + +type optionsRoundTripper struct { + config config.ControllerConfig + inner http.RoundTripper +} + +func (m *optionsRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + if m.config.UserAgent != "" { + req.Header.Set("User-Agent", m.config.UserAgent) + } + return m.inner.RoundTrip(req) +} + +func (m *gcpClient) firewallPoliciesClient(ctx context.Context) (*api.FirewallPoliciesClient, error) { + opts, err := m.options() + if err != nil { + return nil, err + } + client, err := api.NewFirewallPoliciesRESTClient(ctx, opts...) + if err != nil { + return nil, fmt.Errorf("building FirewallPolicy client: %w", err) + } + return client, err +} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go new file mode 100644 index 0000000000..115a71e84f --- /dev/null +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go @@ -0,0 +1,313 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
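+
+// Overview of the reconciliation flow in this file: the adapter is keyed by
+// the resolved firewall policy and the rule priority. Find() calls GetRule and
+// treats an HTTP 400 "invalid" response as "rule not found"; Create() calls
+// AddRule; Update() calls PatchRule, passing the priority as a request
+// parameter rather than in the rule body; Delete() calls RemoveRule. Create()
+// and Update() re-read the rule afterwards to populate status.ruleTupleCount.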
+ +package firewallpolicyrule + +import ( + "context" + "fmt" + + gcp "cloud.google.com/go/compute/apiv1" + computepb "cloud.google.com/go/compute/apiv1/computepb" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/compute/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/directbase" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/registry" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/klog/v2" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +const ctrlName = "firewallpolicyrule-controller" + +func init() { + registry.RegisterModel(krm.ComputeFirewallPolicyRuleGVK, NewFirewallPolicyRuleModel) +} + +func NewFirewallPolicyRuleModel(ctx context.Context, config *config.ControllerConfig) (directbase.Model, error) { + return &firewallPolicyRuleModel{config: config}, nil +} + +type firewallPolicyRuleModel struct { + config *config.ControllerConfig +} + +// model implements the Model interface. +var _ directbase.Model = &firewallPolicyRuleModel{} + +type firewallPolicyRuleAdapter struct { + firewallPolicy string + priority int64 + firewallPoliciesClient *gcp.FirewallPoliciesClient + desired *krm.ComputeFirewallPolicyRule + actual *computepb.FirewallPolicyRule + reader client.Reader +} + +var _ directbase.Adapter = &firewallPolicyRuleAdapter{} + +func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader client.Reader, u *unstructured.Unstructured) (directbase.Adapter, error) { + obj := &krm.ComputeFirewallPolicyRule{} + if err := runtime.DefaultUnstructuredConverter.FromUnstructured(u.Object, &obj); err != nil { + return nil, fmt.Errorf("error converting to %T: %w", obj, err) + } + + // Set label managed-by-cnrm: true + obj.ObjectMeta.Labels["managed-by-cnrm"] = "true" + + // Get firewall policy + firewallPolicyRef, err := ResolveComputeFirewallPolicy(ctx, reader, obj, obj.Spec.FirewallPolicyRef) + if err != nil { + return nil, err + + } + obj.Spec.FirewallPolicyRef.External = firewallPolicyRef.External + firewallPolicy := obj.Spec.FirewallPolicyRef.External + + // Get priority + priority := obj.Spec.Priority + + firewallPolicyRuleAdapter := &firewallPolicyRuleAdapter{ + firewallPolicy: firewallPolicy, + priority: priority, + desired: obj, + reader: reader, + } + + // Get GCP client + gcpClient, err := newGCPClient(ctx, m.config) + if err != nil { + return nil, fmt.Errorf("building gcp client: %w", err) + } + + firewallPoliciesClient, err := gcpClient.firewallPoliciesClient(ctx) + if err != nil { + return nil, err + } + firewallPolicyRuleAdapter.firewallPoliciesClient = firewallPoliciesClient + + return firewallPolicyRuleAdapter, nil +} + +func (m *firewallPolicyRuleModel) AdapterForURL(ctx context.Context, url string) (directbase.Adapter, error) { + // TODO: Support URLs + return nil, nil +} + +func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("getting ComputeFirewallPolicyRule", "priority", a.priority) + + firewallPolicyRule, err := a.get(ctx) + if err != nil { + // When a certain rule does not exist, the error has code 400(invalid) instead of 404(not found) + // example error message: + // "Invalid value for field 'priority': '9000'. 
The firewall policy does not contain a rule at priority 9000.", + if direct.IsInvalidValue(err) { + return false, nil + } + return false, fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + a.actual = firewallPolicyRule + return true, nil +} + +func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error { + u := createOp.GetUnstructured() + var err error + + err = resolveDependencies(ctx, a.reader, a.desired) + if err != nil { + return err + } + + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("creating ComputeFirewallPolicyRule", "priority", a.priority) + mapCtx := &direct.MapContext{} + + desired := a.desired.DeepCopy() + + firewallPolicyRule := ComputeFirewallPolicyRuleSpec_ToProto(mapCtx, &desired.Spec) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + + req := &computepb.AddRuleFirewallPolicyRequest{ + FirewallPolicyRuleResource: firewallPolicyRule, + FirewallPolicy: a.firewallPolicy, + } + op, err := a.firewallPoliciesClient.AddRule(ctx, req) + + if err != nil { + return fmt.Errorf("creating ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + if !op.Done() { + err = op.Wait(ctx) + if err != nil { + return fmt.Errorf("waiting ComputeFirewallPolicyRule %d create failed: %w", a.priority, err) + } + } + log.V(2).Info("successfully created ComputeFirewallPolicyRule", "priority", a.priority) + + // Get the created resource + created := &computepb.FirewallPolicyRule{} + created, err = a.get(ctx) + if err != nil { + return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + + status := &krm.ComputeFirewallPolicyRuleStatus{ + RuleTupleCount: direct.PtrTo(int64(*created.RuleTupleCount)), + Kind: direct.PtrTo("compute#firewallPolicyRule"), + } + return setStatus(u, status) +} + +func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *directbase.UpdateOperation) error { + u := updateOp.GetUnstructured() + var err error + + err = resolveDependencies(ctx, a.reader, a.desired) + if err != nil { + return err + } + + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("updating ComputeFirewallPolicyRule", "priority", a.priority) + mapCtx := &direct.MapContext{} + + desired := a.desired.DeepCopy() + firewallPolicyRule := ComputeFirewallPolicyRuleSpec_ToProto(mapCtx, &desired.Spec) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + firewallPolicyRule.Priority = nil + + op := &gcp.Operation{} + updated := &computepb.FirewallPolicyRule{} + + updateReq := &computepb.PatchRuleFirewallPolicyRequest{ + FirewallPolicyRuleResource: firewallPolicyRule, + FirewallPolicy: a.firewallPolicy, + Priority: direct.PtrTo(int32(a.priority)), + } + op, err = a.firewallPoliciesClient.PatchRule(ctx, updateReq) + if err != nil { + return fmt.Errorf("updating ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + if !op.Done() { + err = op.Wait(ctx) + if err != nil { + return fmt.Errorf("waiting ComputeFirewallPolicyRule %d update failed: %w", a.priority, err) + } + } + log.V(2).Info("successfully updated ComputeFirewallPolicyRule", "priority", a.priority) + + // Get the updated resource + updated, err = a.get(ctx) + if err != nil { + return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + + status := &krm.ComputeFirewallPolicyRuleStatus{ + RuleTupleCount: direct.PtrTo(int64(*updated.RuleTupleCount)), + Kind: direct.PtrTo("compute#firewallPolicyRule"), + } + return setStatus(u, status) +} + +func (a *firewallPolicyRuleAdapter) 
Export(ctx context.Context) (*unstructured.Unstructured, error) { + if a.actual == nil { + return nil, fmt.Errorf("firewallPolicyRule %d not found", a.priority) + } + + mc := &direct.MapContext{} + spec := ComputeFirewallPolicyRuleSpec_FromProto(mc, a.actual) + specObj, err := runtime.DefaultUnstructuredConverter.ToUnstructured(spec) + if err != nil { + return nil, fmt.Errorf("error converting firewallPolicyRule spec to unstructured: %w", err) + } + + u := &unstructured.Unstructured{ + Object: make(map[string]interface{}), + } + u.SetGroupVersionKind(krm.ComputeFirewallPolicyRuleGVK) + + if err := unstructured.SetNestedField(u.Object, specObj, "spec"); err != nil { + return nil, fmt.Errorf("setting spec: %w", err) + } + + return u, nil +} + +// Delete implements the Adapter interface. +func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { + + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("deleting ComputeFirewallPolicyRule", "priority", a.priority) + + var err error + op := &gcp.Operation{} + req := &computepb.RemoveRuleFirewallPolicyRequest{ + FirewallPolicy: a.firewallPolicy, + Priority: direct.PtrTo(int32(a.priority)), + } + op, err = a.firewallPoliciesClient.RemoveRule(ctx, req) + + if err != nil { + return false, fmt.Errorf("deleting ComputeFirewallPolicyRule %d: %w", a.priority, err) + } + if !op.Done() { + err = op.Wait(ctx) + if err != nil { + return false, fmt.Errorf("waiting ComputeFirewallPolicyRule %d delete failed: %w", a.priority, err) + } + } + log.V(2).Info("successfully deleted ComputeFirewallPolicyRule", "priority", a.priority) + + // Get the deleted rules + _, err = a.get(ctx) + if err != nil { + return true, nil + } + return true, nil +} + +func (a *firewallPolicyRuleAdapter) get(ctx context.Context) (*computepb.FirewallPolicyRule, error) { + getReq := &computepb.GetRuleFirewallPolicyRequest{ + FirewallPolicy: a.firewallPolicy, + Priority: direct.PtrTo(int32(a.priority)), + } + return a.firewallPoliciesClient.GetRule(ctx, getReq) +} + +func setStatus(u *unstructured.Unstructured, typedStatus any) error { + status, err := runtime.DefaultUnstructuredConverter.ToUnstructured(typedStatus) + if err != nil { + return fmt.Errorf("error converting status to unstructured: %w", err) + } + + old, _, _ := unstructured.NestedMap(u.Object, "status") + if old != nil { + status["conditions"] = old["conditions"] + status["observedGeneration"] = old["observedGeneration"] + status["externalRef"] = old["externalRef"] + } + + u.Object["status"] = status + + return nil +} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go new file mode 100644 index 0000000000..fc8edc4366 --- /dev/null +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go @@ -0,0 +1,26 @@ +/* +Copyright 2024. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package firewallpolicyrule + +const ( + serviceDomain = "//compute.googleapis.com" +) + +type FirewallPolicyRuleIdentity struct { + firewallPolicy string + priority int64 +} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/refs.go b/pkg/controller/direct/compute/firewallpolicyrule/refs.go new file mode 100644 index 0000000000..cdb3e61ab5 --- /dev/null +++ b/pkg/controller/direct/compute/firewallpolicyrule/refs.go @@ -0,0 +1,211 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package firewallpolicyrule + +import ( + "context" + "fmt" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/compute/v1beta1" + + refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/k8s" + apierrors "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +func ResolveComputeFirewallPolicy(ctx context.Context, reader client.Reader, src client.Object, ref *refs.ComputeFirewallPolicyRef) (*refs.ComputeFirewallPolicyRef, error) { + if ref == nil { + return nil, nil + } + + if ref.External != "" { + if ref.Name != "" { + return nil, fmt.Errorf("cannot specify both name and external on reference") + } + return ref, nil + } + + if ref.Name == "" { + return nil, fmt.Errorf("must specify either name or external on reference") + } + + key := types.NamespacedName{ + Namespace: ref.Namespace, + Name: ref.Name, + } + if key.Namespace == "" { + key.Namespace = src.GetNamespace() + } + + computeFirwallPolicy, err := resolveResourceName(ctx, reader, key, schema.GroupVersionKind{ + Group: "compute.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "ComputeFirewallPolicy", + }) + + if err != nil { + return nil, err + } + + resourceID, err := refs.GetResourceID(computeFirwallPolicy) + if err != nil { + return nil, err + } + + return &refs.ComputeFirewallPolicyRef{ + External: fmt.Sprintf("%s", resourceID)}, nil +} + +func ResolveComputeNetwork(ctx context.Context, reader client.Reader, src client.Object, ref *refs.ComputeNetworkRef) (*refs.ComputeNetworkRef, error) { + if ref == nil { + return nil, nil + } + + if ref.External != "" { + if ref.Name != "" { + return nil, fmt.Errorf("cannot specify both name and external on reference") + } + return ref, nil + } + + if ref.Name == "" { + return nil, fmt.Errorf("must specify either name or external on reference") + } + + key := types.NamespacedName{ + Namespace: ref.Namespace, + Name: ref.Name, + } + if key.Namespace == "" { + key.Namespace = src.GetNamespace() + } + + computeNetwork, err := resolveResourceName(ctx, reader, key, schema.GroupVersionKind{ + Group: "compute.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "ComputeNetwork", + }) + + if err != nil { + return nil, err + } + + resourceID, err := refs.GetResourceID(computeNetwork) + if err != nil { + return 
nil, err + } + + projectID, err := refs.ResolveProjectID(ctx, reader, computeNetwork) + if err != nil { + return nil, err + } + + return &refs.ComputeNetworkRef{ + External: fmt.Sprintf("https://www.googleapis.com/compute/v1/projects/%s/global/networks/%s", projectID, resourceID)}, nil +} + +func ResolveIAMSetviceAccount(ctx context.Context, reader client.Reader, src client.Object, ref *refs.IAMServiceAccountRef) (*refs.IAMServiceAccountRef, error) { + if ref == nil { + return nil, nil + } + + if ref.External != "" { + if ref.Name != "" { + return nil, fmt.Errorf("cannot specify both name and external on reference") + } + return ref, nil + } + + if ref.Name == "" { + return nil, fmt.Errorf("must specify either name or external on reference") + } + + key := types.NamespacedName{ + Namespace: ref.Namespace, + Name: ref.Name, + } + if key.Namespace == "" { + key.Namespace = src.GetNamespace() + } + + iamServiceAccount, err := resolveResourceName(ctx, reader, key, schema.GroupVersionKind{ + Group: "iam.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "IAMServiceAccount", + }) + + if err != nil { + return nil, err + } + + resourceID, err := refs.GetResourceID(iamServiceAccount) + if err != nil { + return nil, err + } + + projectID, err := refs.ResolveProjectID(ctx, reader, iamServiceAccount) + if err != nil { + return nil, err + } + + return &refs.IAMServiceAccountRef{ + External: fmt.Sprintf("projects/%s/serviceAccounts/%s@%s.iam.gserviceaccount.com", projectID, resourceID, projectID)}, nil +} + +func resolveResourceName(ctx context.Context, reader client.Reader, key client.ObjectKey, gvk schema.GroupVersionKind) (*unstructured.Unstructured, error) { + resource := &unstructured.Unstructured{} + resource.SetGroupVersionKind(gvk) + if err := reader.Get(ctx, key, resource); err != nil { + if apierrors.IsNotFound(err) { + return nil, k8s.NewReferenceNotFoundError(resource.GroupVersionKind(), key) + } + return nil, fmt.Errorf("error reading referenced %v %v: %w", gvk.Kind, key, err) + } + + return resource, nil +} + +func resolveDependencies(ctx context.Context, reader client.Reader, obj *krm.ComputeFirewallPolicyRule) error { + // Get target resources(compute network) + var targetResources []*refs.ComputeNetworkRef + if obj.Spec.TargetResources != nil { + for _, targetResource := range obj.Spec.TargetResources { + networkRef, err := ResolveComputeNetwork(ctx, reader, obj, targetResource) + if err != nil { + return err + } + targetResource.External = networkRef.External + targetResources = append(targetResources, targetResource) + } + obj.Spec.TargetResources = targetResources + } + // Get target service accounts + var targetServiceAccounts []*refs.IAMServiceAccountRef + if obj.Spec.TargetServiceAccounts != nil { + for _, targetServiceAccount := range obj.Spec.TargetServiceAccounts { + iamServiceAccount, err := ResolveIAMSetviceAccount(ctx, reader, obj, targetServiceAccount) + if err != nil { + return err + } + targetServiceAccount.External = iamServiceAccount.External + targetServiceAccounts = append(targetServiceAccounts, targetServiceAccount) + } + obj.Spec.TargetServiceAccounts = targetServiceAccounts + } + return nil +} diff --git a/pkg/controller/direct/maputils.go b/pkg/controller/direct/maputils.go index f99c7d6636..43796127dd 100644 --- a/pkg/controller/direct/maputils.go +++ b/pkg/controller/direct/maputils.go @@ -228,6 +228,11 @@ func IsNotFound(err error) bool { return HasHTTPCode(err, 404) } +// IsInvalidValue returns true if the given error is an HTTP 400. 
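+// Some Compute Engine sub-resource APIs (for example FirewallPolicies.GetRule)
+// report a missing rule as HTTP 400 "invalid" rather than 404 "not found", so
+// callers such as the firewallpolicyrule controller's Find() use this helper
+// to treat that response as "resource does not exist".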
+func IsInvalidValue(err error) bool { + return HasHTTPCode(err, 400) +} + // HasHTTPCode returns true if the given error is an HTTP response with the given code. func HasHTTPCode(err error, code int) bool { if err == nil { diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml new file mode 100644 index 0000000000..15fc632b25 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml @@ -0,0 +1,33 @@ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + annotations: + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: firewallpolicyrule-${uniqueId} + namespace: ${uniqueId} +spec: + action: allow + direction: INGRESS + firewallPolicyRef: + name: firewallpolicyrule-${uniqueId} + match: + layer4Configs: + - ipProtocol: tcp + srcIPRanges: + - 10.100.0.1/32 + priority: 9000 +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + observedGeneration: 2 + ruleTupleCount: 2 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log new file mode 100644 index 0000000000..5d0391a394 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log @@ -0,0 +1,721 @@ +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies?alt=json&parentId=organizations%2F128653134652 +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "description": "A basic folder firewall policy", + "parent": "organizations/${organizationID}", + "shortName": "firewallpolicy-${uniqueId}" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json&parentId=organizations%2F128653134652 +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + 
"operationType": "createFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "774029050012", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/774029050012", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], + "message": "Invalid value for field 'priority': '9000'. 
The firewall policy does not contain a rule at priority 9000." + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/addRule +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +{ + "action": "deny", + "direction": "INGRESS", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000 +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "deny", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 2 +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/patchRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +{ + "action": "allow", + "direction": "INGRESS", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000 +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 
+Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "allow", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 2 +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/removeRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=774029050012 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET 
https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "774029050012", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/774029050012", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +DELETE https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json +Content-Type: application/json +User-Agent: 
kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "774029050012", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'locations/global/firewallPolicies/774029050012' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'locations/global/firewallPolicies/774029050012' was not found" + } +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml index 1e0e625729..9055f653d3 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml @@ -49,4 +49,4 @@ status: type: Ready kind: compute#firewallPolicyRule observedGeneration: 2 - ruleTupleCount: 4 + ruleTupleCount: 110 diff --git 
a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log index 507ffb07c2..cf11fd84fc 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log @@ -304,6 +304,7 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", @@ -439,6 +440,7 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -534,6 +536,99 @@ X-Xss-Protection: 0 --- +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Service account projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com does not exist.", + "reason": "notFound" + } + ], + "message": "Service account projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com does not exist.", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "sa-${uniqueId}", + "serviceAccount": {} +} + +409 Conflict +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 409, + "errors": [ + { + "domain": "global", + "message": "Service account sa-${uniqueId} already exists within project projects/${projectId}.", + "reason": "alreadyExists" + } + ], + "message": "Service account sa-${uniqueId} already exists within project projects/${projectId}.", + "status": "ALREADY_EXISTS" + } +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: 
application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager @@ -639,6 +734,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -800,7 +902,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 4, + "ruleTupleCount": 109, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" ], @@ -966,7 +1068,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 4, + "ruleTupleCount": 110, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" ], @@ -1060,6 +1162,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
} } @@ -1175,6 +1284,7 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -1270,6 +1380,7 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml index 348b62a890..bcc98e894a 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml @@ -49,4 +49,4 @@ status: type: Ready kind: compute#firewallPolicyRule observedGeneration: 2 - ruleTupleCount: 4 + ruleTupleCount: 110 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log index e19f7aaa01..418fc65ab4 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log @@ -304,6 +304,7 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", @@ -439,6 +440,7 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -639,6 +641,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
} } @@ -800,7 +809,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 4, + "ruleTupleCount": 109, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" ], @@ -966,7 +975,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 4, + "ruleTupleCount": 110, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" ], @@ -1060,6 +1069,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -1175,6 +1191,7 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -1270,6 +1287,7 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { + "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log index 9fdf50371c..94c6402ef6 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log @@ -195,6 +195,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -335,7 +342,8 @@ User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 "srcIpRanges": [ "10.100.0.1/32" ] - } + }, + "priority": 9000 } 200 OK @@ -518,6 +526,13 @@ X-Xss-Protection: 0 { "error": { "code": 400, + "errors": [ + { + "domain": "global", + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", + "reason": "invalid" + } + ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
} } From 79f3e1f817e4c637818f496eabf1e718868ad1a3 Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Tue, 8 Oct 2024 23:13:04 +0000 Subject: [PATCH 02/31] collect realGCP log --- ...ject_computefirewallpolicyrule.golden.yaml | 33 - .../computefirewallpolicyrule/_http.log | 721 ------------------ ...firewallpolicyrule-egress-full.golden.yaml | 1 - .../_http.log | 51 +- ...irewallpolicyrule-ingress-full.golden.yaml | 1 - .../_http.log | 51 +- ...putefirewallpolicyrule-minimal.golden.yaml | 1 - .../_http.log | 51 +- 8 files changed, 90 insertions(+), 820 deletions(-) delete mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml delete mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml deleted file mode 100644 index 15fc632b25..0000000000 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_generated_object_computefirewallpolicyrule.golden.yaml +++ /dev/null @@ -1,33 +0,0 @@ -apiVersion: compute.cnrm.cloud.google.com/v1beta1 -kind: ComputeFirewallPolicyRule -metadata: - annotations: - cnrm.cloud.google.com/management-conflict-prevention-policy: none - finalizers: - - cnrm.cloud.google.com/finalizer - - cnrm.cloud.google.com/deletion-defender - generation: 2 - labels: - cnrm-test: "true" - name: firewallpolicyrule-${uniqueId} - namespace: ${uniqueId} -spec: - action: allow - direction: INGRESS - firewallPolicyRef: - name: firewallpolicyrule-${uniqueId} - match: - layer4Configs: - - ipProtocol: tcp - srcIPRanges: - - 10.100.0.1/32 - priority: 9000 -status: - conditions: - - lastTransitionTime: "1970-01-01T00:00:00Z" - message: The resource is up to date - reason: UpToDate - status: "True" - type: Ready - observedGeneration: 2 - ruleTupleCount: 2 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log deleted file mode 100644 index 5d0391a394..0000000000 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/_http.log +++ /dev/null @@ -1,721 +0,0 @@ -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies?alt=json&parentId=organizations%2F128653134652 -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -{ - "description": "A basic folder firewall policy", - "parent": "organizations/${organizationID}", - "shortName": "firewallpolicy-${uniqueId}" -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "createFirewallPolicy", - "progress": 0, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "RUNNING", - "user": "user@example.com" -} - ---- - -GET 
https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json&parentId=organizations%2F128653134652 -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "createFirewallPolicy", - "progress": 100, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "creationTimestamp": "2024-04-01T12:34:56.123456Z", - "description": "A basic folder firewall policy", - "displayName": "firewallpolicy-${uniqueId}", - "fingerprint": "abcdef0123A=", - "id": "000000000000000000000", - "kind": "compute#firewallPolicy", - "name": "774029050012", - "parent": "organizations/${organizationID}", - "ruleTupleCount": 8, - "rules": [ - { - "action": "goto_next", - "description": "default egress rule ipv6", - "direction": "EGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "destIpRanges": [ - "::/0" - ], - "layer4Configs": [ - { - "ipProtocol": "all" - } - ] - }, - "priority": 2147483644, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default ingress rule ipv6", - "direction": "INGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - "ipProtocol": "all" - } - ], - "srcIpRanges": [ - "::/0" - ] - }, - "priority": 2147483645, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default egress rule", - "direction": "EGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "destIpRanges": [ - "0.0.0.0/0" - ], - "layer4Configs": [ - { - "ipProtocol": "all" - } - ] - }, - "priority": 2147483646, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default ingress rule", - "direction": "INGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - "ipProtocol": "all" - } - ], - "srcIpRanges": [ - "0.0.0.0/0" - ] - }, - "priority": 2147483647, - "ruleTupleCount": 2 - } - ], - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/774029050012", - "shortName": "firewallpolicy-${uniqueId}" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 -Content-Type: application/json 
-User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -400 Bad Request -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." - } -} - ---- - -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/addRule -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -{ - "action": "deny", - "direction": "INGRESS", - "match": { - "layer4Configs": [ - { - "ipProtocol": "tcp" - } - ], - "srcIpRanges": [ - "10.100.0.1/32" - ] - }, - "priority": 9000 -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "addFirewallRuleToFirewallPolicy", - "progress": 0, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "RUNNING", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "addFirewallRuleToFirewallPolicy", - "progress": 100, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "action": "deny", - "description": "", - "direction": "INGRESS", - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - "ipProtocol": "tcp" - } - ], - 
"srcIpRanges": [ - "10.100.0.1/32" - ] - }, - "priority": 9000, - "ruleTupleCount": 2 -} - ---- - -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/patchRule?priority=9000 -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -{ - "action": "allow", - "direction": "INGRESS", - "match": { - "layer4Configs": [ - { - "ipProtocol": "tcp" - } - ], - "srcIpRanges": [ - "10.100.0.1/32" - ] - }, - "priority": 9000 -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "patchFirewallRuleInFirewallPolicy", - "progress": 0, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "RUNNING", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "patchFirewallRuleInFirewallPolicy", - "progress": 100, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/getRule?priority=9000 -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "action": "allow", - "description": "", - "direction": "INGRESS", - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - "ipProtocol": "tcp" - } - ], - "srcIpRanges": [ - "10.100.0.1/32" - ] - }, - "priority": 9000, - "ruleTupleCount": 2 -} - ---- - -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/removeRule?priority=9000 -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=774029050012 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - 
"id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "removeFirewallRuleFromFirewallPolicy", - "progress": 0, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "RUNNING", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "removeFirewallRuleFromFirewallPolicy", - "progress": 100, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "creationTimestamp": "2024-04-01T12:34:56.123456Z", - "description": "A basic folder firewall policy", - "displayName": "firewallpolicy-${uniqueId}", - "fingerprint": "abcdef0123A=", - "id": "000000000000000000000", - "kind": "compute#firewallPolicy", - "name": "774029050012", - "parent": "organizations/${organizationID}", - "ruleTupleCount": 8, - "rules": [ - { - "action": "goto_next", - "description": "default egress rule ipv6", - "direction": "EGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "destIpRanges": [ - "::/0" - ], - "layer4Configs": [ - { - "ipProtocol": "all" - } - ] - }, - "priority": 2147483644, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default ingress rule ipv6", - "direction": "INGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - "ipProtocol": "all" - } - ], - "srcIpRanges": [ - "::/0" - ] - }, - "priority": 2147483645, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default egress rule", - "direction": "EGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "destIpRanges": [ - "0.0.0.0/0" - ], - "layer4Configs": [ - { - "ipProtocol": "all" - } - ] - }, - "priority": 2147483646, - "ruleTupleCount": 2 - }, - { - "action": "goto_next", - "description": "default ingress rule", - "direction": "INGRESS", - "enableLogging": false, - "kind": "compute#firewallPolicyRule", - "match": { - "layer4Configs": [ - { - 
"ipProtocol": "all" - } - ], - "srcIpRanges": [ - "0.0.0.0/0" - ] - }, - "priority": 2147483647, - "ruleTupleCount": 2 - } - ], - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012/774029050012", - "shortName": "firewallpolicy-${uniqueId}" -} - ---- - -DELETE https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "deleteFirewallPolicy", - "progress": 0, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "RUNNING", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "id": "000000000000000000000", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "compute#operation", - "name": "${operationID}", - "operationType": "deleteFirewallPolicy", - "progress": 100, - "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "774029050012", - "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012", - "user": "user@example.com" -} - ---- - -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/774029050012?alt=json -Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 - -404 Not Found -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 404, - "errors": [ - { - "domain": "global", - "message": "The resource 'locations/global/firewallPolicies/774029050012' was not found", - "reason": "notFound" - } - ], - "message": "The resource 'locations/global/firewallPolicies/774029050012' was not found" - } -} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml index 9055f653d3..95335ff087 100644 --- 
a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml @@ -3,7 +3,6 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log index cf11fd84fc..5817ee2cd4 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log @@ -716,9 +716,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private @@ -747,14 +748,14 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "deny", "direction": "EGRESS", - "firewallPolicy": "locations/global/firewallPolicies/${firewallPolicyId}", "match": { "destAddressGroups": [ "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" @@ -820,9 +821,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -853,9 +855,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -913,9 +916,10 @@ X-Xss-Protection: 0 --- -POST 
https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "allow", @@ -985,9 +989,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -1018,9 +1023,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -1079,9 +1085,10 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -1111,9 +1118,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -1144,9 +1152,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml index bcc98e894a..65e5585f7f 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml +++ 
b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml @@ -3,7 +3,6 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log index 418fc65ab4..17b6fb2fbf 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log @@ -623,9 +623,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private @@ -654,14 +655,14 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "deny", "direction": "INGRESS", - "firewallPolicy": "locations/global/firewallPolicies/${firewallPolicyId}", "match": { "destIpRanges": [ "10.100.0.1/32" @@ -727,9 +728,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -760,9 +762,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -820,9 +823,10 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 
+User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "allow", @@ -892,9 +896,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -925,9 +930,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -986,9 +992,10 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -1018,9 +1025,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -1051,9 +1059,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml index 52d426c96b..77bb2c7fb2 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml @@ -3,7 +3,6 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: absent finalizers: - 
cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log index 94c6402ef6..b9c2b34321 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log @@ -177,9 +177,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private @@ -208,14 +209,14 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "deny", "direction": "INGRESS", - "firewallPolicy": "locations/global/firewallPolicies/${firewallPolicyId}", "match": { "layer4Configs": [ { @@ -257,9 +258,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -290,9 +292,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -326,9 +329,10 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} { "action": "allow", @@ -374,9 +378,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager 
DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -407,9 +412,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -443,9 +449,10 @@ X-Xss-Protection: 0 --- -POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 200 OK Cache-Control: private @@ -475,9 +482,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} 200 OK Cache-Control: private @@ -508,9 +516,10 @@ X-Xss-Protection: 0 --- -GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} 400 Bad Request Cache-Control: private From 209539f51ede1a705e1c2277fec27ea054801463 Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Thu, 10 Oct 2024 00:57:36 +0000 Subject: [PATCH 03/31] collect mockGCP log --- mockgcp/mock_http_roundtrip.go | 9 ++ mockgcp/mockcompute/firewallpoliciesv1.go | 1 + .../firewallpolicyrule_controller.go | 7 +- ...firewallpolicyrule-egress-full.golden.yaml | 2 +- .../_http.log | 115 +----------------- ...irewallpolicyrule-ingress-full.golden.yaml | 2 +- .../_http.log | 22 +--- .../_http.log | 17 +-- 8 files changed, 19 insertions(+), 156 deletions(-) diff --git a/mockgcp/mock_http_roundtrip.go b/mockgcp/mock_http_roundtrip.go index baaf62fed0..f5a17ba4c0 100644 --- a/mockgcp/mock_http_roundtrip.go +++ b/mockgcp/mock_http_roundtrip.go @@ -326,6 +326,15 @@ func (m *mockRoundTripper) prefilterRequest(req *http.Request) error { req.Body = io.NopCloser(bytes.NewBuffer(b)) } + } else { + // When sending a delete request for a ComputeFirewallPolicyRule resource, + // The request URL looks like POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule. + // It's uncommon to use POST requests for delete operations, and a nil request body for POST method is unexpected, + // I got the "missing form body" error. 
Ref: https://go.dev/src/net/http/request.go?s=41070:41129 line 1340 + // So instead of sending a nil request body, send an empty request body to ensure successful processing of the remove rule request. + body := &bytes.Buffer{} + b := body.Bytes() + req.Body = io.NopCloser(bytes.NewBuffer(b)) } return nil } diff --git a/mockgcp/mockcompute/firewallpoliciesv1.go b/mockgcp/mockcompute/firewallpoliciesv1.go index 4c9ec1a0c9..d9ea6768d0 100644 --- a/mockgcp/mockcompute/firewallpoliciesv1.go +++ b/mockgcp/mockcompute/firewallpoliciesv1.go @@ -263,6 +263,7 @@ func (s *FirewallPoliciesV1) PatchRule(ctx context.Context, req *pb.PatchRuleFir return obj, nil }) } + func (s *FirewallPoliciesV1) RemoveRule(ctx context.Context, req *pb.RemoveRuleFirewallPolicyRequest) (*pb.Operation, error) { reqName := "locations/global/firewallPolicies/" + req.GetFirewallPolicy() name, err := s.parseFirewallPolicyName(reqName) diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go index 115a71e84f..b811338249 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go @@ -255,17 +255,14 @@ func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.U // Delete implements the Adapter interface. func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { - log := klog.FromContext(ctx).WithName(ctrlName) log.V(2).Info("deleting ComputeFirewallPolicyRule", "priority", a.priority) - var err error - op := &gcp.Operation{} - req := &computepb.RemoveRuleFirewallPolicyRequest{ + delReq := &computepb.RemoveRuleFirewallPolicyRequest{ FirewallPolicy: a.firewallPolicy, Priority: direct.PtrTo(int32(a.priority)), } - op, err = a.firewallPoliciesClient.RemoveRule(ctx, req) + op, err := a.firewallPoliciesClient.RemoveRule(ctx, delReq) if err != nil { return false, fmt.Errorf("deleting ComputeFirewallPolicyRule %d: %w", a.priority, err) diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml index 95335ff087..4893b524e3 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml @@ -48,4 +48,4 @@ status: type: Ready kind: compute#firewallPolicyRule observedGeneration: 2 - ruleTupleCount: 110 + ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log index 5817ee2cd4..246637e3be 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log +++ 
b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log @@ -304,7 +304,6 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", @@ -440,7 +439,6 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -536,99 +534,6 @@ X-Xss-Protection: 0 --- -GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -404 Not Found -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 404, - "errors": [ - { - "domain": "global", - "message": "Service account projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com does not exist.", - "reason": "notFound" - } - ], - "message": "Service account projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com does not exist.", - "status": "NOT_FOUND" - } -} - ---- - -POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false -Content-Type: application/json -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -{ - "accountId": "sa-${uniqueId}", - "serviceAccount": {} -} - -409 Conflict -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 409, - "errors": [ - { - "domain": "global", - "message": "Service account sa-${uniqueId} already exists within project projects/${projectId}.", - "reason": "alreadyExists" - } - ], - "message": "Service account sa-${uniqueId} already exists within project projects/${projectId}.", - "status": "ALREADY_EXISTS" - } -} - ---- - -GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", - "etag": "abcdef0123A=", - "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", - "oauth2ClientId": "888888888888888888888", - "projectId": 
"${projectId}", - "uniqueId": "111111111111111111111" -} - ---- - GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager @@ -735,13 +640,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -905,7 +803,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 109, + "ruleTupleCount": 4, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" ], @@ -1074,7 +972,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 110, + "ruleTupleCount": 4, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" ], @@ -1171,13 +1069,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -1293,7 +1184,6 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -1389,7 +1279,6 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml index 65e5585f7f..0d2386e0df 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml @@ -48,4 +48,4 @@ status: type: Ready kind: compute#firewallPolicyRule observedGeneration: 2 - ruleTupleCount: 110 + ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log index 
17b6fb2fbf..7a13ce286b 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log @@ -304,7 +304,6 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", @@ -440,7 +439,6 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -642,13 +640,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -812,7 +803,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 109, + "ruleTupleCount": 4, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" ], @@ -981,7 +972,7 @@ X-Xss-Protection: 0 ] }, "priority": 9000, - "ruleTupleCount": 110, + "ruleTupleCount": 4, "targetResources": [ "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" ], @@ -1078,13 +1069,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
} } @@ -1200,7 +1184,6 @@ X-Xss-Protection: 0 "name": "network-2-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", @@ -1296,7 +1279,6 @@ X-Xss-Protection: 0 "name": "network-${uniqueId}", "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", "routingConfig": { - "bgpBestPathSelectionMode": "LEGACY", "routingMode": "REGIONAL" }, "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log index b9c2b34321..c08bdcdb0b 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log @@ -196,13 +196,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." } } @@ -346,8 +339,7 @@ x-goog-request-params: firewall_policy=${firewallPolicyId} "srcIpRanges": [ "10.100.0.1/32" ] - }, - "priority": 9000 + } } 200 OK @@ -535,13 +527,6 @@ X-Xss-Protection: 0 { "error": { "code": 400, - "errors": [ - { - "domain": "global", - "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - "reason": "invalid" - } - ], "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
} } From 7fa9f2d2f612ed5546671a1861492f1f3a3c4326 Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Thu, 10 Oct 2024 17:15:11 +0000 Subject: [PATCH 04/31] Use direct controller in tests --- .../v1beta1/firewallpolicyrule_types.go | 4 +- dev/tasks/run-e2e | 2 +- .../compute/firewallpolicyrule/client.go | 88 ------------------- .../firewallpolicyrule_controller.go | 69 ++++++--------- .../firewallpolicyrule_externalresource.go | 26 ------ .../direct/compute/firewallpolicyrule/refs.go | 1 + pkg/controller/direct/maputils.go | 4 +- 7 files changed, 34 insertions(+), 160 deletions(-) delete mode 100644 pkg/controller/direct/compute/firewallpolicyrule/client.go delete mode 100644 pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go diff --git a/apis/compute/v1beta1/firewallpolicyrule_types.go b/apis/compute/v1beta1/firewallpolicyrule_types.go index da8d6eac6e..b64b86b02e 100644 --- a/apis/compute/v1beta1/firewallpolicyrule_types.go +++ b/apis/compute/v1beta1/firewallpolicyrule_types.go @@ -25,8 +25,8 @@ import ( var ( ComputeFirewallPolicyRuleGVK = schema.GroupVersionKind{ - Group: SchemeGroupVersion.Group, - Version: SchemeGroupVersion.Version, + Group: GroupVersion.Group, + Version: GroupVersion.Version, Kind: "ComputeFirewallPolicyRule", } ) diff --git a/dev/tasks/run-e2e b/dev/tasks/run-e2e index c649780576..99462cbe78 100755 --- a/dev/tasks/run-e2e +++ b/dev/tasks/run-e2e @@ -26,7 +26,7 @@ if [[ -z "${KUBEBUILDER_ASSETS:-}" ]]; then fi if [[ -z "${KCC_USE_DIRECT_RECONCILERS:-}" ]]; then - KCC_USE_DIRECT_RECONCILERS=ComputeForwardingRule,GKEHubFeatureMembership,SecretManagerSecret + KCC_USE_DIRECT_RECONCILERS=ComputeFirewallPolicyRule,ComputeForwardingRule,GKEHubFeatureMembership,SecretManagerSecret fi echo "Using direct controllers: $KCC_USE_DIRECT_RECONCILERS" export KCC_USE_DIRECT_RECONCILERS diff --git a/pkg/controller/direct/compute/firewallpolicyrule/client.go b/pkg/controller/direct/compute/firewallpolicyrule/client.go deleted file mode 100644 index cbcc0207ae..0000000000 --- a/pkg/controller/direct/compute/firewallpolicyrule/client.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package firewallpolicyrule - -import ( - "context" - "fmt" - "net/http" - - api "cloud.google.com/go/compute/apiv1" - "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" - "google.golang.org/api/option" -) - -type gcpClient struct { - config config.ControllerConfig -} - -func newGCPClient(ctx context.Context, config *config.ControllerConfig) (*gcpClient, error) { - gcpClient := &gcpClient{ - config: *config, - } - return gcpClient, nil -} - -func (m *gcpClient) options() ([]option.ClientOption, error) { - var opts []option.ClientOption - if m.config.UserAgent != "" { - opts = append(opts, option.WithUserAgent(m.config.UserAgent)) - } - if m.config.HTTPClient != nil { - // TODO: Set UserAgent in this scenario (error is: WithHTTPClient is incompatible with gRPC dial options) - - httpClient := &http.Client{} - *httpClient = *m.config.HTTPClient - httpClient.Transport = &optionsRoundTripper{ - config: m.config, - inner: m.config.HTTPClient.Transport, - } - opts = append(opts, option.WithHTTPClient(httpClient)) - } - if m.config.UserProjectOverride && m.config.BillingProject != "" { - opts = append(opts, option.WithQuotaProject(m.config.BillingProject)) - } - - // TODO: support endpoints? - // if m.config.Endpoint != "" { - // opts = append(opts, option.WithEndpoint(m.config.Endpoint)) - // } - - return opts, nil -} - -type optionsRoundTripper struct { - config config.ControllerConfig - inner http.RoundTripper -} - -func (m *optionsRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { - if m.config.UserAgent != "" { - req.Header.Set("User-Agent", m.config.UserAgent) - } - return m.inner.RoundTrip(req) -} - -func (m *gcpClient) firewallPoliciesClient(ctx context.Context) (*api.FirewallPoliciesClient, error) { - opts, err := m.options() - if err != nil { - return nil, err - } - client, err := api.NewFirewallPoliciesRESTClient(ctx, opts...) - if err != nil { - return nil, fmt.Errorf("building FirewallPolicy client: %w", err) - } - return client, err -} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go index b811338249..d8efc515e9 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go @@ -18,6 +18,8 @@ import ( "context" "fmt" + "google.golang.org/api/option" + gcp "cloud.google.com/go/compute/apiv1" computepb "cloud.google.com/go/compute/apiv1/computepb" krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/compute/v1beta1" @@ -59,6 +61,19 @@ type firewallPolicyRuleAdapter struct { var _ directbase.Adapter = &firewallPolicyRuleAdapter{} +func (m *firewallPolicyRuleModel) client(ctx context.Context) (*gcp.FirewallPoliciesClient, error) { + var opts []option.ClientOption + opts, err := m.config.RESTClientOptions() + if err != nil { + return nil, err + } + gcpClient, err := gcp.NewFirewallPoliciesRESTClient(ctx, opts...) 
+ if err != nil { + return nil, fmt.Errorf("building FirewallPolicy client: %w", err) + } + return gcpClient, err +} + func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader client.Reader, u *unstructured.Unstructured) (directbase.Adapter, error) { obj := &krm.ComputeFirewallPolicyRule{} if err := runtime.DefaultUnstructuredConverter.FromUnstructured(u.Object, &obj); err != nil { @@ -88,16 +103,11 @@ func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader c } // Get GCP client - gcpClient, err := newGCPClient(ctx, m.config) + gcpClient, err := m.client(ctx) if err != nil { return nil, fmt.Errorf("building gcp client: %w", err) } - - firewallPoliciesClient, err := gcpClient.firewallPoliciesClient(ctx) - if err != nil { - return nil, err - } - firewallPolicyRuleAdapter.firewallPoliciesClient = firewallPoliciesClient + firewallPolicyRuleAdapter.firewallPoliciesClient = gcpClient return firewallPolicyRuleAdapter, nil } @@ -113,10 +123,10 @@ func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { firewallPolicyRule, err := a.get(ctx) if err != nil { - // When a certain rule does not exist, the error has code 400(invalid) instead of 404(not found) + // When a certain rule does not exist, the error has code 400(bad request) instead of 404(not found) // example error message: // "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000.", - if direct.IsInvalidValue(err) { + if direct.IsBadRequest(err) { return false, nil } return false, fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) @@ -126,7 +136,6 @@ func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { } func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error { - u := createOp.GetUnstructured() var err error err = resolveDependencies(ctx, a.reader, a.desired) @@ -169,15 +178,12 @@ func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *direct return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) } - status := &krm.ComputeFirewallPolicyRuleStatus{ - RuleTupleCount: direct.PtrTo(int64(*created.RuleTupleCount)), - Kind: direct.PtrTo("compute#firewallPolicyRule"), - } - return setStatus(u, status) + status := &krm.ComputeFirewallPolicyRuleStatus{} + status = ComputeFirewallPolicyRuleStatus_FromProto(mapCtx, created) + return createOp.UpdateStatus(ctx, status, nil) } func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *directbase.UpdateOperation) error { - u := updateOp.GetUnstructured() var err error err = resolveDependencies(ctx, a.reader, a.desired) @@ -194,9 +200,10 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct if mapCtx.Err() != nil { return mapCtx.Err() } + // The field priority should be removed from the patch request body and included as a query parameter. 
+ // See API doc: https://cloud.google.com/compute/docs/reference/rest/v1/firewallPolicies/patchRule#query-parameters firewallPolicyRule.Priority = nil - op := &gcp.Operation{} updated := &computepb.FirewallPolicyRule{} updateReq := &computepb.PatchRuleFirewallPolicyRequest{ @@ -204,7 +211,7 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct FirewallPolicy: a.firewallPolicy, Priority: direct.PtrTo(int32(a.priority)), } - op, err = a.firewallPoliciesClient.PatchRule(ctx, updateReq) + op, err := a.firewallPoliciesClient.PatchRule(ctx, updateReq) if err != nil { return fmt.Errorf("updating ComputeFirewallPolicyRule %d: %w", a.priority, err) } @@ -222,11 +229,9 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) } - status := &krm.ComputeFirewallPolicyRuleStatus{ - RuleTupleCount: direct.PtrTo(int64(*updated.RuleTupleCount)), - Kind: direct.PtrTo("compute#firewallPolicyRule"), - } - return setStatus(u, status) + status := &krm.ComputeFirewallPolicyRuleStatus{} + status = ComputeFirewallPolicyRuleStatus_FromProto(mapCtx, updated) + return updateOp.UpdateStatus(ctx, status, nil) } func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.Unstructured, error) { @@ -290,21 +295,3 @@ func (a *firewallPolicyRuleAdapter) get(ctx context.Context) (*computepb.Firewal } return a.firewallPoliciesClient.GetRule(ctx, getReq) } - -func setStatus(u *unstructured.Unstructured, typedStatus any) error { - status, err := runtime.DefaultUnstructuredConverter.ToUnstructured(typedStatus) - if err != nil { - return fmt.Errorf("error converting status to unstructured: %w", err) - } - - old, _, _ := unstructured.NestedMap(u.Object, "status") - if old != nil { - status["conditions"] = old["conditions"] - status["observedGeneration"] = old["observedGeneration"] - status["externalRef"] = old["externalRef"] - } - - u.Object["status"] = status - - return nil -} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go deleted file mode 100644 index fc8edc4366..0000000000 --- a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_externalresource.go +++ /dev/null @@ -1,26 +0,0 @@ -/* -Copyright 2024. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ -package firewallpolicyrule -const ( - serviceDomain = "//compute.googleapis.com" -) -type FirewallPolicyRuleIdentity struct { - firewallPolicy string - priority int64 -} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/refs.go b/pkg/controller/direct/compute/firewallpolicyrule/refs.go index cdb3e61ab5..6b19ad6a0c 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/refs.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/refs.go @@ -17,6 +17,7 @@ package firewallpolicyrule import ( "context" "fmt" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/compute/v1beta1" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" diff --git a/pkg/controller/direct/maputils.go b/pkg/controller/direct/maputils.go index 43796127dd..34ba5a03f5 100644 --- a/pkg/controller/direct/maputils.go +++ b/pkg/controller/direct/maputils.go @@ -228,8 +228,8 @@ func IsNotFound(err error) bool { return HasHTTPCode(err, 404) } -// IsInvalidValue returns true if the given error is an HTTP 400. -func IsInvalidValue(err error) bool { +// IsBadRequest returns true if the given error is an HTTP 400. +func IsBadRequest(err error) bool { return HasHTTPCode(err, 400) } From b65ca596308b97ae7dc60c439a489ff36d4bafef Mon Sep 17 00:00:00 2001 From: xiaoweim Date: Thu, 31 Oct 2024 22:34:20 +0000 Subject: [PATCH 05/31] tests: fix mockgcp for bigquery to mimic real GCP behavior --- mockgcp/mockbigquery/datasets.go | 6 ++++++ .../_generated_export_basicbigquerydataset.golden | 1 + .../v1beta1/bigquerydataset/basicbigquerydataset/_http.log | 6 +++++- .../_generated_export_bigquerydatasetaccessblock.golden | 1 + .../bigquerydataset/bigquerydatasetaccessblock/_http.log | 6 +++++- .../testdata/basic/bigquery/v1beta1/bigquerytable/_http.log | 2 ++ .../bigquerydatatransferconfig-salesforce/_http.log | 2 ++ .../bigquerydatatransferconfig-scheduledquery/_http.log | 2 ++ .../streamingdataflowflextemplatejob/_http.log | 2 ++ .../pubsubsubscription/bigquerypubsubsubscription/_http.log | 2 ++ .../projectid/_generated_export_projectid.golden | 1 + .../testdata/containerannotations/projectid/_http.log | 6 +++++- .../_generated_export_bigquerydataset.golden | 1 + .../reconcileintervalannotations/bigquerydataset/_http.log | 6 +++++- .../referencewithuserspecifiedresourceid/_http.log | 2 ++ .../_generated_export_userspecifiedresourceid.golden | 1 + .../testdata/resourceid/userspecifiedresourceid/_http.log | 6 +++++- .../_generated_export_bigquerydataset#01.golden | 1 + .../testdata/stateabsentinspec/bigquerydataset/_http.log | 6 +++++- 19 files changed, 54 insertions(+), 6 deletions(-) diff --git a/mockgcp/mockbigquery/datasets.go b/mockgcp/mockbigquery/datasets.go index eeff800705..243a442b68 100644 --- a/mockgcp/mockbigquery/datasets.go +++ b/mockgcp/mockbigquery/datasets.go @@ -31,6 +31,8 @@ import ( "github.com/golang/protobuf/ptypes/empty" ) +var defaultMaxTimeTravelHours = int64(168) + type datasetsServer struct { *MockService pb.UnimplementedDatasetsServerServer @@ -52,6 +54,10 @@ func (s *datasetsServer) GetDataset(ctx context.Context, req *pb.GetDatasetReque return nil, err } + if obj.MaxTimeTravelHours == nil { + obj.MaxTimeTravelHours = &defaultMaxTimeTravelHours + } + return obj, nil } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden
b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden index 09677b54be..95391263e2 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden @@ -19,6 +19,7 @@ spec: specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log index e955a3353f..4f53cdab6e 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -183,7 +184,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -231,6 +233,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -286,6 +289,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_generated_export_bigquerydatasetaccessblock.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_generated_export_bigquerydatasetaccessblock.golden index 116ccae263..778d0db239 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_generated_export_bigquerydatasetaccessblock.golden +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_generated_export_bigquerydatasetaccessblock.golden @@ -19,6 +19,7 @@ spec: description: BigQuery Dataset With Access Block v2 friendlyName: bigquerydataset-accessblock location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydataset${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_http.log index 28d595a03c..fddff7d80b 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_http.log +++ 
b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -181,7 +182,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -227,6 +229,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -280,6 +283,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log index b374b49161..4263316151 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log @@ -143,6 +143,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -881,6 +882,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log index bdf388a412..eeb7c8bab2 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log @@ -143,6 +143,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "us-central1", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -407,6 +408,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "us-central1", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log index 
bf9fcf2aee..52f923dac9 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log @@ -143,6 +143,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "us-central1", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -542,6 +543,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "us-central1", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log index d63f534bb0..6f2bafb6f2 100644 --- a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log @@ -143,6 +143,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -1413,6 +1414,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log index 8223250a6e..4cb23f7d40 100644 --- a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log @@ -830,6 +830,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -1607,6 +1608,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden index 09677b54be..95391263e2 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden @@ -19,6 +19,7 @@ spec: specialGroup: projectWriters friendlyName: 
bigquerydataset-sample-updated location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log index e955a3353f..4f53cdab6e 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -183,7 +184,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -231,6 +233,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -286,6 +289,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden index 09677b54be..95391263e2 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden @@ -19,6 +19,7 @@ spec: specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log index e955a3353f..4f53cdab6e 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -183,7 +184,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -231,6 +233,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ 
-286,6 +289,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log index f30e67b6da..9c2d5b2eee 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log @@ -143,6 +143,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" } @@ -445,6 +446,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden index bba5c4cdbd..c6deeeb8c5 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden @@ -19,6 +19,7 @@ spec: specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydataset_${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log index 30769f1d3d..fd92a16a92 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" } @@ -183,7 +184,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -231,6 +233,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" } @@ -286,6 +289,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" } diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden 
b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden index 09677b54be..95391263e2 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden @@ -19,6 +19,7 @@ spec: specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated location: US + maxTimeTravelHours: "168" projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log index e955a3353f..4f53cdab6e 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log @@ -146,6 +146,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -183,7 +184,8 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "US", + "maxTimeTravelHours": "168" } 200 OK @@ -231,6 +233,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -286,6 +289,7 @@ X-Xss-Protection: 0 }, "lastModifiedTime": "123456789", "location": "US", + "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } From 53fdbe180af06d0126c85b1ca5f92fe77251327d Mon Sep 17 00:00:00 2001 From: justinsb Date: Fri, 18 Oct 2024 10:12:01 -0400 Subject: [PATCH 06/31] mockgcp: support for MonitoringServiceLevelObjective --- config/tests/samples/create/harness.go | 1 + mockgcp/common/fields/updatemask.go | 21 +- .../mockmonitoring/servicelevelobjective.go | 143 ++++++ ...ct_requestbaseddistributioncut.golden.yaml | 2 +- .../requestbaseddistributioncut/_http.log | 381 ++++++++++++++++ ...ect_requestbasedgoodtotalratio.golden.yaml | 2 +- .../requestbasedgoodtotalratio/_http.log | 363 +++++++++++++++ ...uestbasedgtrtotalservicefilter.golden.yaml | 2 +- .../_http.log | 363 +++++++++++++++ ...windowbasedgoodbadmetricfilter.golden.yaml | 2 +- .../windowbasedgoodbadmetricfilter/_http.log | 351 +++++++++++++++ ..._windowbasedgtrdistributioncut.golden.yaml | 2 +- .../windowbasedgtrdistributioncut/_http.log | 417 ++++++++++++++++++ ...t_windowbasedgtrperformancegtr.golden.yaml | 2 +- .../windowbasedgtrperformancegtr/_http.log | 399 +++++++++++++++++ ...rformancegtrtotalservicefilter.golden.yaml | 2 +- .../_http.log | 399 +++++++++++++++++ ...ct_windowbasedmetricmeanfilter.golden.yaml | 2 +- .../windowbasedmetricmeanfilter/_http.log | 387 ++++++++++++++++ ...ect_windowbasedmetricsumfilter.golden.yaml | 2 +- .../windowbasedmetricsumfilter/_http.log | 390 ++++++++++++++++ 21 files changed, 3616 insertions(+), 17 deletions(-) create mode 100644 mockgcp/mockmonitoring/servicelevelobjective.go create mode 100644 
pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_http.log diff --git a/config/tests/samples/create/harness.go b/config/tests/samples/create/harness.go index ba7a68dc86..e3b6bb26ed 100644 --- a/config/tests/samples/create/harness.go +++ b/config/tests/samples/create/harness.go @@ -809,6 +809,7 @@ func MaybeSkip(t *testing.T, name string, resources []*unstructured.Unstructured case schema.GroupKind{Group: "monitoring.cnrm.cloud.google.com", Kind: "MonitoringMonitoredProject"}: case schema.GroupKind{Group: "monitoring.cnrm.cloud.google.com", Kind: "MonitoringNotificationChannel"}: case schema.GroupKind{Group: "monitoring.cnrm.cloud.google.com", Kind: "MonitoringService"}: + case schema.GroupKind{Group: "monitoring.cnrm.cloud.google.com", Kind: "MonitoringServiceLevelObjective"}: case schema.GroupKind{Group: "monitoring.cnrm.cloud.google.com", Kind: "MonitoringUptimeCheckConfig"}: case schema.GroupKind{Group: "networkconnectivity.cnrm.cloud.google.com", Kind: "NetworkConnectivityServiceConnectionPolicy"}: diff --git a/mockgcp/common/fields/updatemask.go b/mockgcp/common/fields/updatemask.go index c254cbc32a..d3ece64457 100644 --- a/mockgcp/common/fields/updatemask.go +++ b/mockgcp/common/fields/updatemask.go @@ -21,6 +21,7 @@ import ( "google.golang.org/protobuf/proto" "google.golang.org/protobuf/reflect/protoreflect" + "k8s.io/klog/v2" ) // UpdateByFieldMask updates the `original` Message with the `update` Message value in the given `updatePaths` fields @@ -50,13 +51,6 @@ func replace(original, update protoreflect.Message, fieldName string) error { updateFd := update.Descriptor().Fields().ByJSONName(fieldName) updateVal := update.Get(updateFd) - if originalFd.Kind() != protoreflect.MessageKind { - if !original.IsValid() { - return fmt.Errorf("%s is read-only or empty", fieldName) - } - original.Set(updateFd, updateVal) - return nil - } // Update Map if originalFd.IsMap() { originalVal.Map().Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { @@ -77,7 +71,18 @@ func replace(original, update protoreflect.Message, fieldName string) error { } return nil } - return fmt.Errorf("unhandled type for field %v", fieldName) + + switch originalFd.Kind() { + case protoreflect.MessageKind, protoreflect.StringKind, 
protoreflect.DoubleKind, protoreflect.Int64Kind, protoreflect.Uint64Kind, protoreflect.BoolKind, protoreflect.EnumKind: + if !original.IsValid() { + return fmt.Errorf("%s is read-only or empty", fieldName) + } + original.Set(updateFd, updateVal) + return nil + default: + klog.Warningf("unhandled type %v for field %v", originalFd.Kind(), fieldName) + return fmt.Errorf("unhandled type %v for field %v", originalFd.Kind(), fieldName) + } } // originalChildMessage get the orignal Message's mutable reference to the `fieldName“ composite. diff --git a/mockgcp/mockmonitoring/servicelevelobjective.go b/mockgcp/mockmonitoring/servicelevelobjective.go new file mode 100644 index 0000000000..268fe9831b --- /dev/null +++ b/mockgcp/mockmonitoring/servicelevelobjective.go @@ -0,0 +1,143 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +tool:mockgcp-support +// krm.apiVersion: monitoring.cnrm.cloud.google.com/v1beta1 +// krm.kind: MonitoringServiceLevelObjective +// proto.service: google.monitoring.v3.ServiceMonitoringService +// proto.resource: ServiceLevelObjective + +package mockmonitoring + +import ( + "context" + "fmt" + "strings" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/common/fields" + "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/common/projects" + pb "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/monitoring/v3" +) + +func (s *serviceMonitoringService) GetServiceLevelObjective(ctx context.Context, req *pb.GetServiceLevelObjectiveRequest) (*pb.ServiceLevelObjective, error) { + name, err := s.parseServiceLevelObjectiveName(req.Name) + if err != nil { + return nil, err + } + + fqn := name.String() + + obj := &pb.ServiceLevelObjective{} + if err := s.storage.Get(ctx, fqn, obj); err != nil { + if status.Code(err) == codes.NotFound { + return nil, status.Errorf(codes.NotFound, "There is no slo with id %q under service %q in project %q", name.sloID, name.serviceID, name.Project.ID) + } + return nil, err + } + return obj, nil +} + +func (s *serviceMonitoringService) CreateServiceLevelObjective(ctx context.Context, req *pb.CreateServiceLevelObjectiveRequest) (*pb.ServiceLevelObjective, error) { + reqName := fmt.Sprintf("%s/serviceLevelObjectives/%s", req.GetParent(), req.GetServiceLevelObjectiveId()) + name, err := s.parseServiceLevelObjectiveName(reqName) + if err != nil { + return nil, err + } + + fqn := name.String() + + obj := proto.Clone(req.GetServiceLevelObjective()).(*pb.ServiceLevelObjective) + obj.Name = fqn + s.populateDefaultsForServiceLevelObjective(obj) + + if err := s.storage.Create(ctx, fqn, obj); err != nil { + return nil, err + } + + return obj, nil +} + +func (s *serviceMonitoringService) populateDefaultsForServiceLevelObjective(obj *pb.ServiceLevelObjective) { + +} + +func (s *serviceMonitoringService) 
UpdateServiceLevelObjective(ctx context.Context, req *pb.UpdateServiceLevelObjectiveRequest) (*pb.ServiceLevelObjective, error) { + name, err := s.parseServiceLevelObjectiveName(req.GetServiceLevelObjective().GetName()) + if err != nil { + return nil, err + } + fqn := name.String() + + obj := &pb.ServiceLevelObjective{} + if err := s.storage.Get(ctx, fqn, obj); err != nil { + return nil, err + } + + if err := fields.UpdateByFieldMask(obj, req.GetServiceLevelObjective(), req.GetUpdateMask().GetPaths()); err != nil { + return nil, err + } + + if err := s.storage.Update(ctx, fqn, obj); err != nil { + return nil, err + } + return obj, nil +} + +func (s *serviceMonitoringService) DeleteServiceLevelObjective(ctx context.Context, req *pb.DeleteServiceLevelObjectiveRequest) (*emptypb.Empty, error) { + name, err := s.parseServiceLevelObjectiveName(req.Name) + if err != nil { + return nil, err + } + fqn := name.String() + deleted := &pb.ServiceLevelObjective{} + + if err := s.storage.Delete(ctx, fqn, deleted); err != nil { + return nil, err + } + + return &emptypb.Empty{}, nil +} + +type serviceLevelObjectiveName struct { + Project *projects.ProjectData + serviceID string + sloID string +} + +func (n *serviceLevelObjectiveName) String() string { + return fmt.Sprintf("projects/%d/services/%s/serviceLevelObjectives/%s", n.Project.Number, n.serviceID, n.sloID) +} + +func (s *MockService) parseServiceLevelObjectiveName(name string) (*serviceLevelObjectiveName, error) { + tokens := strings.Split(name, "/") + if len(tokens) == 6 && tokens[0] == "projects" && tokens[2] == "services" && tokens[4] == "serviceLevelObjectives" { + project, err := s.Projects.GetProjectByIDOrNumber(tokens[1]) + if err != nil { + return nil, err + } + + n := &serviceLevelObjectiveName{ + Project: project, + serviceID: tokens[3], + sloID: tokens[5], + } + return n, nil + } + return nil, status.Errorf(codes.InvalidArgument, "invalid service name %q", name) +} diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_generated_object_requestbaseddistributioncut.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_generated_object_requestbaseddistributioncut.golden.yaml index ae4be80f36..2180ab851c 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_generated_object_requestbaseddistributioncut.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_generated_object_requestbaseddistributioncut.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_http.log new file mode 100644 index 0000000000..5c593589e6 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbaseddistributioncut/_http.log @@ -0,0 +1,381 @@ +GET 
https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "A request based distribution cut filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": 
"project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based distribution cut filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based distribution cut filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=displayName%2Cgoal%2CrollingPeriod%2CserviceLevelIndicator.requestBased.distributionCut.range.max%2CserviceLevelIndicator.requestBased.distributionCut.range.min%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "update request - a request based distribution cut filter", + "goal": 0.8, + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - a request based distribution cut filter", + "goal": 0.8, + "name": 
"projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - a request based distribution cut filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" \n resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_generated_object_requestbasedgoodtotalratio.golden.yaml 
b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_generated_object_requestbasedgoodtotalratio.golden.yaml index 4df578c967..8cfb8c497d 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_generated_object_requestbasedgoodtotalratio.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_generated_object_requestbasedgoodtotalratio.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_http.log new file mode 100644 index 0000000000..04b1ff0501 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgoodtotalratio/_http.log @@ -0,0 +1,363 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + 
+GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "A request based good total ratio filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based good total ratio filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based good total ratio filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "project = 
${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=displayName%2Cgoal%2CrollingPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "update request - a request based good total ratio filter", + "goal": 0.8, + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - a request based good total ratio filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - a request based good total ratio filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE 
https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_generated_object_requestbasedgtrtotalservicefilter.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_generated_object_requestbasedgtrtotalservicefilter.golden.yaml index a9a08a1af3..13d63818c0 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_generated_object_requestbasedgtrtotalservicefilter.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_generated_object_requestbasedgtrtotalservicefilter.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_http.log new file mode 100644 index 0000000000..e9c35bd6e4 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/requestbasedgtrtotalservicefilter/_http.log @@ -0,0 +1,363 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: 
application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "A request based good total ratio total service filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n 
resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based good total ratio total service filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A request based good total ratio total service filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=displayName%2Cgoal%2CrollingPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "update request- a request based good total ratio total service filter", + "goal": 0.8, + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request- a request based good total ratio total service filter", + "goal": 0.8, + 
"name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request- a request based good total ratio total service filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "requestBased": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git 
a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_generated_object_windowbasedgoodbadmetricfilter.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_generated_object_windowbasedgoodbadmetricfilter.golden.yaml index 1f6f51b665..855abd96bb 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_generated_object_windowbasedgoodbadmetricfilter.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_generated_object_windowbasedgoodbadmetricfilter.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_http.log new file mode 100644 index 0000000000..281b443fd2 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgoodbadmetricfilter/_http.log @@ -0,0 +1,351 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + 
"custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good bad metric filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "project = ${projectId}\n metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good bad metric filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "project = ${projectId}\n metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good bad metric filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "project = ${projectId}\n 
metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=calendarPeriod%2CdisplayName%2Cgoal%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good bad metric filter", + "goal": 0.8, + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good bad metric filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "project = ${projectId}\n metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good bad metric filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodBadMetricFilter": "project = ${projectId}\n metric.type=\"monitoring.googleapis.com/uptime_check/check_passed\" \n resource.type=\"uptime_url\"", + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET 
https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_generated_object_windowbasedgtrdistributioncut.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_generated_object_windowbasedgtrdistributioncut.golden.yaml index a7a7b19898..e1adbd51c9 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_generated_object_windowbasedgtrdistributioncut.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_generated_object_windowbasedgtrdistributioncut.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_http.log new file mode 100644 index 0000000000..86845a4140 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrdistributioncut/_http.log @@ -0,0 +1,417 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager 
DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio distribution cut filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio distribution cut filter", + "goal": 0.9, + "name": 
"projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio distribution cut filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 100, + "min": 50 + } + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=calendarPeriod%2CdisplayName%2Cgoal%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.performance.distributionCut.distributionFilter%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.performance.distributionCut.range.max%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.performance.distributionCut.range.min%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.threshold%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "FORTNIGHT", + "displayName": "update request - a window based good total ratio distribution cut filter", + "goal": 0.8, + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "FORTNIGHT", + "displayName": "update request - a window based good total ratio distribution 
cut filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "FORTNIGHT", + "displayName": "update request - a window based good total ratio distribution cut filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "distributionCut": { + "distributionFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_latencies\" resource.type=\"api\" ", + "range": { + "max": 80, + "min": 20 + } + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git 
a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_generated_object_windowbasedgtrperformancegtr.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_generated_object_windowbasedgtrperformancegtr.golden.yaml index 979a3e27a7..c9dc6fca04 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_generated_object_windowbasedgtrperformancegtr.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_generated_object_windowbasedgtrperformancegtr.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_http.log new file mode 100644 index 0000000000..703f54b9a4 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtr/_http.log @@ -0,0 +1,399 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + 
"displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer 
+X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=calendarPeriod%2Cgoal%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.threshold%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "WEEK", + "displayName": "A window based good total ratio performance filter", + "goal": 0.8, + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "WEEK", + "displayName": "A window based good total ratio performance filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + 
+{ + "calendarPeriod": "WEEK", + "displayName": "A window based good total ratio performance filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "badServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_generated_object_windowbasedgtrperformancegtrtotalservicefilter.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_generated_object_windowbasedgtrperformancegtrtotalservicefilter.golden.yaml index d111ddef7c..47b5e93dd1 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_generated_object_windowbasedgtrperformancegtrtotalservicefilter.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_generated_object_windowbasedgtrperformancegtrtotalservicefilter.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - 
cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_http.log new file mode 100644 index 0000000000..542d649fcf --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedgtrperformancegtrtotalservicefilter/_http.log @@ -0,0 +1,399 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service 
\"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "DAY", + "displayName": "A window based good total ratio performance filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.9 + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": 
"true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=calendarPeriod%2CdisplayName%2Cgoal%2CserviceLevelIndicator.windowsBased.goodTotalRatioThreshold.threshold%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good total ratio performance filter", + "goal": 0.8, + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good total ratio performance filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "calendarPeriod": "WEEK", + "displayName": "update request - a window based good total ratio performance filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "serviceLevelIndicator": { + "windowsBased": { + "goodTotalRatioThreshold": { + "performance": { + "goodTotalRatio": { + "goodServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" ", + "totalServiceFilter": "project = ${projectId}\n metric.type=\"serviceruntime.googleapis.com/api/request_count\" \n resource.type=\"api\" " + } + }, + "threshold": 0.8 + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + 
"managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_generated_object_windowbasedmetricmeanfilter.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_generated_object_windowbasedmetricmeanfilter.golden.yaml index 5602739cda..bde260fa4f 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_generated_object_windowbasedmetricmeanfilter.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_generated_object_windowbasedmetricmeanfilter.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_http.log new file mode 100644 index 0000000000..5baa9b25be --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricmeanfilter/_http.log @@ -0,0 +1,387 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: 
application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "A window based metric mean filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + 
"test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A window based metric mean filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A window based metric mean filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=displayName%2Cgoal%2CrollingPeriod%2CserviceLevelIndicator.windowsBased.metricMeanInRange.range.max%2CserviceLevelIndicator.windowsBased.metricMeanInRange.range.min%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "update request - window based metric mean filter", + "goal": 0.8, + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - window based metric mean filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": 
{ + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "update request - window based metric mean filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "windowsBased": { + "metricMeanInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_generated_object_windowbasedmetricsumfilter.golden.yaml b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_generated_object_windowbasedmetricsumfilter.golden.yaml index 
311bff0c0b..16d8b7c166 100644 --- a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_generated_object_windowbasedmetricsumfilter.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_generated_object_windowbasedmetricsumfilter.golden.yaml @@ -3,7 +3,7 @@ kind: MonitoringServiceLevelObjective metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none - cnrm.cloud.google.com/state-into-spec: merge + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_http.log b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_http.log new file mode 100644 index 0000000000..7bbafa0252 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/monitoring/v1beta1/monitoringservicelevelobjective/windowbasedmetricsumfilter/_http.log @@ -0,0 +1,390 @@ +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no service with id 'monitoringservice-${uniqueId}' in project '${projectNumber}'", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services?alt=json&serviceId=monitoringservice-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json 
+Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "There is no slo with id \"monitoringservicelevelobjective-${uniqueId}\" under service \"monitoringservice-${uniqueId}\" in project \"${projectId}\"", + "status": "NOT_FOUND" + } +} + +--- + +POST https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives?alt=json&serviceLevelObjectiveId=monitoringservicelevelobjective-${uniqueId} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "A window based metric sum filter", + "goal": 0.9, + "name": "projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A window based metric sum filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "A window based metric sum filter", + "goal": 0.9, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "86400s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 100, + "min": 50 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "60s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "value1" + } +} + +--- + +PATCH 
https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json&updateMask=displayName%2Cgoal%2CrollingPeriod%2CserviceLevelIndicator.windowsBased.metricSumInRange.range.max%2CserviceLevelIndicator.windowsBased.metricSumInRange.range.min%2CserviceLevelIndicator.windowsBased.windowPeriod%2CuserLabels +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "displayName": "updae request - a window based metric sum filter", + "goal": 0.8, + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "updatevalue1", + "test2": "value2" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "updae request - a window based metric sum filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "updatevalue1", + "test2": "value2" + } +} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "displayName": "updae request - a window based metric sum filter", + "goal": 0.8, + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}", + "rollingPeriod": "172800s", + "serviceLevelIndicator": { + "windowsBased": { + "metricSumInRange": { + "range": { + "max": 80, + "min": 20 + }, + "timeSeries": "project = ${projectId}\n resource.type=\"gce_instance\" \nmetric.type=\"compute.googleapis.com/instance/cpu/usage_time\"" + }, + "windowPeriod": "120s" + } + }, + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true", + "test1": "updatevalue1", + "test2": "value2" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}/serviceLevelObjectives/monitoringservicelevelobjective-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer 
+X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "custom": {}, + "displayName": "A basic monitoring service.", + "name": "projects/${projectNumber}/services/monitoringservice-${uniqueId}", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } +} + +--- + +DELETE https://monitoring.googleapis.com/v3/projects/${projectId}/services/monitoringservice-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} \ No newline at end of file From c2ffa541810b68521a23a895b15283b79cd92a15 Mon Sep 17 00:00:00 2001 From: Yuwen Ma Date: Fri, 1 Nov 2024 19:57:50 +0000 Subject: [PATCH 07/31] feat: secretRef in BQCC --- .../v1alpha1/connection_types.go | 8 ++ .../v1alpha1/types.generated.go | 9 --- .../v1alpha1/zz_generated.deepcopy.go | 12 +-- apis/refs/v1beta1/secret/basicauth.go | 57 +++++++++++++ apis/refs/v1beta1/secret/interface.go | 62 ++++++++++++++ ...queryconnection.cnrm.cloud.google.com.yaml | 22 +++-- .../connection_controller.go | 80 ++++++++++++------- .../bigqueryconnection/connection_mapping.go | 23 ++++++ .../bigqueryconnection/mapper.generated.go | 18 ----- 9 files changed, 219 insertions(+), 72 deletions(-) create mode 100644 apis/refs/v1beta1/secret/basicauth.go create mode 100644 apis/refs/v1beta1/secret/interface.go diff --git a/apis/bigqueryconnection/v1alpha1/connection_types.go b/apis/bigqueryconnection/v1alpha1/connection_types.go index 45dbadd1d8..4a6143ebb1 100644 --- a/apis/bigqueryconnection/v1alpha1/connection_types.go +++ b/apis/bigqueryconnection/v1alpha1/connection_types.go @@ -16,6 +16,7 @@ package v1alpha1 import ( refv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + refsv1beta1secret "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1/secret" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -302,6 +303,13 @@ type SparkPropertiesStatus struct { ServiceAccountID *string `json:"serviceAccountID,omitempty"` } +// +kcc:proto=google.cloud.bigquery.connection.v1.CloudSqlCredential +type CloudSqlCredential struct { + // The Kubernetes Secret object that stores the "username" and "password" information. + // The Secret type has to be `kubernetes.io/basic-auth`. 
+ SecretRef *refsv1beta1secret.BasicAuthSecret `json:"secretRef,omitempty"` +} + // +genclient // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // +kubebuilder:resource:categories=gcp,shortName=gcpbigqueryconnectionconnection;gcpbigqueryconnectionconnections diff --git a/apis/bigqueryconnection/v1alpha1/types.generated.go b/apis/bigqueryconnection/v1alpha1/types.generated.go index 5d6c5c4773..a1fe20d015 100644 --- a/apis/bigqueryconnection/v1alpha1/types.generated.go +++ b/apis/bigqueryconnection/v1alpha1/types.generated.go @@ -141,15 +141,6 @@ type CloudSpannerProperties struct { DatabaseRole *string `json:"databaseRole,omitempty"` } -// +kcc:proto=google.cloud.bigquery.connection.v1.CloudSqlCredential -type CloudSqlCredential struct { - // The username for the credential. - Username *string `json:"username,omitempty"` - - // The password for the credential. - Password *string `json:"password,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.connection.v1.CloudSqlProperties type CloudSqlProperties struct { // Cloud SQL instance ID in the form `project:location:instance`. diff --git a/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go b/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go index 6b8e30b089..7584da0990 100644 --- a/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go +++ b/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go @@ -20,6 +20,7 @@ package v1alpha1 import ( "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1/secret" k8sv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" runtime "k8s.io/apimachinery/pkg/runtime" ) @@ -672,14 +673,9 @@ func (in *CloudSpannerPropertiesSpec) DeepCopy() *CloudSpannerPropertiesSpec { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *CloudSqlCredential) DeepCopyInto(out *CloudSqlCredential) { *out = *in - if in.Username != nil { - in, out := &in.Username, &out.Username - *out = new(string) - **out = **in - } - if in.Password != nil { - in, out := &in.Password, &out.Password - *out = new(string) + if in.SecretRef != nil { + in, out := &in.SecretRef, &out.SecretRef + *out = new(secret.BasicAuthSecret) **out = **in } } diff --git a/apis/refs/v1beta1/secret/basicauth.go b/apis/refs/v1beta1/secret/basicauth.go new file mode 100644 index 0000000000..4287a712b3 --- /dev/null +++ b/apis/refs/v1beta1/secret/basicauth.go @@ -0,0 +1,57 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "fmt" + + corev1 "k8s.io/api/core/v1" +) + +var _ SecretRef = &BasicAuthSecret{} + +type BasicAuthSecret struct { + // +required + // The `metadata.name` field of a Kubernetes `Secret` + Name string `json:"name,omitempty"` + // The `metadata.namespace` field of a Kubernetes `Secret`. 
+ Namespace string `json:"namespace,omitempty"` + + Username string `json:"-"` + Password string `json:"-"` +} + +func (b *BasicAuthSecret) GetName() string { + return b.Name +} +func (b *BasicAuthSecret) GetNamespace() string { + return b.Namespace +} + +func (b *BasicAuthSecret) Set(secret *corev1.Secret) error { + if secret.Type != corev1.SecretTypeBasicAuth { + return fmt.Errorf("the referenced Secret in `spec.cloudSQL.credential.secretRef` should use type %s, got %s", + corev1.SecretTypeBasicAuth, secret.Type) + } + if secret.Data != nil { + b.Username = string(secret.Data["username"]) + b.Password = string(secret.Data["password"]) + } + if secret.StringData != nil { + b.Username = secret.StringData["username"] + b.Password = secret.StringData["password"] + } + return nil +} diff --git a/apis/refs/v1beta1/secret/interface.go b/apis/refs/v1beta1/secret/interface.go new file mode 100644 index 0000000000..8d32aae272 --- /dev/null +++ b/apis/refs/v1beta1/secret/interface.go @@ -0,0 +1,62 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "context" + "fmt" + + corev1 "k8s.io/api/core/v1" + apierrors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +type SecretRef interface { + GetName() string + GetNamespace() string + Set(*corev1.Secret) error +} + +func NormalizedSecret(ctx context.Context, r SecretRef, reader client.Reader, otherNamespace string) error { + if r == nil { + return nil + } + if r.GetName() == "" { + return fmt.Errorf("Secret `name` is required ") + } + nn := types.NamespacedName{ + Namespace: r.GetNamespace(), + Name: r.GetName(), + } + if nn.Namespace == "" { + nn.Namespace = otherNamespace + } + + secret := &corev1.Secret{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "v1", + Kind: "Secret", + }, + } + if err := reader.Get(ctx, nn, secret); err != nil { + if apierrors.IsNotFound(err) { + return fmt.Errorf("referenced Secret %v not found", nn) + } + return fmt.Errorf("error reading referenced Secret %v: %w", nn, err) + } + return r.Set(secret) +} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryconnectionconnections.bigqueryconnection.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryconnectionconnections.bigqueryconnection.cnrm.cloud.google.com.yaml index 905fb094a6..11536fbff5 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryconnectionconnections.bigqueryconnection.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryconnectionconnections.bigqueryconnection.cnrm.cloud.google.com.yaml @@ -100,12 +100,22 @@ spec: credential: description: Cloud SQL credential. properties: - password: - description: The password for the credential. 
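As a usage sketch (not part of this patch): the new BasicAuthSecret and NormalizedSecret helpers are meant to be combined roughly as below. The fake client, Secret name, namespace, and credential values are illustrative assumptions.

// Sketch only: resolving a kubernetes.io/basic-auth Secret into a BasicAuthSecret.
package example

import (
	"context"

	refsv1beta1secret "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1/secret"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"sigs.k8s.io/controller-runtime/pkg/client/fake"
)

func resolveBasicAuthExample() error {
	// A basic-auth Secret as a user would create it next to the Connection resource.
	secret := &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{Name: "sql-credential", Namespace: "default"},
		Type:       corev1.SecretTypeBasicAuth,
		Data: map[string][]byte{
			"username": []byte("sqluser"),
			"password": []byte("changeme"),
		},
	}
	reader := fake.NewClientBuilder().WithObjects(secret).Build()

	// The reference as it would appear in spec.cloudSQL.credential.secretRef.
	// Namespace is left empty, so NormalizedSecret falls back to the resource
	// namespace passed as the last argument.
	ref := &refsv1beta1secret.BasicAuthSecret{Name: "sql-credential"}
	if err := refsv1beta1secret.NormalizedSecret(context.TODO(), ref, reader, "default"); err != nil {
		return err
	}

	// Username/Password are now populated in memory only; both carry the
	// `json:"-"` tag, so they are never serialized back into the KRM object.
	_ = ref.Username
	_ = ref.Password
	return nil
}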
- type: string - username: - description: The username for the credential. - type: string + secretRef: + description: The Kubernetes Secret object that stores the + "username" and "password" information. The Secret type has + to be `kubernetes.io/basic-auth`. + properties: + name: + description: The `metadata.name` field of a Kubernetes + `Secret` + type: string + namespace: + description: The `metadata.namespace` field of a Kubernetes + `Secret`. + type: string + required: + - name + type: object type: object database: description: Database name. diff --git a/pkg/controller/direct/bigqueryconnection/connection_controller.go b/pkg/controller/direct/bigqueryconnection/connection_controller.go index 745d94fec1..83d1241937 100644 --- a/pkg/controller/direct/bigqueryconnection/connection_controller.go +++ b/pkg/controller/direct/bigqueryconnection/connection_controller.go @@ -19,17 +19,16 @@ import ( "fmt" "strings" + gcp "cloud.google.com/go/bigquery/connection/apiv1" + bigqueryconnectionpb "cloud.google.com/go/bigquery/connection/apiv1/connectionpb" krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigqueryconnection/v1alpha1" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + refsv1beta1secret "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1/secret" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/common" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/directbase" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/registry" - - gcp "cloud.google.com/go/bigquery/connection/apiv1" - - bigqueryconnectionpb "cloud.google.com/go/bigquery/connection/apiv1/connectionpb" "google.golang.org/api/option" "google.golang.org/protobuf/types/known/fieldmaskpb" @@ -77,24 +76,49 @@ func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *u if err := runtime.DefaultUnstructuredConverter.FromUnstructured(u.Object, &obj); err != nil { return nil, fmt.Errorf("error converting to %T: %w", obj, err) } + connectionRef, err := krm.NewBigQueryConnectionConnectionRef(ctx, reader, obj) + if err != nil { + return nil, err + } + // Get bigqueryconnection GCP client + gcpClient, err := m.client(ctx) + if err != nil { + return nil, err + } + return &Adapter{ + id: connectionRef, + gcpClient: gcpClient, + desired: obj, + reader: reader, + namespace: obj.Namespace, + }, nil +} +func (a *Adapter) NormalizeReference(ctx context.Context) error { + obj := a.desired // Resolve SQLInstanceRef if obj.Spec.CloudSQLSpec != nil { - if obj.Spec.CloudSQLSpec.InstanceRef != nil { - instance, err := refs.ResolveSQLInstanceRef(ctx, reader, obj, obj.Spec.CloudSQLSpec.InstanceRef) + sql := obj.Spec.CloudSQLSpec + if sql.InstanceRef != nil { + instance, err := refs.ResolveSQLInstanceRef(ctx, a.reader, obj, sql.InstanceRef) if err != nil { - return nil, err + return err + } + sql.InstanceRef.External = instance.ConnectionName() + } + if sql.Credential != nil { + if err := refsv1beta1secret.NormalizedSecret(ctx, sql.Credential.SecretRef, a.reader, a.namespace); err != nil { + return err } - obj.Spec.CloudSQLSpec.InstanceRef.External = instance.ConnectionName() } } // Resolve SpannerDatabaseRef if obj.Spec.CloudSpannerSpec != nil { if obj.Spec.CloudSpannerSpec.DatabaseRef != nil { - database, err := refs.ResolveSpannerDatabaseRef(ctx, reader, obj, 
obj.Spec.CloudSpannerSpec.DatabaseRef) + database, err := refs.ResolveSpannerDatabaseRef(ctx, a.reader, obj, obj.Spec.CloudSpannerSpec.DatabaseRef) if err != nil { - return nil, err + return err } obj.Spec.CloudSpannerSpec.DatabaseRef.External = database.String() } @@ -104,9 +128,9 @@ func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *u if obj.Spec.SparkSpec != nil { if obj.Spec.SparkSpec.SparkHistoryServer != nil { if obj.Spec.SparkSpec.SparkHistoryServer.DataprocClusterRef != nil { - cluster, err := refs.ResolveDataprocClusterRef(ctx, reader, obj, obj.Spec.SparkSpec.SparkHistoryServer.DataprocClusterRef) + cluster, err := refs.ResolveDataprocClusterRef(ctx, a.reader, obj, obj.Spec.SparkSpec.SparkHistoryServer.DataprocClusterRef) if err != nil { - return nil, err + return err } obj.Spec.SparkSpec.SparkHistoryServer.DataprocClusterRef.External = cluster.String() } @@ -114,30 +138,15 @@ func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *u if obj.Spec.SparkSpec.MetastoreService != nil { if obj.Spec.SparkSpec.MetastoreService.MetastoreServiceRef != nil { - service, err := refs.ResolveMetastoreServiceRef(ctx, reader, obj, obj.Spec.SparkSpec.MetastoreService.MetastoreServiceRef) + service, err := refs.ResolveMetastoreServiceRef(ctx, a.reader, obj, obj.Spec.SparkSpec.MetastoreService.MetastoreServiceRef) if err != nil { - return nil, err + return err } obj.Spec.SparkSpec.MetastoreService.MetastoreServiceRef.External = service.String() } } } - - connectionRef, err := krm.NewBigQueryConnectionConnectionRef(ctx, reader, obj) - if err != nil { - return nil, err - } - - // Get bigqueryconnection GCP client - gcpClient, err := m.client(ctx) - if err != nil { - return nil, err - } - return &Adapter{ - id: connectionRef, - gcpClient: gcpClient, - desired: obj, - }, nil + return nil } func (m *model) AdapterForURL(ctx context.Context, url string) (directbase.Adapter, error) { @@ -149,6 +158,8 @@ type Adapter struct { gcpClient *gcp.Client desired *krm.BigQueryConnectionConnection actual *bigqueryconnectionpb.Connection + reader client.Reader + namespace string } var _ directbase.Adapter = &Adapter{} @@ -180,8 +191,11 @@ func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperati log := klog.FromContext(ctx).WithName(ctrlName) log.V(2).Info("creating Connection", "name", a.id.External) - mapCtx := &direct.MapContext{} + if err := a.NormalizeReference(ctx); err != nil { + return err + } + mapCtx := &direct.MapContext{} desired := a.desired.DeepCopy() resource := BigQueryConnectionConnectionSpec_ToProto(mapCtx, &desired.Spec) if mapCtx.Err() != nil { @@ -223,6 +237,10 @@ func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperati log := klog.FromContext(ctx).WithName(ctrlName) log.V(2).Info("updating Connection", "name", a.id.External) + + if err := a.NormalizeReference(ctx); err != nil { + return err + } mapCtx := &direct.MapContext{} desired := a.desired.DeepCopy() connection := BigQueryConnectionConnectionSpec_ToProto(mapCtx, &desired.Spec) diff --git a/pkg/controller/direct/bigqueryconnection/connection_mapping.go b/pkg/controller/direct/bigqueryconnection/connection_mapping.go index 8d0ac7c5a7..6204215c7a 100644 --- a/pkg/controller/direct/bigqueryconnection/connection_mapping.go +++ b/pkg/controller/direct/bigqueryconnection/connection_mapping.go @@ -288,3 +288,26 @@ func BigQueryConnectionConnectionSpec_ToProto(mapCtx *direct.MapContext, in *krm // MISSING: SalesforceDataCloud return out } + +func 
CloudSqlCredential_FromProto(mapCtx *direct.MapContext, in *pb.CloudSqlCredential) *krm.CloudSqlCredential { + if in == nil { + return nil + } + out := &krm.CloudSqlCredential{} + // This is sensitive data, input-only field. + // out.Username = direct.LazyPtr(in.GetUsername()) + // out.Password = direct.LazyPtr(in.GetPassword()) + return out +} + +func CloudSqlCredential_ToProto(mapCtx *direct.MapContext, in *krm.CloudSqlCredential) *pb.CloudSqlCredential { + if in == nil { + return nil + } + out := &pb.CloudSqlCredential{} + if in.SecretRef != nil { + out.Username = in.SecretRef.Username + out.Password = in.SecretRef.Password + } + return out +} diff --git a/pkg/controller/direct/bigqueryconnection/mapper.generated.go b/pkg/controller/direct/bigqueryconnection/mapper.generated.go index c19849a5f0..3191f33cd4 100644 --- a/pkg/controller/direct/bigqueryconnection/mapper.generated.go +++ b/pkg/controller/direct/bigqueryconnection/mapper.generated.go @@ -301,24 +301,6 @@ func CloudSpannerProperties_ToProto(mapCtx *direct.MapContext, in *krm.CloudSpan out.DatabaseRole = direct.ValueOf(in.DatabaseRole) return out } -func CloudSqlCredential_FromProto(mapCtx *direct.MapContext, in *pb.CloudSqlCredential) *krm.CloudSqlCredential { - if in == nil { - return nil - } - out := &krm.CloudSqlCredential{} - out.Username = direct.LazyPtr(in.GetUsername()) - out.Password = direct.LazyPtr(in.GetPassword()) - return out -} -func CloudSqlCredential_ToProto(mapCtx *direct.MapContext, in *krm.CloudSqlCredential) *pb.CloudSqlCredential { - if in == nil { - return nil - } - out := &pb.CloudSqlCredential{} - out.Username = direct.ValueOf(in.Username) - out.Password = direct.ValueOf(in.Password) - return out -} func CloudSqlProperties_FromProto(mapCtx *direct.MapContext, in *pb.CloudSqlProperties) *krm.CloudSqlProperties { if in == nil { return nil From 16a808a1d3e0c68381a7848ed58d944e1cc096b7 Mon Sep 17 00:00:00 2001 From: Yuwen Ma Date: Sat, 2 Nov 2024 02:19:19 +0000 Subject: [PATCH 08/31] record real gcp --- ...object_cloudsqlconnectionbasic.golden.yaml | 38 - .../cloudsqlconnectionbasic/_http.log | 1831 ----------------- .../cloudsqlconnectionbasic/create.yaml | 4 +- .../cloudsqlconnectionbasic/dependencies.yaml | 34 +- 4 files changed, 7 insertions(+), 1900 deletions(-) delete mode 100644 pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml delete mode 100644 pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml deleted file mode 100644 index 9e87fd3a07..0000000000 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: bigqueryconnection.cnrm.cloud.google.com/v1alpha1 -kind: BigQueryConnectionConnection -metadata: - annotations: - cnrm.cloud.google.com/management-conflict-prevention-policy: none - finalizers: - - cnrm.cloud.google.com/finalizer - - cnrm.cloud.google.com/deletion-defender - 
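For clarity, a hedged sketch of how the resolved credentials flow through the new mapper functions; it assumes the krm/pb/direct aliases used in connection_mapping.go plus the refsv1beta1secret alias from connection_controller.go, and the literal values are placeholders.

// Sketch only: CloudSqlCredential_ToProto copies the in-memory credentials onto
// the proto, while CloudSqlCredential_FromProto deliberately drops them so the
// sensitive values never round-trip into status.
func exampleCloudSqlCredentialMapping() {
	mapCtx := &direct.MapContext{}
	cred := &krm.CloudSqlCredential{
		SecretRef: &refsv1beta1secret.BasicAuthSecret{
			Name:     "sql-credential", // hypothetical Secret name
			Username: "sqluser",        // filled in by NormalizedSecret at reconcile time
			Password: "changeme",
		},
	}
	out := CloudSqlCredential_ToProto(mapCtx, cred)
	_ = out // out.Username == "sqluser", out.Password == "changeme"

	back := CloudSqlCredential_FromProto(mapCtx, &pb.CloudSqlCredential{Username: "sqluser", Password: "changeme"})
	_ = back // back.SecretRef stays nil; the username/password are not read back
}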
generation: 1 - labels: - cnrm-test: "true" - name: bigqueryconnectionconnection-${uniqueId} - namespace: ${uniqueId} -spec: - cloudSQL: - credential: - password: cGFzc3dvcmQ= - username: sqluser-${uniqueId} - database: sqldatabase-sample-${uniqueId} - instanceRef: - name: sqlinstance-sample-${uniqueId} - type: MYSQL - location: us-central1 - projectRef: - external: ${projectId} -status: - conditions: - - lastTransitionTime: "1970-01-01T00:00:00Z" - message: The resource is up to date - reason: UpToDate - status: "True" - type: Ready - externalRef: projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be - observedGeneration: 1 - observedState: - cloudSQL: - serviceAccountID: service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com - hasCredential: true diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log deleted file mode 100644 index 95bb0c5401..0000000000 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log +++ /dev/null @@ -1,1831 +0,0 @@ -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -404 Not Found -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 404, - "errors": [ - { - "domain": "global", - "message": "The Cloud SQL instance does not exist.", - "reason": "instanceDoesNotExist" - } - ], - "message": "The Cloud SQL instance does not exist." 
- } -} - ---- - -POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances?alt=json&prettyPrint=false -Content-Type: application/json -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -{ - "databaseVersion": "MYSQL_5_7", - "name": "sqlinstance-sample-${uniqueId}", - "region": "us-central1", - "settings": { - "activationPolicy": "ALWAYS", - "availabilityType": "ZONAL", - "dataDiskType": "PD_SSD", - "edition": "ENTERPRISE", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "storageAutoResize": true, - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - } -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "items": [ - { - "etag": "abcdef0123A=", - "host": "", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#user", - "name": "root", - "project": "${projectId}" - } - ], - "kind": "sql#usersList" -} - ---- - -DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&host=%25&name=root&prettyPrint=false 
-User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_USER", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_USER", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": "MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": 
"2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - "deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, - { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json -Content-Type: application/json -User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -404 Not Found -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "error": { - "code": 404, - "errors": [ - { - "domain": "global", - "message": "database 
\"projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}\" not found", - "reason": "notFound" - } - ], - "message": "database \"projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}\" not found", - "status": "NOT_FOUND" - } -} - ---- - -POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases?alt=json -Content-Type: application/json -User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -{ - "charset": "utf8", - "instance": "sqlinstance-sample-${uniqueId}", - "name": "sqldatabase-sample-${uniqueId}" -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE_DATABASE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE_DATABASE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json -Content-Type: application/json -User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "charset": "utf8", - "collation": "utf8_general_ci", - "etag": "abcdef0123A=", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#database", - "name": "sqldatabase-sample-${uniqueId}", - "project": "${projectId}", - "selfLink": 
"https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "kind": "sql#usersList" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": "MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": "2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - "deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - 
"userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, - { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -Content-Type: application/json -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -{ - "host": "10.1.2.3", - "instance": "sqlinstance-sample-${uniqueId}", - "name": "sqluser-${uniqueId}", - "password": "password" -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE_USER", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "CREATE_USER", - "selfLink": 
"https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "items": [ - { - "etag": "abcdef0123A=", - "host": "", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#user", - "name": "sqluser-${uniqueId}", - "password": "password", - "project": "${projectId}" - } - ], - "kind": "sql#usersList" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": "MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": "2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - 
"deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, - { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "items": [ - { - "etag": "abcdef0123A=", - "host": "", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#user", - "name": "sqluser-${uniqueId}", - "password": "password", - "project": "${projectId}" - } - ], - "kind": "sql#usersList" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": 
"MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": "2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - "deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, - { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": 
"MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -POST https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections?%24alt=json%3Benum-encoding%3Dint -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: parent=projects%2F${projectId}%2Flocations%2Fus-central1 - -{ - "cloudSql": { - "credential": { - "password": "cGFzc3dvcmQ=", - "username": "sqluser-${uniqueId}" - }, - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "type": 2 - } -} - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "cloudSql": { - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", - "type": 2 - }, - "creationTime": "123456789", - "hasCredential": true, - "lastModifiedTime": "123456789", - "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" -} - ---- - -GET https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "cloudSql": { - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", - "type": 2 - }, - "creationTime": "123456789", - "hasCredential": true, - "lastModifiedTime": "123456789", - "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" -} - ---- - -DELETE https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: 
SAMEORIGIN -X-Xss-Protection: 0 - -{ - "items": [ - { - "etag": "abcdef0123A=", - "host": "", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#user", - "name": "sqluser-${uniqueId}", - "password": "password", - "project": "${projectId}" - } - ], - "kind": "sql#usersList" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": "MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": "2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - "deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": 
"MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, - { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&host=foo&name=sqluser-${uniqueId}&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_USER", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_USER", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json -Content-Type: application/json -User-Agent: Terraform/ 
(+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "charset": "utf8", - "collation": "utf8_general_ci", - "etag": "abcdef0123A=", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#database", - "name": "sqldatabase-sample-${uniqueId}", - "project": "${projectId}", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}" -} - ---- - -DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json -Content-Type: application/json -User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_DATABASE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE_DATABASE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - 
"backendType": "SECOND_GEN", - "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "createTime": "2024-04-01T12:34:56.123456Z", - "databaseInstalledVersion": "MYSQL_5_7_44", - "databaseVersion": "MYSQL_5_7", - "etag": "abcdef0123A=", - "gceZone": "us-central1-a", - "geminiConfig": { - "entitled": false, - "flagRecommenderEnabled": false, - "indexAdvisorEnabled": false - }, - "instanceType": "CLOUD_SQL_INSTANCE", - "ipAddresses": [ - { - "ipAddress": "10.1.2.3", - "type": "PRIMARY" - } - ], - "kind": "sql#instance", - "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", - "name": "sqlinstance-sample-${uniqueId}", - "project": "${projectId}", - "region": "us-central1", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "serverCaCert": { - "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", - "certSerialNumber": "0", - "commonName": "common-name", - "createTime": "2024-04-01T12:34:56.123456Z", - "expirationTime": "2024-04-01T12:34:56.123456Z", - "instance": "sqlinstance-sample-${uniqueId}", - "kind": "sql#sslCert", - "sha1Fingerprint": "12345678" - }, - "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", - "settings": { - "activationPolicy": "ALWAYS", - "authorizedGaeApplications": [], - "availabilityType": "ZONAL", - "backupConfiguration": { - "backupRetentionSettings": { - "retainedBackups": 7, - "retentionUnit": "COUNT" - }, - "enabled": false, - "kind": "sql#backupConfiguration", - "startTime": "12:00", - "transactionLogRetentionDays": 7, - "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" - }, - "connectorEnforcement": "NOT_REQUIRED", - "dataDiskSizeGb": "10", - "dataDiskType": "PD_SSD", - "deletionProtectionEnabled": false, - "edition": "ENTERPRISE", - "ipConfiguration": { - "authorizedNetworks": [], - "ipv4Enabled": true, - "requireSsl": false, - "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" - }, - "kind": "sql#settings", - "locationPreference": { - "zone": "us-central1-a" - }, - "pricingPlan": "PER_USE", - "replicationType": "SYNCHRONOUS", - "settingsVersion": "123", - "storageAutoResize": true, - "storageAutoResizeLimit": "0", - "tier": "db-custom-1-3840", - "userLabels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - } - }, - "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", - "state": "RUNNABLE", - "upgradableDatabaseVersions": [ - { - "displayName": "MySQL 8.0", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0" - }, - { - "displayName": "MySQL 8.0.18", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_18" - }, - { - "displayName": "MySQL 8.0.26", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_26" - }, - { - "displayName": "MySQL 8.0.27", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_27" - }, - { - "displayName": "MySQL 8.0.28", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_28" - }, - { - "displayName": "MySQL 8.0.29", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_29" - }, - { - "displayName": "MySQL 8.0.30", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_30" - }, - { - "displayName": "MySQL 8.0.31", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_31" - }, - { - "displayName": "MySQL 8.0.32", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_32" - }, - { - "displayName": "MySQL 8.0.33", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_33" - }, - { - "displayName": "MySQL 8.0.34", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_34" - }, 
- { - "displayName": "MySQL 8.0.35", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_35" - }, - { - "displayName": "MySQL 8.0.36", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_36" - }, - { - "displayName": "MySQL 8.0.37", - "majorVersion": "MYSQL_8_0", - "name": "MYSQL_8_0_37" - } - ] -} - ---- - -DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "status": "PENDING", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} - ---- - -GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false -User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager - -200 OK -Cache-Control: private -Content-Type: application/json; charset=UTF-8 -Server: ESF -Vary: Origin -Vary: X-Origin -Vary: Referer -X-Content-Type-Options: nosniff -X-Frame-Options: SAMEORIGIN -X-Xss-Protection: 0 - -{ - "endTime": "2024-04-01T12:34:56.123456Z", - "insertTime": "2024-04-01T12:34:56.123456Z", - "kind": "sql#operation", - "name": "${operationID}", - "operationType": "DELETE", - "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", - "startTime": "2024-04-01T12:34:56.123456Z", - "status": "DONE", - "targetId": "sqlinstance-sample-${uniqueId}", - "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", - "targetProject": "${projectId}", - "user": "user@example.com" -} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml index 79b8e3db7a..eade77cd48 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml @@ -27,5 +27,5 @@ spec: database: sqldatabase-sample-${uniqueId} type: "MYSQL" credential: - username: sqluser-${uniqueId} - password: cGFzc3dvcmQ= \ No newline at end of file + secretRef: + name: bigqueryconnectionconnection-${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml 
b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml index c7875ee9a0..8522191076 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml @@ -11,12 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - apiVersion: v1 kind: Secret metadata: - name: secret-${uniqueId} -data: + name: bigqueryconnectionconnection-${uniqueId} +type: kubernetes.io/basic-auth +stringData: + username: sqluser-${uniqueId} password: cGFzc3dvcmQ= --- apiVersion: sql.cnrm.cloud.google.com/v1beta1 @@ -29,29 +30,4 @@ spec: settings: locationPreference: zone: us-central1-a - tier: db-custom-1-3840 ---- -apiVersion: sql.cnrm.cloud.google.com/v1beta1 -kind: SQLDatabase -metadata: - name: sqldatabase-sample-${uniqueId} -spec: - charset: utf8 - instanceRef: - name: sqlinstance-sample-${uniqueId} ---- -apiVersion: sql.cnrm.cloud.google.com/v1beta1 -kind: SQLUser -metadata: - labels: - label-one: "value-one" - name: sqluser-${uniqueId} -spec: - instanceRef: - name: sqlinstance-sample-${uniqueId} - host: foo - password: - valueFrom: - secretKeyRef: - name: secret-${uniqueId} - key: password \ No newline at end of file + tier: db-custom-1-3840 \ No newline at end of file From b93de1d5a09cc425c651abbbb08917d005272950 Mon Sep 17 00:00:00 2001 From: Yuwen Ma Date: Sat, 2 Nov 2024 02:20:02 +0000 Subject: [PATCH 09/31] mockgcp --- ...object_cloudsqlconnectionbasic.golden.yaml | 38 + .../cloudsqlconnectionbasic/_http.log | 694 ++++++++++++++++++ 2 files changed, 732 insertions(+) create mode 100644 pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml new file mode 100644 index 0000000000..50f9a3f9fc --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml @@ -0,0 +1,38 @@ +apiVersion: bigqueryconnection.cnrm.cloud.google.com/v1alpha1 +kind: BigQueryConnectionConnection +metadata: + annotations: + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 1 + labels: + cnrm-test: "true" + name: bigqueryconnectionconnection-${uniqueId} + namespace: ${uniqueId} +spec: + cloudSQL: + credential: + secretRef: + name: bigqueryconnectionconnection-${uniqueId} + database: sqldatabase-sample-${uniqueId} + instanceRef: + name: sqlinstance-sample-${uniqueId} + type: MYSQL + location: us-central1 + projectRef: + 
external: ${projectId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + externalRef: projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be + observedGeneration: 1 + observedState: + cloudSQL: + serviceAccountID: service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com + hasCredential: true diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log new file mode 100644 index 0000000000..c2126bcbad --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log @@ -0,0 +1,694 @@ +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The Cloud SQL instance does not exist.", + "reason": "instanceDoesNotExist" + } + ], + "message": "The Cloud SQL instance does not exist." + } +} + +--- + +POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "databaseVersion": "MYSQL_5_7", + "name": "sqlinstance-sample-${uniqueId}", + "region": "us-central1", + "settings": { + "activationPolicy": "ALWAYS", + "availabilityType": "ZONAL", + "dataDiskType": "PD_SSD", + "edition": "ENTERPRISE", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "storageAutoResize": true, + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private 
+Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "items": [ + { + "etag": "abcdef0123A=", + "host": "", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#user", + "name": "root", + "project": "${projectId}" + } + ], + "kind": "sql#usersList" +} + +--- + +DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&host=%25&name=root&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": 
"https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { + "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": [ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + 
"name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +POST https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: parent=projects%2F${projectId}%2Flocations%2Fus-central1 + +{ + "cloudSql": { + "credential": { + "password": "cGFzc3dvcmQ=", + "username": "sqluser-${uniqueId}" + }, + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "type": 2 + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cloudSql": { + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", + "type": 2 + }, + "creationTime": "123456789", + "hasCredential": true, + "lastModifiedTime": "123456789", + "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" +} + +--- + +GET https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cloudSql": { + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", + "type": 2 + }, + "creationTime": "123456789", + "hasCredential": true, + "lastModifiedTime": "123456789", + "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" +} + +--- + +DELETE https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json 
+User-Agent: kcc/controller-manager +x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { + "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": [ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + 
"majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} \ No newline at end of file From c05ec2d433d72792a51af775b559131359c33806 Mon Sep 17 00:00:00 2001 From: Yuwen Ma Date: Sat, 2 Nov 2024 02:29:09 +0000 Subject: 
[PATCH 10/31] nits --- .../v1alpha1/connection_types.go | 2 +- .../v1alpha1/zz_generated.deepcopy.go | 2 +- apis/refs/v1beta1/secret/basicauth.go | 21 +++++++++---------- .../bigqueryconnectionconnection_types.go | 8 ++----- .../v1alpha1/zz_generated.deepcopy.go | 11 +++------- .../connection_controller.go | 6 +++--- 6 files changed, 20 insertions(+), 30 deletions(-) diff --git a/apis/bigqueryconnection/v1alpha1/connection_types.go b/apis/bigqueryconnection/v1alpha1/connection_types.go index 4a6143ebb1..a71d606b04 100644 --- a/apis/bigqueryconnection/v1alpha1/connection_types.go +++ b/apis/bigqueryconnection/v1alpha1/connection_types.go @@ -307,7 +307,7 @@ type SparkPropertiesStatus struct { type CloudSqlCredential struct { // The Kubernetes Secret object that stores the "username" and "password" information. // The Secret type has to be `kubernetes.io/basic-auth`. - SecretRef *refsv1beta1secret.BasicAuthSecret `json:"secretRef,omitempty"` + SecretRef *refsv1beta1secret.BasicAuthSecretRef `json:"secretRef,omitempty"` } // +genclient diff --git a/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go b/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go index 7584da0990..b3806139de 100644 --- a/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go +++ b/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go @@ -675,7 +675,7 @@ func (in *CloudSqlCredential) DeepCopyInto(out *CloudSqlCredential) { *out = *in if in.SecretRef != nil { in, out := &in.SecretRef, &out.SecretRef - *out = new(secret.BasicAuthSecret) + *out = new(secret.BasicAuthSecretRef) **out = **in } } diff --git a/apis/refs/v1beta1/secret/basicauth.go b/apis/refs/v1beta1/secret/basicauth.go index 4287a712b3..28dddbe221 100644 --- a/apis/refs/v1beta1/secret/basicauth.go +++ b/apis/refs/v1beta1/secret/basicauth.go @@ -20,38 +20,37 @@ import ( corev1 "k8s.io/api/core/v1" ) -var _ SecretRef = &BasicAuthSecret{} +var _ SecretRef = &BasicAuthSecretRef{} -type BasicAuthSecret struct { +type BasicAuthSecretRef struct { // +required // The `metadata.name` field of a Kubernetes `Secret` Name string `json:"name,omitempty"` // The `metadata.namespace` field of a Kubernetes `Secret`. Namespace string `json:"namespace,omitempty"` + // The public field with json:"-" tag is to skip the field + // in the CRD, and bypass "the unexported field error" + // when controller-gen parses the Unstructured object to a typed object. 
Username string `json:"-"` Password string `json:"-"` } -func (b *BasicAuthSecret) GetName() string { +func (b *BasicAuthSecretRef) GetName() string { return b.Name } -func (b *BasicAuthSecret) GetNamespace() string { +func (b *BasicAuthSecretRef) GetNamespace() string { return b.Namespace } -func (b *BasicAuthSecret) Set(secret *corev1.Secret) error { +func (b *BasicAuthSecretRef) Set(secret *corev1.Secret) error { if secret.Type != corev1.SecretTypeBasicAuth { - return fmt.Errorf("the referenced Secret in `spec.cloudSQL.credential.secretRef` should use type %s, got %s", - corev1.SecretTypeBasicAuth, secret.Type) + return fmt.Errorf("the referenced Secret %s should use type %s, got %s", + b.Name, corev1.SecretTypeBasicAuth, secret.Type) } if secret.Data != nil { b.Username = string(secret.Data["username"]) b.Password = string(secret.Data["password"]) } - if secret.StringData != nil { - b.Username = secret.StringData["username"] - b.Password = secret.StringData["password"] - } return nil } diff --git a/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/bigqueryconnectionconnection_types.go b/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/bigqueryconnectionconnection_types.go index 27d6fea984..aba87c0057 100644 --- a/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/bigqueryconnectionconnection_types.go +++ b/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/bigqueryconnectionconnection_types.go @@ -118,13 +118,9 @@ type ConnectionCloudSpanner struct { } type ConnectionCredential struct { - /* The password for the credential. */ + /* The Kubernetes Secret object that stores the "username" and "password" information. The Secret type has to be `kubernetes.io/basic-auth`. */ // +optional - Password *string `json:"password,omitempty"` - - /* The username for the credential. */ - // +optional - Username *string `json:"username,omitempty"` + SecretRef *v1alpha1.ResourceRef `json:"secretRef,omitempty"` } type ConnectionMetastoreService struct { diff --git a/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go index 3536c5211c..d74493c2a7 100644 --- a/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/bigqueryconnection/v1alpha1/zz_generated.deepcopy.go @@ -446,14 +446,9 @@ func (in *ConnectionCloudSpanner) DeepCopy() *ConnectionCloudSpanner { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ConnectionCredential) DeepCopyInto(out *ConnectionCredential) { *out = *in - if in.Password != nil { - in, out := &in.Password, &out.Password - *out = new(string) - **out = **in - } - if in.Username != nil { - in, out := &in.Username, &out.Username - *out = new(string) + if in.SecretRef != nil { + in, out := &in.SecretRef, &out.SecretRef + *out = new(k8sv1alpha1.ResourceRef) **out = **in } return diff --git a/pkg/controller/direct/bigqueryconnection/connection_controller.go b/pkg/controller/direct/bigqueryconnection/connection_controller.go index 83d1241937..d5fde8fc86 100644 --- a/pkg/controller/direct/bigqueryconnection/connection_controller.go +++ b/pkg/controller/direct/bigqueryconnection/connection_controller.go @@ -94,7 +94,7 @@ func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *u }, nil } -func (a *Adapter) NormalizeReference(ctx context.Context) error { +func (a *Adapter) normalizeReference(ctx context.Context) error { obj := a.desired // Resolve SQLInstanceRef if obj.Spec.CloudSQLSpec != nil { @@ -192,7 +192,7 @@ func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperati log := klog.FromContext(ctx).WithName(ctrlName) log.V(2).Info("creating Connection", "name", a.id.External) - if err := a.NormalizeReference(ctx); err != nil { + if err := a.normalizeReference(ctx); err != nil { return err } mapCtx := &direct.MapContext{} @@ -238,7 +238,7 @@ func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperati log := klog.FromContext(ctx).WithName(ctrlName) log.V(2).Info("updating Connection", "name", a.id.External) - if err := a.NormalizeReference(ctx); err != nil { + if err := a.normalizeReference(ctx); err != nil { return err } mapCtx := &direct.MapContext{} From 2b170917c7f6f278b9e89939e6752513d4af6707 Mon Sep 17 00:00:00 2001 From: Yuwen Ma Date: Sat, 2 Nov 2024 19:18:00 +0000 Subject: [PATCH 11/31] put back the sqluser and sqldatabase in dependencies.yaml to make the e diff be more straightforward --- ...object_cloudsqlconnectionbasic.golden.yaml | 2 +- .../cloudsqlconnectionbasic/_http.log | 1215 ++++++++++++++++- .../cloudsqlconnectionbasic/create.yaml | 2 +- .../cloudsqlconnectionbasic/dependencies.yaml | 29 +- 4 files changed, 1205 insertions(+), 43 deletions(-) diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml index 50f9a3f9fc..3b6d7d20c0 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_generated_object_cloudsqlconnectionbasic.golden.yaml @@ -15,7 +15,7 @@ spec: cloudSQL: credential: secretRef: - name: bigqueryconnectionconnection-${uniqueId} + name: secret-${uniqueId} database: sqldatabase-sample-${uniqueId} instanceRef: name: sqlinstance-sample-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log 
index c2126bcbad..93d4937b57 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/_http.log @@ -371,23 +371,48 @@ X-Xss-Protection: 0 --- -POST https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections?%24alt=json%3Benum-encoding%3Dint +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: parent=projects%2F${projectId}%2Flocations%2Fus-central1 +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 { - "cloudSql": { - "credential": { - "password": "cGFzc3dvcmQ=", - "username": "sqluser-${uniqueId}" - }, - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "type": 2 + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "database \"projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}\" not found", + "reason": "notFound" + } + ], + "message": "database \"projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}\" not found", + "status": "NOT_FOUND" } } +--- + +POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "charset": "utf8", + "instance": "sqlinstance-sample-${uniqueId}", + "name": "sqldatabase-sample-${uniqueId}" +} + 200 OK Cache-Control: private Content-Type: application/json; charset=UTF-8 @@ -400,24 +425,22 @@ X-Frame-Options: SAMEORIGIN X-Xss-Protection: 0 { - "cloudSql": { - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", - "type": 2 - }, - "creationTime": "123456789", - "hasCredential": true, - "lastModifiedTime": "123456789", - "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE_DATABASE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" } --- -GET 
https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint -Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager 200 OK Cache-Control: private @@ -431,24 +454,25 @@ X-Frame-Options: SAMEORIGIN X-Xss-Protection: 0 { - "cloudSql": { - "database": "sqldatabase-sample-${uniqueId}", - "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", - "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", - "type": 2 - }, - "creationTime": "123456789", - "hasCredential": true, - "lastModifiedTime": "123456789", - "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE_DATABASE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" } --- -DELETE https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager 200 OK Cache-Control: private @@ -461,7 +485,1120 @@ X-Content-Type-Options: nosniff X-Frame-Options: SAMEORIGIN X-Xss-Protection: 0 -{} +{ + "charset": "utf8", + "collation": "utf8_general_ci", + "etag": "abcdef0123A=", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#database", + "name": "sqldatabase-sample-${uniqueId}", + "project": "${projectId}", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff 
+X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "kind": "sql#usersList" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { + "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": [ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": 
"MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +POST https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "host": "10.1.2.3", + "instance": "sqlinstance-sample-${uniqueId}", + "name": "sqluser-${uniqueId}", + "password": "cGFzc3dvcmQ=" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "CREATE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 
terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "items": [ + { + "etag": "abcdef0123A=", + "host": "", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#user", + "name": "sqluser-${uniqueId}", + "password": "cGFzc3dvcmQ=", + "project": "${projectId}" + } + ], + "kind": "sql#usersList" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { + "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": 
[ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "items": [ + { + "etag": "abcdef0123A=", + "host": "", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#user", + "name": "sqluser-${uniqueId}", + "password": "cGFzc3dvcmQ=", + "project": "${projectId}" + } + ], + "kind": "sql#usersList" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { 
+ "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": [ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +POST https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: parent=projects%2F${projectId}%2Flocations%2Fus-central1 + +{ + "cloudSql": { + "credential": { + "password": "cGFzc3dvcmQ=", + "username": "sqluser-${uniqueId}" + }, + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "type": 2 + } 
+} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cloudSql": { + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", + "type": 2 + }, + "creationTime": "123456789", + "hasCredential": true, + "lastModifiedTime": "123456789", + "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" +} + +--- + +GET https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cloudSql": { + "database": "sqldatabase-sample-${uniqueId}", + "instanceId": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "serviceAccountId": "service-${projectNumber}@gcp-sa-bigqueryconnection.iam.gserviceaccount.com", + "type": 2 + }, + "creationTime": "123456789", + "hasCredential": true, + "lastModifiedTime": "123456789", + "name": "projects/${projectNumber}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be" +} + +--- + +DELETE https://bigqueryconnection.googleapis.com/v1/projects/${projectId}/locations/us-central1/connections/71389360-831c-431d-8975-837aee2153be?%24alt=json%3Benum-encoding%3Dint +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: name=projects%2F${projectId}%2Flocations%2Fus-central1%2Fconnections%2F71389360-831c-431d-8975-837aee2153be + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "items": [ + { + "etag": "abcdef0123A=", + "host": "", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#user", + "name": "sqluser-${uniqueId}", + "password": "cGFzc3dvcmQ=", + "project": "${projectId}" + } + ], + "kind": "sql#usersList" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; 
charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "backendType": "SECOND_GEN", + "connectionName": "${projectId}:us-central1:sqlinstance-sample-${uniqueId}", + "createTime": "2024-04-01T12:34:56.123456Z", + "databaseInstalledVersion": "MYSQL_5_7_44", + "databaseVersion": "MYSQL_5_7", + "etag": "abcdef0123A=", + "gceZone": "us-central1-a", + "geminiConfig": { + "entitled": false, + "flagRecommenderEnabled": false, + "indexAdvisorEnabled": false + }, + "instanceType": "CLOUD_SQL_INSTANCE", + "ipAddresses": [ + { + "ipAddress": "10.1.2.3", + "type": "PRIMARY" + } + ], + "kind": "sql#instance", + "maintenanceVersion": "MYSQL_5_7_44.R20231105.01_03", + "name": "sqlinstance-sample-${uniqueId}", + "project": "${projectId}", + "region": "us-central1", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "serverCaCert": { + "cert": "-----BEGIN CERTIFICATE-----\n-----END CERTIFICATE-----\n", + "certSerialNumber": "0", + "commonName": "common-name", + "createTime": "2024-04-01T12:34:56.123456Z", + "expirationTime": "2024-04-01T12:34:56.123456Z", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#sslCert", + "sha1Fingerprint": "12345678" + }, + "serviceAccountEmailAddress": "p${projectNumber}-abcdef@gcp-sa-cloud-sql.iam.gserviceaccount.com", + "settings": { + "activationPolicy": "ALWAYS", + "authorizedGaeApplications": [], + "availabilityType": "ZONAL", + "backupConfiguration": { + "backupRetentionSettings": { + "retainedBackups": 7, + "retentionUnit": "COUNT" + }, + "enabled": false, + "kind": "sql#backupConfiguration", + "startTime": "12:00", + "transactionLogRetentionDays": 7, + "transactionalLogStorageState": "TRANSACTIONAL_LOG_STORAGE_STATE_UNSPECIFIED" + }, + "connectorEnforcement": "NOT_REQUIRED", + "dataDiskSizeGb": "10", + "dataDiskType": "PD_SSD", + "deletionProtectionEnabled": false, + "edition": "ENTERPRISE", + "ipConfiguration": { + "authorizedNetworks": [], + "ipv4Enabled": true, + "requireSsl": false, + "sslMode": "ALLOW_UNENCRYPTED_AND_ENCRYPTED" + }, + "kind": "sql#settings", + "locationPreference": { + "zone": "us-central1-a" + }, + "pricingPlan": "PER_USE", + "replicationType": "SYNCHRONOUS", + "settingsVersion": "123", + "storageAutoResize": true, + "storageAutoResizeLimit": "0", + "tier": "db-custom-1-3840", + "userLabels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + } + }, + "sqlNetworkArchitecture": "NEW_NETWORK_ARCHITECTURE", + "state": "RUNNABLE", + "upgradableDatabaseVersions": [ + { + "displayName": "MySQL 8.0", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0" + }, + { + "displayName": "MySQL 8.0.18", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_18" + }, + { + "displayName": "MySQL 8.0.26", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_26" + }, + { + "displayName": "MySQL 8.0.27", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_27" + }, + { + "displayName": "MySQL 8.0.28", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_28" + }, + { + "displayName": "MySQL 8.0.29", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_29" + }, + { + "displayName": "MySQL 8.0.30", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_30" + }, + { + "displayName": "MySQL 8.0.31", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_31" + }, + { + "displayName": "MySQL 8.0.32", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_32" + }, + { + "displayName": "MySQL 
8.0.33", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_33" + }, + { + "displayName": "MySQL 8.0.34", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_34" + }, + { + "displayName": "MySQL 8.0.35", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_35" + }, + { + "displayName": "MySQL 8.0.36", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_36" + }, + { + "displayName": "MySQL 8.0.37", + "majorVersion": "MYSQL_8_0", + "name": "MYSQL_8_0_37" + } + ] +} + +--- + +DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/users?alt=json&host=foo&name=sqluser-${uniqueId}&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_USER", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "charset": "utf8", + "collation": "utf8_general_ci", + "etag": "abcdef0123A=", + "instance": "sqlinstance-sample-${uniqueId}", + "kind": "sql#database", + "name": "sqldatabase-sample-${uniqueId}", + "project": "${projectId}", + "selfLink": 
"https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}" +} + +--- + +DELETE https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_DATABASE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "status": "PENDING", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} + +--- + +GET https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "sql#operation", + "name": "${operationID}", + "operationType": "DELETE_DATABASE", + "selfLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "sqlinstance-sample-${uniqueId}", + "targetLink": "https://sqladmin.googleapis.com/sql/v1beta4/projects/${projectId}/instances/sqlinstance-sample-${uniqueId}/databases/sqldatabase-sample-${uniqueId}", + "targetProject": "${projectId}", + "user": "user@example.com" +} --- diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml index eade77cd48..b6887e317a 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/create.yaml @@ -28,4 +28,4 @@ spec: type: "MYSQL" credential: secretRef: - name: bigqueryconnectionconnection-${uniqueId} \ No newline at end of file + name: secret-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml index 8522191076..30b7b8347c 100644 --- 
a/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryconnection/v1alpha1/bigqueryconnectionconnection/cloudsqlconnectionbasic/dependencies.yaml @@ -14,7 +14,7 @@ apiVersion: v1 kind: Secret metadata: - name: bigqueryconnectionconnection-${uniqueId} + name: secret-${uniqueId} type: kubernetes.io/basic-auth stringData: username: sqluser-${uniqueId} @@ -30,4 +30,29 @@ spec: settings: locationPreference: zone: us-central1-a - tier: db-custom-1-3840 \ No newline at end of file + tier: db-custom-1-3840 +--- +apiVersion: sql.cnrm.cloud.google.com/v1beta1 +kind: SQLDatabase +metadata: + name: sqldatabase-sample-${uniqueId} +spec: + charset: utf8 + instanceRef: + name: sqlinstance-sample-${uniqueId} +--- +apiVersion: sql.cnrm.cloud.google.com/v1beta1 +kind: SQLUser +metadata: + labels: + label-one: "value-one" + name: sqluser-${uniqueId} +spec: + instanceRef: + name: sqlinstance-sample-${uniqueId} + host: foo + password: + valueFrom: + secretKeyRef: + name: secret-${uniqueId} + key: password \ No newline at end of file From 076e4457ff2a8a8cd248df2d303fd1eb21b63fbe Mon Sep 17 00:00:00 2001 From: justinsb Date: Sat, 2 Nov 2024 15:55:56 -0400 Subject: [PATCH 12/31] tests: add e2e for cloudbuild --- dev/ci/periodics/e2e-service-cloudbuild | 25 +++++++++++++++++++++++++ dev/tasks/create-test-project | 3 ++- 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100755 dev/ci/periodics/e2e-service-cloudbuild diff --git a/dev/ci/periodics/e2e-service-cloudbuild b/dev/ci/periodics/e2e-service-cloudbuild new file mode 100755 index 0000000000..cf4c994c60 --- /dev/null +++ b/dev/ci/periodics/e2e-service-cloudbuild @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd ${REPO_ROOT} + +export ONLY_TEST_APIGROUPS=cloudbuild.cnrm.cloud.google.com + +dev/ci/periodics/_create_project_and_run_e2e diff --git a/dev/tasks/create-test-project b/dev/tasks/create-test-project index dc939216ee..66cba1cd0a 100755 --- a/dev/tasks/create-test-project +++ b/dev/tasks/create-test-project @@ -54,8 +54,9 @@ gcloud config set project "${GCP_PROJECT_ID}" gcloud services enable \ compute.googleapis.com \ - dataflow.googleapis.com \ + cloudbuild.googleapis.com \ cloudkms.googleapis.com \ + dataflow.googleapis.com \ firestore.googleapis.com \ logging.googleapis.com \ monitoring.googleapis.com \ From ab7ac6ffb14bbfb06328278ca93b89ce770cada1 Mon Sep 17 00:00:00 2001 From: justinsb Date: Mon, 4 Nov 2024 12:08:44 -0500 Subject: [PATCH 13/31] mockgcp: support int32 in update_mask It just wasn't in the initial list. 
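Note: a minimal, self-contained sketch of the kind switch this change extends (illustrative only, not the mockgcp helper itself; copyField is a simplified stand-in, and durationpb.Duration is used just because its "nanos" field happens to be an int32). Without Int32Kind in the case list, an int32 field named in an update_mask would not be handled by this branch.

    package main

    import (
    	"fmt"

    	"google.golang.org/protobuf/reflect/protoreflect"
    	"google.golang.org/protobuf/types/known/durationpb"
    )

    // copyField copies one top-level field, named by an update_mask path, from
    // update onto original. The kind switch mirrors the style of the one in
    // mockgcp/common/fields/updatemask.go; note Int32Kind in the list.
    func copyField(original, update protoreflect.Message, fieldName string) error {
    	fd := original.Descriptor().Fields().ByName(protoreflect.Name(fieldName))
    	if fd == nil {
    		return fmt.Errorf("field %q not found", fieldName)
    	}
    	switch fd.Kind() {
    	case protoreflect.MessageKind, protoreflect.StringKind, protoreflect.DoubleKind,
    		protoreflect.Int32Kind, protoreflect.Int64Kind, protoreflect.Uint64Kind,
    		protoreflect.BoolKind, protoreflect.EnumKind:
    		original.Set(fd, update.Get(fd))
    		return nil
    	default:
    		return fmt.Errorf("unhandled kind %v for field %q", fd.Kind(), fieldName)
    	}
    }

    func main() {
    	original := &durationpb.Duration{Seconds: 1}
    	update := &durationpb.Duration{Nanos: 500} // "nanos" is an int32 field
    	if err := copyField(original.ProtoReflect(), update.ProtoReflect(), "nanos"); err != nil {
    		panic(err)
    	}
    	fmt.Println(original.GetNanos()) // prints 500
    }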
--- mockgcp/common/fields/updatemask.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mockgcp/common/fields/updatemask.go b/mockgcp/common/fields/updatemask.go index d3ece64457..a66f7a9cad 100644 --- a/mockgcp/common/fields/updatemask.go +++ b/mockgcp/common/fields/updatemask.go @@ -73,7 +73,7 @@ func replace(original, update protoreflect.Message, fieldName string) error { } switch originalFd.Kind() { - case protoreflect.MessageKind, protoreflect.StringKind, protoreflect.DoubleKind, protoreflect.Int64Kind, protoreflect.Uint64Kind, protoreflect.BoolKind, protoreflect.EnumKind: + case protoreflect.MessageKind, protoreflect.StringKind, protoreflect.DoubleKind, protoreflect.Int32Kind, protoreflect.Int64Kind, protoreflect.Uint64Kind, protoreflect.BoolKind, protoreflect.EnumKind: if !original.IsValid() { return fmt.Errorf("%s is read-only or empty", fieldName) } From 0e5fcfc6175431957e49b541a21dc49593b6398e Mon Sep 17 00:00:00 2001 From: justinsb Date: Mon, 4 Nov 2024 16:54:34 -0500 Subject: [PATCH 14/31] tests: add e2e for discoveryengine --- dev/ci/periodics/e2e-service-discoveryengine | 25 ++++++++++++++++++++ dev/tasks/create-test-project | 1 + 2 files changed, 26 insertions(+) create mode 100755 dev/ci/periodics/e2e-service-discoveryengine diff --git a/dev/ci/periodics/e2e-service-discoveryengine b/dev/ci/periodics/e2e-service-discoveryengine new file mode 100755 index 0000000000..fcfa2d9f77 --- /dev/null +++ b/dev/ci/periodics/e2e-service-discoveryengine @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -o errexit +set -o nounset +set -o pipefail + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd ${REPO_ROOT} + +export ONLY_TEST_APIGROUPS=discoveryengine.cnrm.cloud.google.com + +dev/ci/periodics/_create_project_and_run_e2e diff --git a/dev/tasks/create-test-project b/dev/tasks/create-test-project index 66cba1cd0a..b33abbf487 100755 --- a/dev/tasks/create-test-project +++ b/dev/tasks/create-test-project @@ -57,6 +57,7 @@ gcloud services enable \ cloudbuild.googleapis.com \ cloudkms.googleapis.com \ dataflow.googleapis.com \ + discoveryengine.googleapis.com \ firestore.googleapis.com \ logging.googleapis.com \ monitoring.googleapis.com \ From 1fe5937bb4a4b5b87fb65c266bdc439b325a0638 Mon Sep 17 00:00:00 2001 From: Alex Pana <8968914+acpana@users.noreply.github.com> Date: Wed, 30 Oct 2024 23:42:35 +0000 Subject: [PATCH 15/31] feat: promote DataExchange to beta Signed-off-by: Alex Pana <8968914+acpana@users.noreply.github.com> --- .../v1beta1/dataexchange_types.go | 141 +++++ apis/bigqueryanalyticshub/v1beta1/doc.go | 16 + .../v1beta1/groupversion_info.go | 39 ++ .../v1beta1/types.generated.go | 228 +++++++ .../v1beta1/zz_generated.deepcopy.go | 576 ++++++++++++++++++ ...eryanalyticshub.cnrm.cloud.google.com.yaml | 170 ++++++ ...eta1_bigqueryanalyticshubdataexchange.yaml | 27 + .../servicemappings/bigqueryanalyticshub.yaml | 27 + .../bigqueryanalyticshub/v1alpha1/register.go | 6 - .../v1alpha1/zz_generated.deepcopy.go | 165 ----- .../bigqueryanalyticshubdataexchange_types.go | 2 +- .../apis/bigqueryanalyticshub/v1beta1/doc.go | 38 ++ .../bigqueryanalyticshub/v1beta1/register.go | 63 ++ .../v1beta1/zz_generated.deepcopy.go | 195 ++++++ .../client/clientset/versioned/clientset.go | 13 + .../versioned/fake/clientset_generated.go | 7 + .../clientset/versioned/fake/register.go | 2 + .../clientset/versioned/scheme/register.go | 2 + .../v1alpha1/bigqueryanalyticshub_client.go | 5 - .../fake/fake_bigqueryanalyticshub_client.go | 4 - .../v1alpha1/generated_expansion.go | 2 - .../v1beta1/bigqueryanalyticshub_client.go | 110 ++++ .../bigqueryanalyticshubdataexchange.go | 42 +- .../typed/bigqueryanalyticshub/v1beta1/doc.go | 23 + .../bigqueryanalyticshub/v1beta1/fake/doc.go | 23 + .../fake/fake_bigqueryanalyticshub_client.go | 43 ++ .../fake_bigqueryanalyticshubdataexchange.go | 50 +- .../v1beta1/generated_expansion.go | 24 + .../dataexchange_controller.go | 2 +- .../direct/bigqueryanalyticshub/mapper.go | 2 +- pkg/gvks/supportedgvks/gvks_generated.go | 11 + pkg/test/resourcefixture/sets.go | 1 + ...yanalyticshubdataexchange-base.golden.yaml | 2 +- .../_http.log | 0 .../create.yaml | 2 +- ...yanalyticshubdataexchange-full.golden.yaml | 2 +- .../_http.log | 0 .../create.yaml | 2 +- .../update.yaml | 2 +- .../resource-reference/_toc.yaml | 4 + .../bigqueryanalyticshubdataexchange.md | 350 +++++++++++ .../resource-reference/overview.md | 4 + ...shub_bigqueryanalyticshubdataexchange.tmpl | 54 ++ .../resource-autogen/allowlist/allowlist.go | 1 - 44 files changed, 2245 insertions(+), 237 deletions(-) create mode 100644 apis/bigqueryanalyticshub/v1beta1/dataexchange_types.go create mode 100644 apis/bigqueryanalyticshub/v1beta1/doc.go create mode 100644 apis/bigqueryanalyticshub/v1beta1/groupversion_info.go create mode 100644 apis/bigqueryanalyticshub/v1beta1/types.generated.go create mode 100644 apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go create mode 100644 config/samples/resources/bigqueryanalyticshubdataexchange/bigqueryanalyticshub_v1beta1_bigqueryanalyticshubdataexchange.yaml create 
mode 100644 config/servicemappings/bigqueryanalyticshub.yaml rename pkg/clients/generated/apis/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange_types.go (99%) create mode 100644 pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/doc.go create mode 100644 pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/register.go create mode 100644 pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go create mode 100644 pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshub_client.go rename pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange.go (77%) create mode 100644 pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/doc.go create mode 100644 pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/doc.go create mode 100644 pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshub_client.go rename pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/{v1alpha1 => v1beta1}/fake/fake_bigqueryanalyticshubdataexchange.go (66%) create mode 100644 pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/generated_expansion.go rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml (93%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-base/_http.log (100%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-base/create.yaml (92%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml (94%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-full/_http.log (100%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-full/create.yaml (93%) rename pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/{v1alpha1 => v1beta1}/bigqueryanalyticshubdataexchange-full/update.yaml (93%) create mode 100644 scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigqueryanalyticshub/bigqueryanalyticshubdataexchange.md create mode 100644 scripts/generate-google3-docs/resource-reference/templates/bigqueryanalyticshub_bigqueryanalyticshubdataexchange.tmpl diff --git a/apis/bigqueryanalyticshub/v1beta1/dataexchange_types.go b/apis/bigqueryanalyticshub/v1beta1/dataexchange_types.go new file mode 100644 index 0000000000..8525e7240c --- /dev/null +++ b/apis/bigqueryanalyticshub/v1beta1/dataexchange_types.go @@ -0,0 +1,141 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +var BigQueryAnalyticsHubDataExchangeGVK = GroupVersion.WithKind("BigQueryAnalyticsHubDataExchange") + +// BigQueryAnalyticsHubDataExchangeSpec defines the desired state of BigQueryAnalyticsHubDataExchange +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.DataExchange +type BigQueryAnalyticsHubDataExchangeSpec struct { + // Required. Human-readable display name of the data exchange. The display + // name must contain only Unicode letters, numbers (0-9), underscores (_), + // dashes (-), spaces ( ), ampersands (&) and must not start or end with + // spaces. Default value is an empty string. Max length: 63 bytes. + DisplayName *string `json:"displayName,omitempty"` + + // Optional. Description of the data exchange. The description must not + // contain Unicode non-characters as well as C0 and C1 control codes except + // tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). + // Default value is an empty string. + // Max length: 2000 bytes. + Description *string `json:"description,omitempty"` + + // Optional. Email or URL of the primary point of contact of the data + // exchange. Max Length: 1000 bytes. + PrimaryContact *string `json:"primaryContact,omitempty"` + + // Optional. Documentation describing the data exchange. + Documentation *string `json:"documentation,omitempty"` + + // TODO(KCC): NOT YET + // // Optional. Base64 encoded image representing the data exchange. Max + // // Size: 3.0MiB Expected image dimensions are 512x512 pixels, however the API + // // only performs validation on size of the encoded data. Note: For byte + // // fields, the content of the fields are base64-encoded (which increases the + // // size of the data by 33-36%) when using JSON on the wire. + // Icon *string `json:"icon,omitempty"` + + // As of now SharingEnvironmentConfig is empty or output only so let's not include it as + // part of the spec yet. + // // Optional. Configurable data sharing environment option for a data exchange. + // SharingEnvironmentConfig *SharingEnvironmentConfig `json:"sharingEnvironmentConfig,omitempty"` + + // Optional. Type of discovery on the discovery page for all the listings + // under this exchange. Updating this field also updates (overwrites) the + // discovery_type field for all the listings under this exchange. + DiscoveryType *string `json:"discoveryType,omitempty"` + + /* Immutable. The name of the location this data exchange. */ + Location string `json:"location"` + + /* The project that this resource belongs to. */ + ProjectRef *refs.ProjectRef `json:"projectRef"` + + // +kubebuilder:validation:XValidation:rule="self == oldSelf",message="ResourceID field is immutable" + // Immutable. + // The BigQueryAnalyticsHubDataExchange name. If not given, the metadata.name will be used. 
+ // + optional + ResourceID *string `json:"resourceID,omitempty"` +} + +// BigQueryAnalyticsHubDataExchangeStatus defines the config connector machine state of BigQueryAnalyticsHubDataExchange +type BigQueryAnalyticsHubDataExchangeStatus struct { + /* Conditions represent the latest available observations of the + object's current state. */ + Conditions []v1alpha1.Condition `json:"conditions,omitempty"` + + // ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource. + ObservedGeneration *int64 `json:"observedGeneration,omitempty"` + + // A unique specifier for the BigQueryAnalyticsHubDataExchange resource in GCP. + ExternalRef *string `json:"externalRef,omitempty"` + + // ObservedState is the state of the resource as most recently observed in GCP. + ObservedState *BigQueryAnalyticsHubDataExchangeObservedState `json:"observedState,omitempty"` +} + +// BigQueryAnalyticsHubDataExchangeSpec defines the desired state of BigQueryAnalyticsHubDataExchange +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.DataExchange +type BigQueryAnalyticsHubDataExchangeObservedState struct { + // This field is in the same format as our externalRef! So it's redundant. + // // Output only. The resource name of the data exchange. + // // e.g. `projects/myproject/locations/US/dataExchanges/123`. + // Name *string `json:"name,omitempty"` + + /* Number of listings contained in the data exchange. */ + // +optional + ListingCount *int64 `json:"listingCount,omitempty"` +} + +// +genclient +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// +kubebuilder:resource:categories=gcp,shortName=gcpbigqueryanalyticshubdataexchange;gcpbigqueryanalyticshubdataexchanges +// +kubebuilder:resource:categories=gcp +// +kubebuilder:subresource:status +// +kubebuilder:metadata:labels="cnrm.cloud.google.com/managed-by-kcc=true";"cnrm.cloud.google.com/system=true";"cnrm.cloud.google.com/stability-level=alpha" +// +kubebuilder:printcolumn:name="Age",JSONPath=".metadata.creationTimestamp",type="date" +// +kubebuilder:printcolumn:name="Ready",JSONPath=".status.conditions[?(@.type=='Ready')].status",type="string",description="When 'True', the most recent reconcile of the resource succeeded" +// +kubebuilder:printcolumn:name="Status",JSONPath=".status.conditions[?(@.type=='Ready')].reason",type="string",description="The reason for the value in 'Ready'" +// +kubebuilder:printcolumn:name="Status Age",JSONPath=".status.conditions[?(@.type=='Ready')].lastTransitionTime",type="date",description="The last transition time for the value in 'Status'" +// +kubebuilder:storageversion +// BigQueryAnalyticsHubDataExchange is the Schema for the BigQueryAnalyticsHubDataExchange API +// +k8s:openapi-gen=true +type BigQueryAnalyticsHubDataExchange struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + // +required + Spec BigQueryAnalyticsHubDataExchangeSpec `json:"spec,omitempty"` + Status BigQueryAnalyticsHubDataExchangeStatus `json:"status,omitempty"` +} + +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// BigQueryAnalyticsHubDataExchangeList contains a list of BigQueryAnalyticsHubDataExchange +type BigQueryAnalyticsHubDataExchangeList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items 
[]BigQueryAnalyticsHubDataExchange `json:"items"` +} + +func init() { + SchemeBuilder.Register(&BigQueryAnalyticsHubDataExchange{}, &BigQueryAnalyticsHubDataExchangeList{}) +} diff --git a/apis/bigqueryanalyticshub/v1beta1/doc.go b/apis/bigqueryanalyticshub/v1beta1/doc.go new file mode 100644 index 0000000000..a438a8a027 --- /dev/null +++ b/apis/bigqueryanalyticshub/v1beta1/doc.go @@ -0,0 +1,16 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1 +package v1beta1 diff --git a/apis/bigqueryanalyticshub/v1beta1/groupversion_info.go b/apis/bigqueryanalyticshub/v1beta1/groupversion_info.go new file mode 100644 index 0000000000..f391538450 --- /dev/null +++ b/apis/bigqueryanalyticshub/v1beta1/groupversion_info.go @@ -0,0 +1,39 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kubebuilder:object:generate=true +// +groupName=bigqueryanalyticshub.cnrm.cloud.google.com +package v1beta1 + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects + GroupVersion = schema.GroupVersion{Group: "bigqueryanalyticshub.cnrm.cloud.google.com", Version: "v1beta1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme + + DataExchangeGVK = schema.GroupVersionKind{ + Group: GroupVersion.Group, + Version: GroupVersion.Version, + Kind: "BigQueryAnalyticsHubDataExchange", + } +) diff --git a/apis/bigqueryanalyticshub/v1beta1/types.generated.go b/apis/bigqueryanalyticshub/v1beta1/types.generated.go new file mode 100644 index 0000000000..ae8b17d0f8 --- /dev/null +++ b/apis/bigqueryanalyticshub/v1beta1/types.generated.go @@ -0,0 +1,228 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.DataExchange +type DataExchange struct { + // Output only. The resource name of the data exchange. + // e.g. `projects/myproject/locations/US/dataExchanges/123`. + Name *string `json:"name,omitempty"` + + // Required. Human-readable display name of the data exchange. The display + // name must contain only Unicode letters, numbers (0-9), underscores (_), + // dashes (-), spaces ( ), ampersands (&) and must not start or end with + // spaces. Default value is an empty string. Max length: 63 bytes. + DisplayName *string `json:"displayName,omitempty"` + + // Optional. Description of the data exchange. The description must not + // contain Unicode non-characters as well as C0 and C1 control codes except + // tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). + // Default value is an empty string. + // Max length: 2000 bytes. + Description *string `json:"description,omitempty"` + + // Optional. Email or URL of the primary point of contact of the data + // exchange. Max Length: 1000 bytes. + PrimaryContact *string `json:"primaryContact,omitempty"` + + // Optional. Documentation describing the data exchange. + Documentation *string `json:"documentation,omitempty"` + + // Optional. Base64 encoded image representing the data exchange. Max + // Size: 3.0MiB Expected image dimensions are 512x512 pixels, however the API + // only performs validation on size of the encoded data. Note: For byte + // fields, the content of the fields are base64-encoded (which increases the + // size of the data by 33-36%) when using JSON on the wire. + Icon []byte `json:"icon,omitempty"` + + // Optional. Configurable data sharing environment option for a data exchange. + SharingEnvironmentConfig *SharingEnvironmentConfig `json:"sharingEnvironmentConfig,omitempty"` + + // Optional. Type of discovery on the discovery page for all the listings + // under this exchange. Updating this field also updates (overwrites) the + // discovery_type field for all the listings under this exchange. + DiscoveryType *string `json:"discoveryType,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.SharingEnvironmentConfig +type SharingEnvironmentConfig struct { + // Default Analytics Hub data exchange, used for secured data sharing. + DefaultExchangeConfig *SharingEnvironmentConfig_DefaultExchangeConfig `json:"defaultExchangeConfig,omitempty"` + + // Data Clean Room (DCR), used for privacy-safe and secured data sharing. + DcrExchangeConfig *SharingEnvironmentConfig_DcrExchangeConfig `json:"dcrExchangeConfig,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.SharingEnvironmentConfig.DcrExchangeConfig +type SharingEnvironmentConfig_DcrExchangeConfig struct { + // Output only. If True, this DCR restricts the contributors to sharing + // only a single resource in a Listing. And no two resources should have the + // same IDs. So if a contributor adds a view with a conflicting name, the + // CreateListing API will reject the request. if False, the data contributor + // can publish an entire dataset (as before). This is not configurable, and + // by default, all new DCRs will have the restriction set to True. + SingleSelectedResourceSharingRestriction *bool `json:"singleSelectedResourceSharingRestriction,omitempty"` + + // Output only. 
If True, when subscribing to this DCR, it will create only + // one linked dataset containing all resources shared within the + // cleanroom. If False, when subscribing to this DCR, it will + // create 1 linked dataset per listing. This is not configurable, and by + // default, all new DCRs will have the restriction set to True. + SingleLinkedDatasetPerCleanroom *bool `json:"singleLinkedDatasetPerCleanroom,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.SharingEnvironmentConfig.DefaultExchangeConfig +type SharingEnvironmentConfig_DefaultExchangeConfig struct { +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.DataProvider +type DataProvider struct { + // Optional. Name of the data provider. + Name *string `json:"name,omitempty"` + + // Optional. Email or URL of the data provider. + // Max Length: 1000 bytes. + PrimaryContact *string `json:"primaryContact,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Listing +type Listing struct { + // Required. Shared dataset i.e. BigQuery dataset source. + BigqueryDataset *Listing_BigQueryDatasetSource `json:"bigqueryDataset,omitempty"` + + // Output only. The resource name of the listing. + // e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456` + Name *string `json:"name,omitempty"` + + // Required. Human-readable display name of the listing. The display name must + // contain only Unicode letters, numbers (0-9), underscores (_), dashes (-), + // spaces ( ), ampersands (&) and can't start or end with spaces. Default + // value is an empty string. Max length: 63 bytes. + DisplayName *string `json:"displayName,omitempty"` + + // Optional. Short description of the listing. The description must not + // contain Unicode non-characters and C0 and C1 control codes except tabs + // (HT), new lines (LF), carriage returns (CR), and page breaks (FF). Default + // value is an empty string. Max length: 2000 bytes. + Description *string `json:"description,omitempty"` + + // Optional. Email or URL of the primary point of contact of the listing. + // Max Length: 1000 bytes. + PrimaryContact *string `json:"primaryContact,omitempty"` + + // Optional. Documentation describing the listing. + Documentation *string `json:"documentation,omitempty"` + + // Output only. Current state of the listing. + State *string `json:"state,omitempty"` + + // Optional. Base64 encoded image representing the listing. Max Size: 3.0MiB + // Expected image dimensions are 512x512 pixels, however the API only + // performs validation on size of the encoded data. + // Note: For byte fields, the contents of the field are base64-encoded (which + // increases the size of the data by 33-36%) when using JSON on the wire. + Icon []byte `json:"icon,omitempty"` + + // Optional. Details of the data provider who owns the source data. + DataProvider *DataProvider `json:"dataProvider,omitempty"` + + // Optional. Categories of the listing. Up to two categories are allowed. + Categories []string `json:"categories,omitempty"` + + // Optional. Details of the publisher who owns the listing and who can share + // the source data. + Publisher *Publisher `json:"publisher,omitempty"` + + // Optional. Email or URL of the request access of the listing. + // Subscribers can use this reference to request access. + // Max Length: 1000 bytes. + RequestAccess *string `json:"requestAccess,omitempty"` + + // Optional. If set, restricted export configuration will be propagated and + // enforced on the linked dataset. 
+ RestrictedExportConfig *Listing_RestrictedExportConfig `json:"restrictedExportConfig,omitempty"` + + // Optional. Type of discovery of the listing on the discovery page. + DiscoveryType *string `json:"discoveryType,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Listing.BigQueryDatasetSource +type Listing_BigQueryDatasetSource struct { + // Resource name of the dataset source for this listing. + // e.g. `projects/myproject/datasets/123` + Dataset *string `json:"dataset,omitempty"` + + // Optional. Resources in this dataset that are selectively shared. + // If this field is empty, then the entire dataset (all resources) are + // shared. This field is only valid for data clean room exchanges. + SelectedResources []Listing_BigQueryDatasetSource_SelectedResource `json:"selectedResources,omitempty"` + + // Optional. If set, restricted export policy will be propagated and + // enforced on the linked dataset. + RestrictedExportPolicy *Listing_BigQueryDatasetSource_RestrictedExportPolicy `json:"restrictedExportPolicy,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Listing.BigQueryDatasetSource.RestrictedExportPolicy +type Listing_BigQueryDatasetSource_RestrictedExportPolicy struct { + // Optional. If true, enable restricted export. + Enabled *BoolValue `json:"enabled,omitempty"` + + // Optional. If true, restrict direct table access (read + // api/tabledata.list) on linked table. + RestrictDirectTableAccess *BoolValue `json:"restrictDirectTableAccess,omitempty"` + + // Optional. If true, restrict export of query result derived from + // restricted linked dataset table. + RestrictQueryResult *BoolValue `json:"restrictQueryResult,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Listing.BigQueryDatasetSource.SelectedResource +type Listing_BigQueryDatasetSource_SelectedResource struct { + // Optional. Format: + // For table: + // `projects/{projectId}/datasets/{datasetId}/tables/{tableId}` + // Example:"projects/test_project/datasets/test_dataset/tables/test_table" + Table *string `json:"table,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Listing.RestrictedExportConfig +type Listing_RestrictedExportConfig struct { + // Optional. If true, enable restricted export. + Enabled *bool `json:"enabled,omitempty"` + + // Output only. If true, restrict direct table access(read + // api/tabledata.list) on linked table. + RestrictDirectTableAccess *bool `json:"restrictDirectTableAccess,omitempty"` + + // Optional. If true, restrict export of query result derived from + // restricted linked dataset table. + RestrictQueryResult *bool `json:"restrictQueryResult,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.analyticshub.v1.Publisher +type Publisher struct { + // Optional. Name of the listing publisher. + Name *string `json:"name,omitempty"` + + // Optional. Email or URL of the listing publisher. + // Max Length: 1000 bytes. + PrimaryContact *string `json:"primaryContact,omitempty"` +} + +// +kcc:proto=google.protobuf.BoolValue +type BoolValue struct { + // The bool value. 
+ Value *bool `json:"value,omitempty"` +} diff --git a/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go b/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go new file mode 100644 index 0000000000..294a071038 --- /dev/null +++ b/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go @@ -0,0 +1,576 @@ +//go:build !ignore_autogenerated + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by controller-gen. DO NOT EDIT. + +package v1beta1 + +import ( + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopyInto(out *BigQueryAnalyticsHubDataExchange) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchange. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopy() *BigQueryAnalyticsHubDataExchange { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchange) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]BigQueryAnalyticsHubDataExchange, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeList. +func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopy() *BigQueryAnalyticsHubDataExchangeList { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BigQueryAnalyticsHubDataExchangeObservedState) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeObservedState) { + *out = *in + if in.ListingCount != nil { + in, out := &in.ListingCount, &out.ListingCount + *out = new(int64) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeObservedState. +func (in *BigQueryAnalyticsHubDataExchangeObservedState) DeepCopy() *BigQueryAnalyticsHubDataExchangeObservedState { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeObservedState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeSpec) { + *out = *in + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.Description != nil { + in, out := &in.Description, &out.Description + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } + if in.Documentation != nil { + in, out := &in.Documentation, &out.Documentation + *out = new(string) + **out = **in + } + if in.DiscoveryType != nil { + in, out := &in.DiscoveryType, &out.DiscoveryType + *out = new(string) + **out = **in + } + if in.ProjectRef != nil { + in, out := &in.ProjectRef, &out.ProjectRef + *out = new(refsv1beta1.ProjectRef) + **out = **in + } + if in.ResourceID != nil { + in, out := &in.ResourceID, &out.ResourceID + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeSpec. +func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopy() *BigQueryAnalyticsHubDataExchangeSpec { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeStatus) { + *out = *in + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]v1alpha1.Condition, len(*in)) + copy(*out, *in) + } + if in.ObservedGeneration != nil { + in, out := &in.ObservedGeneration, &out.ObservedGeneration + *out = new(int64) + **out = **in + } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } + if in.ObservedState != nil { + in, out := &in.ObservedState, &out.ObservedState + *out = new(BigQueryAnalyticsHubDataExchangeObservedState) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeStatus. +func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopy() *BigQueryAnalyticsHubDataExchangeStatus { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BoolValue) DeepCopyInto(out *BoolValue) { + *out = *in + if in.Value != nil { + in, out := &in.Value, &out.Value + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BoolValue. +func (in *BoolValue) DeepCopy() *BoolValue { + if in == nil { + return nil + } + out := new(BoolValue) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataExchange) DeepCopyInto(out *DataExchange) { + *out = *in + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.Description != nil { + in, out := &in.Description, &out.Description + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } + if in.Documentation != nil { + in, out := &in.Documentation, &out.Documentation + *out = new(string) + **out = **in + } + if in.Icon != nil { + in, out := &in.Icon, &out.Icon + *out = make([]byte, len(*in)) + copy(*out, *in) + } + if in.SharingEnvironmentConfig != nil { + in, out := &in.SharingEnvironmentConfig, &out.SharingEnvironmentConfig + *out = new(SharingEnvironmentConfig) + (*in).DeepCopyInto(*out) + } + if in.DiscoveryType != nil { + in, out := &in.DiscoveryType, &out.DiscoveryType + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataExchange. +func (in *DataExchange) DeepCopy() *DataExchange { + if in == nil { + return nil + } + out := new(DataExchange) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataProvider) DeepCopyInto(out *DataProvider) { + *out = *in + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProvider. +func (in *DataProvider) DeepCopy() *DataProvider { + if in == nil { + return nil + } + out := new(DataProvider) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Listing) DeepCopyInto(out *Listing) { + *out = *in + if in.BigqueryDataset != nil { + in, out := &in.BigqueryDataset, &out.BigqueryDataset + *out = new(Listing_BigQueryDatasetSource) + (*in).DeepCopyInto(*out) + } + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.Description != nil { + in, out := &in.Description, &out.Description + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } + if in.Documentation != nil { + in, out := &in.Documentation, &out.Documentation + *out = new(string) + **out = **in + } + if in.State != nil { + in, out := &in.State, &out.State + *out = new(string) + **out = **in + } + if in.Icon != nil { + in, out := &in.Icon, &out.Icon + *out = make([]byte, len(*in)) + copy(*out, *in) + } + if in.DataProvider != nil { + in, out := &in.DataProvider, &out.DataProvider + *out = new(DataProvider) + (*in).DeepCopyInto(*out) + } + if in.Categories != nil { + in, out := &in.Categories, &out.Categories + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Publisher != nil { + in, out := &in.Publisher, &out.Publisher + *out = new(Publisher) + (*in).DeepCopyInto(*out) + } + if in.RequestAccess != nil { + in, out := &in.RequestAccess, &out.RequestAccess + *out = new(string) + **out = **in + } + if in.RestrictedExportConfig != nil { + in, out := &in.RestrictedExportConfig, &out.RestrictedExportConfig + *out = new(Listing_RestrictedExportConfig) + (*in).DeepCopyInto(*out) + } + if in.DiscoveryType != nil { + in, out := &in.DiscoveryType, &out.DiscoveryType + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listing. +func (in *Listing) DeepCopy() *Listing { + if in == nil { + return nil + } + out := new(Listing) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Listing_BigQueryDatasetSource) DeepCopyInto(out *Listing_BigQueryDatasetSource) { + *out = *in + if in.Dataset != nil { + in, out := &in.Dataset, &out.Dataset + *out = new(string) + **out = **in + } + if in.SelectedResources != nil { + in, out := &in.SelectedResources, &out.SelectedResources + *out = make([]Listing_BigQueryDatasetSource_SelectedResource, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.RestrictedExportPolicy != nil { + in, out := &in.RestrictedExportPolicy, &out.RestrictedExportPolicy + *out = new(Listing_BigQueryDatasetSource_RestrictedExportPolicy) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listing_BigQueryDatasetSource. +func (in *Listing_BigQueryDatasetSource) DeepCopy() *Listing_BigQueryDatasetSource { + if in == nil { + return nil + } + out := new(Listing_BigQueryDatasetSource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Listing_BigQueryDatasetSource_RestrictedExportPolicy) DeepCopyInto(out *Listing_BigQueryDatasetSource_RestrictedExportPolicy) { + *out = *in + if in.Enabled != nil { + in, out := &in.Enabled, &out.Enabled + *out = new(BoolValue) + (*in).DeepCopyInto(*out) + } + if in.RestrictDirectTableAccess != nil { + in, out := &in.RestrictDirectTableAccess, &out.RestrictDirectTableAccess + *out = new(BoolValue) + (*in).DeepCopyInto(*out) + } + if in.RestrictQueryResult != nil { + in, out := &in.RestrictQueryResult, &out.RestrictQueryResult + *out = new(BoolValue) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listing_BigQueryDatasetSource_RestrictedExportPolicy. +func (in *Listing_BigQueryDatasetSource_RestrictedExportPolicy) DeepCopy() *Listing_BigQueryDatasetSource_RestrictedExportPolicy { + if in == nil { + return nil + } + out := new(Listing_BigQueryDatasetSource_RestrictedExportPolicy) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Listing_BigQueryDatasetSource_SelectedResource) DeepCopyInto(out *Listing_BigQueryDatasetSource_SelectedResource) { + *out = *in + if in.Table != nil { + in, out := &in.Table, &out.Table + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listing_BigQueryDatasetSource_SelectedResource. +func (in *Listing_BigQueryDatasetSource_SelectedResource) DeepCopy() *Listing_BigQueryDatasetSource_SelectedResource { + if in == nil { + return nil + } + out := new(Listing_BigQueryDatasetSource_SelectedResource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Listing_RestrictedExportConfig) DeepCopyInto(out *Listing_RestrictedExportConfig) { + *out = *in + if in.Enabled != nil { + in, out := &in.Enabled, &out.Enabled + *out = new(bool) + **out = **in + } + if in.RestrictDirectTableAccess != nil { + in, out := &in.RestrictDirectTableAccess, &out.RestrictDirectTableAccess + *out = new(bool) + **out = **in + } + if in.RestrictQueryResult != nil { + in, out := &in.RestrictQueryResult, &out.RestrictQueryResult + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listing_RestrictedExportConfig. +func (in *Listing_RestrictedExportConfig) DeepCopy() *Listing_RestrictedExportConfig { + if in == nil { + return nil + } + out := new(Listing_RestrictedExportConfig) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Publisher) DeepCopyInto(out *Publisher) { + *out = *in + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Publisher. +func (in *Publisher) DeepCopy() *Publisher { + if in == nil { + return nil + } + out := new(Publisher) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *SharingEnvironmentConfig) DeepCopyInto(out *SharingEnvironmentConfig) { + *out = *in + if in.DefaultExchangeConfig != nil { + in, out := &in.DefaultExchangeConfig, &out.DefaultExchangeConfig + *out = new(SharingEnvironmentConfig_DefaultExchangeConfig) + **out = **in + } + if in.DcrExchangeConfig != nil { + in, out := &in.DcrExchangeConfig, &out.DcrExchangeConfig + *out = new(SharingEnvironmentConfig_DcrExchangeConfig) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SharingEnvironmentConfig. +func (in *SharingEnvironmentConfig) DeepCopy() *SharingEnvironmentConfig { + if in == nil { + return nil + } + out := new(SharingEnvironmentConfig) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SharingEnvironmentConfig_DcrExchangeConfig) DeepCopyInto(out *SharingEnvironmentConfig_DcrExchangeConfig) { + *out = *in + if in.SingleSelectedResourceSharingRestriction != nil { + in, out := &in.SingleSelectedResourceSharingRestriction, &out.SingleSelectedResourceSharingRestriction + *out = new(bool) + **out = **in + } + if in.SingleLinkedDatasetPerCleanroom != nil { + in, out := &in.SingleLinkedDatasetPerCleanroom, &out.SingleLinkedDatasetPerCleanroom + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SharingEnvironmentConfig_DcrExchangeConfig. +func (in *SharingEnvironmentConfig_DcrExchangeConfig) DeepCopy() *SharingEnvironmentConfig_DcrExchangeConfig { + if in == nil { + return nil + } + out := new(SharingEnvironmentConfig_DcrExchangeConfig) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SharingEnvironmentConfig_DefaultExchangeConfig) DeepCopyInto(out *SharingEnvironmentConfig_DefaultExchangeConfig) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SharingEnvironmentConfig_DefaultExchangeConfig. 
+func (in *SharingEnvironmentConfig_DefaultExchangeConfig) DeepCopy() *SharingEnvironmentConfig_DefaultExchangeConfig { + if in == nil { + return nil + } + out := new(SharingEnvironmentConfig_DefaultExchangeConfig) + in.DeepCopyInto(out) + return out +} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryanalyticshubdataexchanges.bigqueryanalyticshub.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryanalyticshubdataexchanges.bigqueryanalyticshub.cnrm.cloud.google.com.yaml index b9a90ec2b0..c246a03f79 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryanalyticshubdataexchanges.bigqueryanalyticshub.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigqueryanalyticshubdataexchanges.bigqueryanalyticshub.cnrm.cloud.google.com.yaml @@ -188,6 +188,176 @@ spec: - spec type: object served: true + storage: false + subresources: + status: {} + - additionalPrinterColumns: + - jsonPath: .metadata.creationTimestamp + name: Age + type: date + - description: When 'True', the most recent reconcile of the resource succeeded + jsonPath: .status.conditions[?(@.type=='Ready')].status + name: Ready + type: string + - description: The reason for the value in 'Ready' + jsonPath: .status.conditions[?(@.type=='Ready')].reason + name: Status + type: string + - description: The last transition time for the value in 'Status' + jsonPath: .status.conditions[?(@.type=='Ready')].lastTransitionTime + name: Status Age + type: date + name: v1beta1 + schema: + openAPIV3Schema: + description: BigQueryAnalyticsHubDataExchange is the Schema for the BigQueryAnalyticsHubDataExchange + API + properties: + apiVersion: + description: 'APIVersion defines the versioned schema of this representation + of an object. Servers should convert recognized schemas to the latest + internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' + type: string + kind: + description: 'Kind is a string value representing the REST resource this + object represents. Servers may infer this from the endpoint the client + submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' + type: string + metadata: + type: object + spec: + description: BigQueryAnalyticsHubDataExchangeSpec defines the desired + state of BigQueryAnalyticsHubDataExchange + properties: + description: + description: 'Optional. Description of the data exchange. The description + must not contain Unicode non-characters as well as C0 and C1 control + codes except tabs (HT), new lines (LF), carriage returns (CR), and + page breaks (FF). Default value is an empty string. Max length: + 2000 bytes.' + type: string + discoveryType: + description: Optional. Type of discovery on the discovery page for + all the listings under this exchange. Updating this field also updates + (overwrites) the discovery_type field for all the listings under + this exchange. + type: string + displayName: + description: 'Required. Human-readable display name of the data exchange. + The display name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), ampersands (&) and must + not start or end with spaces. Default value is an empty string. + Max length: 63 bytes.' 
+ type: string + documentation: + description: Optional. Documentation describing the data exchange. + type: string + location: + description: Immutable. The name of the location this data exchange. + type: string + primaryContact: + description: 'Optional. Email or URL of the primary point of contact + of the data exchange. Max Length: 1000 bytes.' + type: string + projectRef: + description: The project that this resource belongs to. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: The `projectID` field of a project, when not managed + by Config Connector. + type: string + kind: + description: The kind of the Project resource; optional but must + be `Project` if provided. + type: string + name: + description: The `name` field of a `Project` resource. + type: string + namespace: + description: The `namespace` field of a `Project` resource. + type: string + type: object + resourceID: + description: Immutable. The BigQueryAnalyticsHubDataExchange name. + If not given, the metadata.name will be used. + type: string + x-kubernetes-validations: + - message: ResourceID field is immutable + rule: self == oldSelf + required: + - location + - projectRef + type: object + status: + description: BigQueryAnalyticsHubDataExchangeStatus defines the config + connector machine state of BigQueryAnalyticsHubDataExchange + properties: + conditions: + description: Conditions represent the latest available observations + of the object's current state. + items: + properties: + lastTransitionTime: + description: Last time the condition transitioned from one status + to another. + type: string + message: + description: Human-readable message indicating details about + last transition. + type: string + reason: + description: Unique, one-word, CamelCase reason for the condition's + last transition. + type: string + status: + description: Status is the status of the condition. Can be True, + False, Unknown. + type: string + type: + description: Type is the type of the condition. + type: string + type: object + type: array + externalRef: + description: A unique specifier for the BigQueryAnalyticsHubDataExchange + resource in GCP. + type: string + observedGeneration: + description: ObservedGeneration is the generation of the resource + that was most recently observed by the Config Connector controller. + If this is equal to metadata.generation, then that means that the + current reported status reflects the most recent desired state of + the resource. + format: int64 + type: integer + observedState: + description: ObservedState is the state of the resource as most recently + observed in GCP. + properties: + listingCount: + description: Number of listings contained in the data exchange. 
+ format: int64 + type: integer + type: object + type: object + required: + - spec + type: object + served: true storage: true subresources: status: {} diff --git a/config/samples/resources/bigqueryanalyticshubdataexchange/bigqueryanalyticshub_v1beta1_bigqueryanalyticshubdataexchange.yaml b/config/samples/resources/bigqueryanalyticshubdataexchange/bigqueryanalyticshub_v1beta1_bigqueryanalyticshubdataexchange.yaml new file mode 100644 index 0000000000..86336b9111 --- /dev/null +++ b/config/samples/resources/bigqueryanalyticshubdataexchange/bigqueryanalyticshub_v1beta1_bigqueryanalyticshubdataexchange.yaml @@ -0,0 +1,27 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 +kind: BigQueryAnalyticsHubDataExchange +metadata: + name: bigqueryanalyticshubdataexchangesample +spec: + displayName: my_data_exchange + description: example data exchange + primaryContact: a@contact.com + documentation: a documentation + discoveryType: DISCOVERY_TYPE_PRIVATE + location: US + projectRef: + external: ${PROJECT_ID?} diff --git a/config/servicemappings/bigqueryanalyticshub.yaml b/config/servicemappings/bigqueryanalyticshub.yaml new file mode 100644 index 0000000000..4e52eb9763 --- /dev/null +++ b/config/servicemappings/bigqueryanalyticshub.yaml @@ -0,0 +1,27 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: core.cnrm.cloud.google.com/v1alpha1 +kind: ServiceMapping +metadata: + name: bigqueryanalyticshub.cnrm.cloud.google.com + namespace: cnrm-system +spec: + name: BigQueryAnalyticsHub + version: v1beta1 + serviceHostName: "analyticshub.googleapis.com" + resources: + - name: google_bigquery_analytics_hub_data_exchange + kind: BigQueryAnalyticsHubDataExchange + direct: true \ No newline at end of file diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/register.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/register.go index f8e5303f12..c8cc4c8702 100644 --- a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/register.go +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/register.go @@ -53,12 +53,6 @@ var ( // AddToScheme is a global function that registers this API group & version to a scheme AddToScheme = SchemeBuilder.AddToScheme - BigQueryAnalyticsHubDataExchangeGVK = schema.GroupVersionKind{ - Group: SchemeGroupVersion.Group, - Version: SchemeGroupVersion.Version, - Kind: reflect.TypeOf(BigQueryAnalyticsHubDataExchange{}).Name(), - } - BigQueryAnalyticsHubListingGVK = schema.GroupVersionKind{ Group: SchemeGroupVersion.Group, Version: SchemeGroupVersion.Version, diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/zz_generated.deepcopy.go index 75d5db8c6a..76208fe3ea 100644 --- a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/zz_generated.deepcopy.go @@ -29,150 +29,6 @@ import ( runtime "k8s.io/apimachinery/pkg/runtime" ) -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryAnalyticsHubDataExchange) DeepCopyInto(out *BigQueryAnalyticsHubDataExchange) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - in.Status.DeepCopyInto(&out.Status) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchange. -func (in *BigQueryAnalyticsHubDataExchange) DeepCopy() *BigQueryAnalyticsHubDataExchange { - if in == nil { - return nil - } - out := new(BigQueryAnalyticsHubDataExchange) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *BigQueryAnalyticsHubDataExchange) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]BigQueryAnalyticsHubDataExchange, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeList. 
-func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopy() *BigQueryAnalyticsHubDataExchangeList { - if in == nil { - return nil - } - out := new(BigQueryAnalyticsHubDataExchangeList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeSpec) { - *out = *in - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.DiscoveryType != nil { - in, out := &in.DiscoveryType, &out.DiscoveryType - *out = new(string) - **out = **in - } - if in.DisplayName != nil { - in, out := &in.DisplayName, &out.DisplayName - *out = new(string) - **out = **in - } - if in.Documentation != nil { - in, out := &in.Documentation, &out.Documentation - *out = new(string) - **out = **in - } - if in.PrimaryContact != nil { - in, out := &in.PrimaryContact, &out.PrimaryContact - *out = new(string) - **out = **in - } - out.ProjectRef = in.ProjectRef - if in.ResourceID != nil { - in, out := &in.ResourceID, &out.ResourceID - *out = new(string) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeSpec. -func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopy() *BigQueryAnalyticsHubDataExchangeSpec { - if in == nil { - return nil - } - out := new(BigQueryAnalyticsHubDataExchangeSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeStatus) { - *out = *in - if in.Conditions != nil { - in, out := &in.Conditions, &out.Conditions - *out = make([]k8sv1alpha1.Condition, len(*in)) - copy(*out, *in) - } - if in.ExternalRef != nil { - in, out := &in.ExternalRef, &out.ExternalRef - *out = new(string) - **out = **in - } - if in.ObservedGeneration != nil { - in, out := &in.ObservedGeneration, &out.ObservedGeneration - *out = new(int64) - **out = **in - } - if in.ObservedState != nil { - in, out := &in.ObservedState, &out.ObservedState - *out = new(DataexchangeObservedStateStatus) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeStatus. -func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopy() *BigQueryAnalyticsHubDataExchangeStatus { - if in == nil { - return nil - } - out := new(BigQueryAnalyticsHubDataExchangeStatus) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryAnalyticsHubListing) DeepCopyInto(out *BigQueryAnalyticsHubListing) { *out = *in @@ -334,27 +190,6 @@ func (in *BigQueryAnalyticsHubListingStatus) DeepCopy() *BigQueryAnalyticsHubLis return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DataexchangeObservedStateStatus) DeepCopyInto(out *DataexchangeObservedStateStatus) { - *out = *in - if in.ListingCount != nil { - in, out := &in.ListingCount, &out.ListingCount - *out = new(int64) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataexchangeObservedStateStatus. -func (in *DataexchangeObservedStateStatus) DeepCopy() *DataexchangeObservedStateStatus { - if in == nil { - return nil - } - out := new(DataexchangeObservedStateStatus) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ListingBigQueryDatasetSource) DeepCopyInto(out *ListingBigQueryDatasetSource) { *out = *in diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange_types.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange_types.go similarity index 99% rename from pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange_types.go rename to pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange_types.go index ff289f3469..0d0b0e45f3 100644 --- a/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange_types.go +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange_types.go @@ -28,7 +28,7 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -package v1alpha1 +package v1beta1 import ( "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/doc.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/doc.go new file mode 100644 index 0000000000..d8b2494792 --- /dev/null +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/doc.go @@ -0,0 +1,38 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Config Connector and manual +// changes will be clobbered when the file is regenerated. +// +// ---------------------------------------------------------------------------- + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Package v1beta1 contains API Schema definitions for the bigqueryanalyticshub v1beta1 API group. 
+// +k8s:openapi-gen=true +// +k8s:deepcopy-gen=package,register +// +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/bigqueryanalyticshub +// +k8s:defaulter-gen=TypeMeta +// +groupName=bigqueryanalyticshub.cnrm.cloud.google.com + +package v1beta1 diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/register.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/register.go new file mode 100644 index 0000000000..7e6bbbdeaa --- /dev/null +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/register.go @@ -0,0 +1,63 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Config Connector and manual +// changes will be clobbered when the file is regenerated. +// +// ---------------------------------------------------------------------------- + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Package v1beta1 contains API Schema definitions for the bigqueryanalyticshub v1beta1 API group. +// +k8s:openapi-gen=true +// +k8s:deepcopy-gen=package,register +// +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/bigqueryanalyticshub +// +k8s:defaulter-gen=TypeMeta +// +groupName=bigqueryanalyticshub.cnrm.cloud.google.com +package v1beta1 + +import ( + "reflect" + + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // SchemeGroupVersion is the group version used to register these objects. + SchemeGroupVersion = schema.GroupVersion{Group: "bigqueryanalyticshub.cnrm.cloud.google.com", Version: "v1beta1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme. 
+ SchemeBuilder = &scheme.Builder{GroupVersion: SchemeGroupVersion} + + // AddToScheme is a global function that registers this API group & version to a scheme + AddToScheme = SchemeBuilder.AddToScheme + + BigQueryAnalyticsHubDataExchangeGVK = schema.GroupVersionKind{ + Group: SchemeGroupVersion.Group, + Version: SchemeGroupVersion.Version, + Kind: reflect.TypeOf(BigQueryAnalyticsHubDataExchange{}).Name(), + } + + bigqueryanalyticshubAPIVersion = SchemeGroupVersion.String() +) diff --git a/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go new file mode 100644 index 0000000000..84471fd77a --- /dev/null +++ b/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1/zz_generated.deepcopy.go @@ -0,0 +1,195 @@ +//go:build !ignore_autogenerated +// +build !ignore_autogenerated + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by deepcopy-gen. DO NOT EDIT. + +package v1beta1 + +import ( + v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopyInto(out *BigQueryAnalyticsHubDataExchange) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchange. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopy() *BigQueryAnalyticsHubDataExchange { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchange) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *BigQueryAnalyticsHubDataExchange) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]BigQueryAnalyticsHubDataExchange, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeList. +func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopy() *BigQueryAnalyticsHubDataExchangeList { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *BigQueryAnalyticsHubDataExchangeList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeSpec) { + *out = *in + if in.Description != nil { + in, out := &in.Description, &out.Description + *out = new(string) + **out = **in + } + if in.DiscoveryType != nil { + in, out := &in.DiscoveryType, &out.DiscoveryType + *out = new(string) + **out = **in + } + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.Documentation != nil { + in, out := &in.Documentation, &out.Documentation + *out = new(string) + **out = **in + } + if in.PrimaryContact != nil { + in, out := &in.PrimaryContact, &out.PrimaryContact + *out = new(string) + **out = **in + } + out.ProjectRef = in.ProjectRef + if in.ResourceID != nil { + in, out := &in.ResourceID, &out.ResourceID + *out = new(string) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeSpec. +func (in *BigQueryAnalyticsHubDataExchangeSpec) DeepCopy() *BigQueryAnalyticsHubDataExchangeSpec { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopyInto(out *BigQueryAnalyticsHubDataExchangeStatus) { + *out = *in + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]v1alpha1.Condition, len(*in)) + copy(*out, *in) + } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } + if in.ObservedGeneration != nil { + in, out := &in.ObservedGeneration, &out.ObservedGeneration + *out = new(int64) + **out = **in + } + if in.ObservedState != nil { + in, out := &in.ObservedState, &out.ObservedState + *out = new(DataexchangeObservedStateStatus) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryAnalyticsHubDataExchangeStatus. 
+func (in *BigQueryAnalyticsHubDataExchangeStatus) DeepCopy() *BigQueryAnalyticsHubDataExchangeStatus { + if in == nil { + return nil + } + out := new(BigQueryAnalyticsHubDataExchangeStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataexchangeObservedStateStatus) DeepCopyInto(out *DataexchangeObservedStateStatus) { + *out = *in + if in.ListingCount != nil { + in, out := &in.ListingCount, &out.ListingCount + *out = new(int64) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataexchangeObservedStateStatus. +func (in *DataexchangeObservedStateStatus) DeepCopy() *DataexchangeObservedStateStatus { + if in == nil { + return nil + } + out := new(DataexchangeObservedStateStatus) + in.DeepCopyInto(out) + return out +} diff --git a/pkg/clients/generated/client/clientset/versioned/clientset.go b/pkg/clients/generated/client/clientset/versioned/clientset.go index 574df82955..6c1153d140 100644 --- a/pkg/clients/generated/client/clientset/versioned/clientset.go +++ b/pkg/clients/generated/client/clientset/versioned/clientset.go @@ -38,6 +38,7 @@ import ( bigqueryv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquery/v1alpha1" bigqueryv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquery/v1beta1" bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1" + bigqueryanalyticshubv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatapolicy/v1alpha1" bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1" @@ -166,6 +167,7 @@ type Interface interface { BigqueryV1alpha1() bigqueryv1alpha1.BigqueryV1alpha1Interface BigqueryV1beta1() bigqueryv1beta1.BigqueryV1beta1Interface BigqueryanalyticshubV1alpha1() bigqueryanalyticshubv1alpha1.BigqueryanalyticshubV1alpha1Interface + BigqueryanalyticshubV1beta1() bigqueryanalyticshubv1beta1.BigqueryanalyticshubV1beta1Interface BigqueryconnectionV1alpha1() bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Interface BigquerydatapolicyV1alpha1() bigquerydatapolicyv1alpha1.BigquerydatapolicyV1alpha1Interface BigquerydatatransferV1beta1() bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Interface @@ -292,6 +294,7 @@ type Clientset struct { bigqueryV1alpha1 *bigqueryv1alpha1.BigqueryV1alpha1Client bigqueryV1beta1 *bigqueryv1beta1.BigqueryV1beta1Client bigqueryanalyticshubV1alpha1 *bigqueryanalyticshubv1alpha1.BigqueryanalyticshubV1alpha1Client + bigqueryanalyticshubV1beta1 *bigqueryanalyticshubv1beta1.BigqueryanalyticshubV1beta1Client bigqueryconnectionV1alpha1 *bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Client bigquerydatapolicyV1alpha1 *bigquerydatapolicyv1alpha1.BigquerydatapolicyV1alpha1Client 
bigquerydatatransferV1beta1 *bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Client @@ -467,6 +470,11 @@ func (c *Clientset) BigqueryanalyticshubV1alpha1() bigqueryanalyticshubv1alpha1. return c.bigqueryanalyticshubV1alpha1 } +// BigqueryanalyticshubV1beta1 retrieves the BigqueryanalyticshubV1beta1Client +func (c *Clientset) BigqueryanalyticshubV1beta1() bigqueryanalyticshubv1beta1.BigqueryanalyticshubV1beta1Interface { + return c.bigqueryanalyticshubV1beta1 +} + // BigqueryconnectionV1alpha1 retrieves the BigqueryconnectionV1alpha1Client func (c *Clientset) BigqueryconnectionV1alpha1() bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Interface { return c.bigqueryconnectionV1alpha1 @@ -1103,6 +1111,10 @@ func NewForConfigAndClient(c *rest.Config, httpClient *http.Client) (*Clientset, if err != nil { return nil, err } + cs.bigqueryanalyticshubV1beta1, err = bigqueryanalyticshubv1beta1.NewForConfigAndClient(&configShallowCopy, httpClient) + if err != nil { + return nil, err + } cs.bigqueryconnectionV1alpha1, err = bigqueryconnectionv1alpha1.NewForConfigAndClient(&configShallowCopy, httpClient) if err != nil { return nil, err @@ -1569,6 +1581,7 @@ func New(c rest.Interface) *Clientset { cs.bigqueryV1alpha1 = bigqueryv1alpha1.New(c) cs.bigqueryV1beta1 = bigqueryv1beta1.New(c) cs.bigqueryanalyticshubV1alpha1 = bigqueryanalyticshubv1alpha1.New(c) + cs.bigqueryanalyticshubV1beta1 = bigqueryanalyticshubv1beta1.New(c) cs.bigqueryconnectionV1alpha1 = bigqueryconnectionv1alpha1.New(c) cs.bigquerydatapolicyV1alpha1 = bigquerydatapolicyv1alpha1.New(c) cs.bigquerydatatransferV1beta1 = bigquerydatatransferv1beta1.New(c) diff --git a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go index 03c38cbede..72077fce72 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go @@ -49,6 +49,8 @@ import ( fakebigqueryv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquery/v1beta1/fake" bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1" fakebigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake" + bigqueryanalyticshubv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1" + fakebigqueryanalyticshubv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryconnection/v1alpha1" fakebigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryconnection/v1alpha1/fake" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatapolicy/v1alpha1" @@ -387,6 +389,11 @@ func (c *Clientset) BigqueryanalyticshubV1alpha1() bigqueryanalyticshubv1alpha1. 
return &fakebigqueryanalyticshubv1alpha1.FakeBigqueryanalyticshubV1alpha1{Fake: &c.Fake} } +// BigqueryanalyticshubV1beta1 retrieves the BigqueryanalyticshubV1beta1Client +func (c *Clientset) BigqueryanalyticshubV1beta1() bigqueryanalyticshubv1beta1.BigqueryanalyticshubV1beta1Interface { + return &fakebigqueryanalyticshubv1beta1.FakeBigqueryanalyticshubV1beta1{Fake: &c.Fake} +} + // BigqueryconnectionV1alpha1 retrieves the BigqueryconnectionV1alpha1Client func (c *Clientset) BigqueryconnectionV1alpha1() bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Interface { return &fakebigqueryconnectionv1alpha1.FakeBigqueryconnectionV1alpha1{Fake: &c.Fake} diff --git a/pkg/clients/generated/client/clientset/versioned/fake/register.go b/pkg/clients/generated/client/clientset/versioned/fake/register.go index 0752d27d80..80b32ac689 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/register.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/register.go @@ -35,6 +35,7 @@ import ( bigqueryv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquery/v1alpha1" bigqueryv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquery/v1beta1" bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" + bigqueryanalyticshubv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatapolicy/v1alpha1" bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" @@ -167,6 +168,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ bigqueryv1alpha1.AddToScheme, bigqueryv1beta1.AddToScheme, bigqueryanalyticshubv1alpha1.AddToScheme, + bigqueryanalyticshubv1beta1.AddToScheme, bigqueryconnectionv1alpha1.AddToScheme, bigquerydatapolicyv1alpha1.AddToScheme, bigquerydatatransferv1beta1.AddToScheme, diff --git a/pkg/clients/generated/client/clientset/versioned/scheme/register.go b/pkg/clients/generated/client/clientset/versioned/scheme/register.go index f769711b87..6644207546 100644 --- a/pkg/clients/generated/client/clientset/versioned/scheme/register.go +++ b/pkg/clients/generated/client/clientset/versioned/scheme/register.go @@ -35,6 +35,7 @@ import ( bigqueryv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquery/v1alpha1" bigqueryv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquery/v1beta1" bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" + bigqueryanalyticshubv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatapolicy/v1alpha1" bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" @@ 
-167,6 +168,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ bigqueryv1alpha1.AddToScheme, bigqueryv1beta1.AddToScheme, bigqueryanalyticshubv1alpha1.AddToScheme, + bigqueryanalyticshubv1beta1.AddToScheme, bigqueryconnectionv1alpha1.AddToScheme, bigquerydatapolicyv1alpha1.AddToScheme, bigquerydatatransferv1beta1.AddToScheme, diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshub_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshub_client.go index 695c41612a..ff1c08a113 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshub_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshub_client.go @@ -31,7 +31,6 @@ import ( type BigqueryanalyticshubV1alpha1Interface interface { RESTClient() rest.Interface - BigQueryAnalyticsHubDataExchangesGetter BigQueryAnalyticsHubListingsGetter } @@ -40,10 +39,6 @@ type BigqueryanalyticshubV1alpha1Client struct { restClient rest.Interface } -func (c *BigqueryanalyticshubV1alpha1Client) BigQueryAnalyticsHubDataExchanges(namespace string) BigQueryAnalyticsHubDataExchangeInterface { - return newBigQueryAnalyticsHubDataExchanges(c, namespace) -} - func (c *BigqueryanalyticshubV1alpha1Client) BigQueryAnalyticsHubListings(namespace string) BigQueryAnalyticsHubListingInterface { return newBigQueryAnalyticsHubListings(c, namespace) } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshub_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshub_client.go index 50c9f4a45b..433e3566b1 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshub_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshub_client.go @@ -31,10 +31,6 @@ type FakeBigqueryanalyticshubV1alpha1 struct { *testing.Fake } -func (c *FakeBigqueryanalyticshubV1alpha1) BigQueryAnalyticsHubDataExchanges(namespace string) v1alpha1.BigQueryAnalyticsHubDataExchangeInterface { - return &FakeBigQueryAnalyticsHubDataExchanges{c, namespace} -} - func (c *FakeBigqueryanalyticshubV1alpha1) BigQueryAnalyticsHubListings(namespace string) v1alpha1.BigQueryAnalyticsHubListingInterface { return &FakeBigQueryAnalyticsHubListings{c, namespace} } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/generated_expansion.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/generated_expansion.go index 1799e8b385..9494ec9b8e 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/generated_expansion.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/generated_expansion.go @@ -21,6 +21,4 @@ package v1alpha1 -type BigQueryAnalyticsHubDataExchangeExpansion interface{} - type BigQueryAnalyticsHubListingExpansion interface{} diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshub_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshub_client.go new file mode 100644 index 0000000000..6b12773dd4 --- /dev/null +++ 
b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshub_client.go @@ -0,0 +1,110 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by client-gen. DO NOT EDIT. + +package v1beta1 + +import ( + "net/http" + + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" + rest "k8s.io/client-go/rest" +) + +type BigqueryanalyticshubV1beta1Interface interface { + RESTClient() rest.Interface + BigQueryAnalyticsHubDataExchangesGetter +} + +// BigqueryanalyticshubV1beta1Client is used to interact with features provided by the bigqueryanalyticshub.cnrm.cloud.google.com group. +type BigqueryanalyticshubV1beta1Client struct { + restClient rest.Interface +} + +func (c *BigqueryanalyticshubV1beta1Client) BigQueryAnalyticsHubDataExchanges(namespace string) BigQueryAnalyticsHubDataExchangeInterface { + return newBigQueryAnalyticsHubDataExchanges(c, namespace) +} + +// NewForConfig creates a new BigqueryanalyticshubV1beta1Client for the given config. +// NewForConfig is equivalent to NewForConfigAndClient(c, httpClient), +// where httpClient was generated with rest.HTTPClientFor(c). +func NewForConfig(c *rest.Config) (*BigqueryanalyticshubV1beta1Client, error) { + config := *c + if err := setConfigDefaults(&config); err != nil { + return nil, err + } + httpClient, err := rest.HTTPClientFor(&config) + if err != nil { + return nil, err + } + return NewForConfigAndClient(&config, httpClient) +} + +// NewForConfigAndClient creates a new BigqueryanalyticshubV1beta1Client for the given config and http client. +// Note the http client provided takes precedence over the configured transport values. +func NewForConfigAndClient(c *rest.Config, h *http.Client) (*BigqueryanalyticshubV1beta1Client, error) { + config := *c + if err := setConfigDefaults(&config); err != nil { + return nil, err + } + client, err := rest.RESTClientForConfigAndClient(&config, h) + if err != nil { + return nil, err + } + return &BigqueryanalyticshubV1beta1Client{client}, nil +} + +// NewForConfigOrDie creates a new BigqueryanalyticshubV1beta1Client for the given config and +// panics if there is an error in the config. +func NewForConfigOrDie(c *rest.Config) *BigqueryanalyticshubV1beta1Client { + client, err := NewForConfig(c) + if err != nil { + panic(err) + } + return client +} + +// New creates a new BigqueryanalyticshubV1beta1Client for the given RESTClient. 
+func New(c rest.Interface) *BigqueryanalyticshubV1beta1Client { + return &BigqueryanalyticshubV1beta1Client{c} +} + +func setConfigDefaults(config *rest.Config) error { + gv := v1beta1.SchemeGroupVersion + config.GroupVersion = &gv + config.APIPath = "/apis" + config.NegotiatedSerializer = scheme.Codecs.WithoutConversion() + + if config.UserAgent == "" { + config.UserAgent = rest.DefaultKubernetesUserAgent() + } + + return nil +} + +// RESTClient returns a RESTClient that is used to communicate +// with API server by this client implementation. +func (c *BigqueryanalyticshubV1beta1Client) RESTClient() rest.Interface { + if c == nil { + return nil + } + return c.restClient +} diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange.go similarity index 77% rename from pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange.go index 721b3f83f6..c565baddd4 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange.go @@ -19,13 +19,13 @@ // Code generated by client-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 import ( "context" "time" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1" scheme "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" @@ -41,15 +41,15 @@ type BigQueryAnalyticsHubDataExchangesGetter interface { // BigQueryAnalyticsHubDataExchangeInterface has methods to work with BigQueryAnalyticsHubDataExchange resources. 
type BigQueryAnalyticsHubDataExchangeInterface interface { - Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchange, error) - Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchange, error) - UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchange, error) + Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (*v1beta1.BigQueryAnalyticsHubDataExchange, error) + Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1beta1.BigQueryAnalyticsHubDataExchange, error) + UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1beta1.BigQueryAnalyticsHubDataExchange, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error - Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchange, error) - List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchangeList, error) + Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.BigQueryAnalyticsHubDataExchange, error) + List(ctx context.Context, opts v1.ListOptions) (*v1beta1.BigQueryAnalyticsHubDataExchangeList, error) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) - Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) + Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) BigQueryAnalyticsHubDataExchangeExpansion } @@ -60,7 +60,7 @@ type bigQueryAnalyticsHubDataExchanges struct { } // newBigQueryAnalyticsHubDataExchanges returns a BigQueryAnalyticsHubDataExchanges -func newBigQueryAnalyticsHubDataExchanges(c *BigqueryanalyticshubV1alpha1Client, namespace string) *bigQueryAnalyticsHubDataExchanges { +func newBigQueryAnalyticsHubDataExchanges(c *BigqueryanalyticshubV1beta1Client, namespace string) *bigQueryAnalyticsHubDataExchanges { return &bigQueryAnalyticsHubDataExchanges{ client: c.RESTClient(), ns: namespace, @@ -68,8 +68,8 @@ func newBigQueryAnalyticsHubDataExchanges(c *BigqueryanalyticshubV1alpha1Client, } // Get takes name of the bigQueryAnalyticsHubDataExchange, and returns the corresponding bigQueryAnalyticsHubDataExchange object, and an error if there is any. -func (c *bigQueryAnalyticsHubDataExchanges) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { - result = &v1alpha1.BigQueryAnalyticsHubDataExchange{} +func (c *bigQueryAnalyticsHubDataExchanges) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { + result = &v1beta1.BigQueryAnalyticsHubDataExchange{} err = c.client.Get(). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). 
@@ -81,12 +81,12 @@ func (c *bigQueryAnalyticsHubDataExchanges) Get(ctx context.Context, name string } // List takes label and field selectors, and returns the list of BigQueryAnalyticsHubDataExchanges that match those selectors. -func (c *bigQueryAnalyticsHubDataExchanges) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchangeList, err error) { +func (c *bigQueryAnalyticsHubDataExchanges) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchangeList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } - result = &v1alpha1.BigQueryAnalyticsHubDataExchangeList{} + result = &v1beta1.BigQueryAnalyticsHubDataExchangeList{} err = c.client.Get(). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). @@ -113,8 +113,8 @@ func (c *bigQueryAnalyticsHubDataExchanges) Watch(ctx context.Context, opts v1.L } // Create takes the representation of a bigQueryAnalyticsHubDataExchange and creates it. Returns the server's representation of the bigQueryAnalyticsHubDataExchange, and an error, if there is any. -func (c *bigQueryAnalyticsHubDataExchanges) Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { - result = &v1alpha1.BigQueryAnalyticsHubDataExchange{} +func (c *bigQueryAnalyticsHubDataExchanges) Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { + result = &v1beta1.BigQueryAnalyticsHubDataExchange{} err = c.client.Post(). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). @@ -126,8 +126,8 @@ func (c *bigQueryAnalyticsHubDataExchanges) Create(ctx context.Context, bigQuery } // Update takes the representation of a bigQueryAnalyticsHubDataExchange and updates it. Returns the server's representation of the bigQueryAnalyticsHubDataExchange, and an error, if there is any. -func (c *bigQueryAnalyticsHubDataExchanges) Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { - result = &v1alpha1.BigQueryAnalyticsHubDataExchange{} +func (c *bigQueryAnalyticsHubDataExchanges) Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { + result = &v1beta1.BigQueryAnalyticsHubDataExchange{} err = c.client.Put(). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). @@ -141,8 +141,8 @@ func (c *bigQueryAnalyticsHubDataExchanges) Update(ctx context.Context, bigQuery // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *bigQueryAnalyticsHubDataExchanges) UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { - result = &v1alpha1.BigQueryAnalyticsHubDataExchange{} +func (c *bigQueryAnalyticsHubDataExchanges) UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { + result = &v1beta1.BigQueryAnalyticsHubDataExchange{} err = c.client.Put(). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). @@ -183,8 +183,8 @@ func (c *bigQueryAnalyticsHubDataExchanges) DeleteCollection(ctx context.Context } // Patch applies the patch and returns the patched bigQueryAnalyticsHubDataExchange. -func (c *bigQueryAnalyticsHubDataExchanges) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { - result = &v1alpha1.BigQueryAnalyticsHubDataExchange{} +func (c *bigQueryAnalyticsHubDataExchanges) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { + result = &v1beta1.BigQueryAnalyticsHubDataExchange{} err = c.client.Patch(pt). Namespace(c.ns). Resource("bigqueryanalyticshubdataexchanges"). diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/doc.go new file mode 100644 index 0000000000..41dbecdb4a --- /dev/null +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/doc.go @@ -0,0 +1,23 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by client-gen. DO NOT EDIT. + +// This package has the automatically generated typed clients. +package v1beta1 diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/doc.go new file mode 100644 index 0000000000..dfbe79f9af --- /dev/null +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/doc.go @@ -0,0 +1,23 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by client-gen. DO NOT EDIT. + +// Package fake has the automatically generated clients. +package fake diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshub_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshub_client.go new file mode 100644 index 0000000000..eb3515683a --- /dev/null +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshub_client.go @@ -0,0 +1,43 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by client-gen. DO NOT EDIT. + +package fake + +import ( + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1" + rest "k8s.io/client-go/rest" + testing "k8s.io/client-go/testing" +) + +type FakeBigqueryanalyticshubV1beta1 struct { + *testing.Fake +} + +func (c *FakeBigqueryanalyticshubV1beta1) BigQueryAnalyticsHubDataExchanges(namespace string) v1beta1.BigQueryAnalyticsHubDataExchangeInterface { + return &FakeBigQueryAnalyticsHubDataExchanges{c, namespace} +} + +// RESTClient returns a RESTClient that is used to communicate +// with API server by this client implementation. 
+func (c *FakeBigqueryanalyticshubV1beta1) RESTClient() rest.Interface { + var ret *rest.RESTClient + return ret +} diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshubdataexchange.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshubdataexchange.go similarity index 66% rename from pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshubdataexchange.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshubdataexchange.go index 9aca3fe237..0546758729 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1/fake/fake_bigqueryanalyticshubdataexchange.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/fake/fake_bigqueryanalyticshubdataexchange.go @@ -24,7 +24,7 @@ package fake import ( "context" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" types "k8s.io/apimachinery/pkg/types" @@ -34,29 +34,29 @@ import ( // FakeBigQueryAnalyticsHubDataExchanges implements BigQueryAnalyticsHubDataExchangeInterface type FakeBigQueryAnalyticsHubDataExchanges struct { - Fake *FakeBigqueryanalyticshubV1alpha1 + Fake *FakeBigqueryanalyticshubV1beta1 ns string } -var bigqueryanalyticshubdataexchangesResource = v1alpha1.SchemeGroupVersion.WithResource("bigqueryanalyticshubdataexchanges") +var bigqueryanalyticshubdataexchangesResource = v1beta1.SchemeGroupVersion.WithResource("bigqueryanalyticshubdataexchanges") -var bigqueryanalyticshubdataexchangesKind = v1alpha1.SchemeGroupVersion.WithKind("BigQueryAnalyticsHubDataExchange") +var bigqueryanalyticshubdataexchangesKind = v1beta1.SchemeGroupVersion.WithKind("BigQueryAnalyticsHubDataExchange") // Get takes name of the bigQueryAnalyticsHubDataExchange, and returns the corresponding bigQueryAnalyticsHubDataExchange object, and an error if there is any. -func (c *FakeBigQueryAnalyticsHubDataExchanges) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { obj, err := c.Fake. - Invokes(testing.NewGetAction(bigqueryanalyticshubdataexchangesResource, c.ns, name), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewGetAction(bigqueryanalyticshubdataexchangesResource, c.ns, name), &v1beta1.BigQueryAnalyticsHubDataExchange{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryAnalyticsHubDataExchange), err + return obj.(*v1beta1.BigQueryAnalyticsHubDataExchange), err } // List takes label and field selectors, and returns the list of BigQueryAnalyticsHubDataExchanges that match those selectors. 
-func (c *FakeBigQueryAnalyticsHubDataExchanges) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchangeList, err error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchangeList, err error) { obj, err := c.Fake. - Invokes(testing.NewListAction(bigqueryanalyticshubdataexchangesResource, bigqueryanalyticshubdataexchangesKind, c.ns, opts), &v1alpha1.BigQueryAnalyticsHubDataExchangeList{}) + Invokes(testing.NewListAction(bigqueryanalyticshubdataexchangesResource, bigqueryanalyticshubdataexchangesKind, c.ns, opts), &v1beta1.BigQueryAnalyticsHubDataExchangeList{}) if obj == nil { return nil, err @@ -66,8 +66,8 @@ func (c *FakeBigQueryAnalyticsHubDataExchanges) List(ctx context.Context, opts v if label == nil { label = labels.Everything() } - list := &v1alpha1.BigQueryAnalyticsHubDataExchangeList{ListMeta: obj.(*v1alpha1.BigQueryAnalyticsHubDataExchangeList).ListMeta} - for _, item := range obj.(*v1alpha1.BigQueryAnalyticsHubDataExchangeList).Items { + list := &v1beta1.BigQueryAnalyticsHubDataExchangeList{ListMeta: obj.(*v1beta1.BigQueryAnalyticsHubDataExchangeList).ListMeta} + for _, item := range obj.(*v1beta1.BigQueryAnalyticsHubDataExchangeList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } @@ -83,43 +83,43 @@ func (c *FakeBigQueryAnalyticsHubDataExchanges) Watch(ctx context.Context, opts } // Create takes the representation of a bigQueryAnalyticsHubDataExchange and creates it. Returns the server's representation of the bigQueryAnalyticsHubDataExchange, and an error, if there is any. -func (c *FakeBigQueryAnalyticsHubDataExchanges) Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) Create(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.CreateOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { obj, err := c.Fake. - Invokes(testing.NewCreateAction(bigqueryanalyticshubdataexchangesResource, c.ns, bigQueryAnalyticsHubDataExchange), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewCreateAction(bigqueryanalyticshubdataexchangesResource, c.ns, bigQueryAnalyticsHubDataExchange), &v1beta1.BigQueryAnalyticsHubDataExchange{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryAnalyticsHubDataExchange), err + return obj.(*v1beta1.BigQueryAnalyticsHubDataExchange), err } // Update takes the representation of a bigQueryAnalyticsHubDataExchange and updates it. Returns the server's representation of the bigQueryAnalyticsHubDataExchange, and an error, if there is any. -func (c *FakeBigQueryAnalyticsHubDataExchanges) Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) Update(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { obj, err := c.Fake. 
- Invokes(testing.NewUpdateAction(bigqueryanalyticshubdataexchangesResource, c.ns, bigQueryAnalyticsHubDataExchange), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewUpdateAction(bigqueryanalyticshubdataexchangesResource, c.ns, bigQueryAnalyticsHubDataExchange), &v1beta1.BigQueryAnalyticsHubDataExchange{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryAnalyticsHubDataExchange), err + return obj.(*v1beta1.BigQueryAnalyticsHubDataExchange), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeBigQueryAnalyticsHubDataExchanges) UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1alpha1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1alpha1.BigQueryAnalyticsHubDataExchange, error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) UpdateStatus(ctx context.Context, bigQueryAnalyticsHubDataExchange *v1beta1.BigQueryAnalyticsHubDataExchange, opts v1.UpdateOptions) (*v1beta1.BigQueryAnalyticsHubDataExchange, error) { obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(bigqueryanalyticshubdataexchangesResource, "status", c.ns, bigQueryAnalyticsHubDataExchange), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewUpdateSubresourceAction(bigqueryanalyticshubdataexchangesResource, "status", c.ns, bigQueryAnalyticsHubDataExchange), &v1beta1.BigQueryAnalyticsHubDataExchange{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryAnalyticsHubDataExchange), err + return obj.(*v1beta1.BigQueryAnalyticsHubDataExchange), err } // Delete takes name of the bigQueryAnalyticsHubDataExchange and deletes it. Returns an error if one occurs. func (c *FakeBigQueryAnalyticsHubDataExchanges) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { _, err := c.Fake. - Invokes(testing.NewDeleteActionWithOptions(bigqueryanalyticshubdataexchangesResource, c.ns, name, opts), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewDeleteActionWithOptions(bigqueryanalyticshubdataexchangesResource, c.ns, name, opts), &v1beta1.BigQueryAnalyticsHubDataExchange{}) return err } @@ -128,17 +128,17 @@ func (c *FakeBigQueryAnalyticsHubDataExchanges) Delete(ctx context.Context, name func (c *FakeBigQueryAnalyticsHubDataExchanges) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { action := testing.NewDeleteCollectionAction(bigqueryanalyticshubdataexchangesResource, c.ns, listOpts) - _, err := c.Fake.Invokes(action, &v1alpha1.BigQueryAnalyticsHubDataExchangeList{}) + _, err := c.Fake.Invokes(action, &v1beta1.BigQueryAnalyticsHubDataExchangeList{}) return err } // Patch applies the patch and returns the patched bigQueryAnalyticsHubDataExchange. -func (c *FakeBigQueryAnalyticsHubDataExchanges) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryAnalyticsHubDataExchange, err error) { +func (c *FakeBigQueryAnalyticsHubDataExchanges) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryAnalyticsHubDataExchange, err error) { obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(bigqueryanalyticshubdataexchangesResource, c.ns, name, pt, data, subresources...), &v1alpha1.BigQueryAnalyticsHubDataExchange{}) + Invokes(testing.NewPatchSubresourceAction(bigqueryanalyticshubdataexchangesResource, c.ns, name, pt, data, subresources...), &v1beta1.BigQueryAnalyticsHubDataExchange{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryAnalyticsHubDataExchange), err + return obj.(*v1beta1.BigQueryAnalyticsHubDataExchange), err } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/generated_expansion.go b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/generated_expansion.go new file mode 100644 index 0000000000..1d51247319 --- /dev/null +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1beta1/generated_expansion.go @@ -0,0 +1,24 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// *** DISCLAIMER *** +// Config Connector's go-client for CRDs is currently in ALPHA, which means +// that future versions of the go-client may include breaking changes. +// Please try it out and give us feedback! + +// Code generated by client-gen. DO NOT EDIT. 
+ +package v1beta1 + +type BigQueryAnalyticsHubDataExchangeExpansion interface{} diff --git a/pkg/controller/direct/bigqueryanalyticshub/dataexchange_controller.go b/pkg/controller/direct/bigqueryanalyticshub/dataexchange_controller.go index ab787fee93..9b3ae36eb1 100644 --- a/pkg/controller/direct/bigqueryanalyticshub/dataexchange_controller.go +++ b/pkg/controller/direct/bigqueryanalyticshub/dataexchange_controller.go @@ -19,7 +19,7 @@ import ( "fmt" "reflect" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigqueryanalyticshub/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigqueryanalyticshub/v1beta1" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" diff --git a/pkg/controller/direct/bigqueryanalyticshub/mapper.go b/pkg/controller/direct/bigqueryanalyticshub/mapper.go index 57efa0cbaf..bce98b0020 100644 --- a/pkg/controller/direct/bigqueryanalyticshub/mapper.go +++ b/pkg/controller/direct/bigqueryanalyticshub/mapper.go @@ -16,7 +16,7 @@ package bigqueryanalyticshub import ( pb "cloud.google.com/go/bigquery/analyticshub/apiv1/analyticshubpb" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigqueryanalyticshub/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigqueryanalyticshub/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" ) diff --git a/pkg/gvks/supportedgvks/gvks_generated.go b/pkg/gvks/supportedgvks/gvks_generated.go index 6f875cc818..17274db582 100644 --- a/pkg/gvks/supportedgvks/gvks_generated.go +++ b/pkg/gvks/supportedgvks/gvks_generated.go @@ -478,6 +478,17 @@ var SupportedGVKs = map[schema.GroupVersionKind]GVKMetadata{ "cnrm.cloud.google.com/system": "true", }, }, + { + Group: "bigqueryanalyticshub.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "BigQueryAnalyticsHubDataExchange", + }: { + Labels: map[string]string{ + "cnrm.cloud.google.com/managed-by-kcc": "true", + "cnrm.cloud.google.com/stability-level": "alpha", + "cnrm.cloud.google.com/system": "true", + }, + }, { Group: "bigqueryanalyticshub.cnrm.cloud.google.com", Version: "v1alpha1", diff --git a/pkg/test/resourcefixture/sets.go b/pkg/test/resourcefixture/sets.go index 76f9b6ed14..38d26911ed 100644 --- a/pkg/test/resourcefixture/sets.go +++ b/pkg/test/resourcefixture/sets.go @@ -94,6 +94,7 @@ func IsPureDirectResource(gk schema.GroupKind) bool { "NetworkConnectivityServiceConnectionPolicy", "PrivilegedAccessManagerEntitlement", "RedisCluster", + "BigQueryAnalyticsHubDataExchange", } return slices.Contains(pureDirectResources, gk.Kind) } diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml similarity index 93% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml index d71889a272..f1a4b7b201 100644 --- 
a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/_generated_object_bigqueryanalyticshubdataexchange-base.golden.yaml @@ -1,4 +1,4 @@ -apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 kind: BigQueryAnalyticsHubDataExchange metadata: annotations: diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/_http.log b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/_http.log similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/_http.log rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/create.yaml similarity index 92% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/create.yaml rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/create.yaml index 7ae7b5c8f7..102f5bab73 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-base/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-base/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 kind: BigQueryAnalyticsHubDataExchange metadata: name: bigqueryanalyticshubdataexchange${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml similarity index 94% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml index 89ff4100ea..f9cf75f71d 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/_generated_object_bigqueryanalyticshubdataexchange-full.golden.yaml @@ -1,4 +1,4 @@ -apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 kind: BigQueryAnalyticsHubDataExchange metadata: annotations: diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/_http.log b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/_http.log similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/_http.log rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/create.yaml similarity index 93% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/create.yaml rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/create.yaml index 275ad44d22..1e33740d97 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 kind: BigQueryAnalyticsHubDataExchange metadata: name: bigqueryanalyticshubdataexchange${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/update.yaml similarity index 93% rename from pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/update.yaml rename to pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/update.yaml index 62a2491184..9a90ac0381 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1alpha1/bigqueryanalyticshubdataexchange-full/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigqueryanalyticshub/v1beta1/bigqueryanalyticshubdataexchange-full/update.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 kind: BigQueryAnalyticsHubDataExchange metadata: name: bigqueryanalyticshubdataexchange${uniqueId} diff --git a/scripts/generate-google3-docs/resource-reference/_toc.yaml b/scripts/generate-google3-docs/resource-reference/_toc.yaml index 26a603c592..f599ad09cc 100644 --- a/scripts/generate-google3-docs/resource-reference/_toc.yaml +++ b/scripts/generate-google3-docs/resource-reference/_toc.yaml @@ -43,6 +43,10 @@ toc: section: - title: "ArtifactRegistryRepository" path: /config-connector/docs/reference/resource-docs/artifactregistry/artifactregistryrepository.md +- title: "BigQueryAnalyticsHub" + section: + - title: "BigQueryAnalyticsHubDataExchange" + path: /config-connector/docs/reference/resource-docs/bigqueranalyticshub/bigqueranalyticshubydataexchange.md - title: "BigQuery" section: - title: "BigQueryDataset" diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigqueryanalyticshub/bigqueryanalyticshubdataexchange.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigqueryanalyticshub/bigqueryanalyticshubdataexchange.md new file mode 100644 index 0000000000..5f129ab807 --- /dev/null +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigqueryanalyticshub/bigqueryanalyticshubdataexchange.md @@ -0,0 +1,350 @@ +{# AUTOGENERATED. DO NOT EDIT. #} + +{% extends "config-connector/_base.html" %} + +{% block page_title %}BigQueryAnalyticsHubDataExchange{% endblock %} +{% block body %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+<table>
+<thead>
+<tr>
+<th>Property</th>
+<th>Value</th>
+</tr>
+</thead>
+<tbody>
+<tr><td>{{gcp_name_short}} Service Name</td><td>Analytics Hub</td></tr>
+<tr><td>{{gcp_name_short}} Service Documentation</td><td>/bigquery/docs/query-overview</td></tr>
+<tr><td>{{gcp_name_short}} REST Resource Name</td><td>v1.projects.locations.dataExchanges</td></tr>
+<tr><td>{{gcp_name_short}} REST Resource Documentation</td><td>/bigquery/docs/reference/analytics-hub/rest/v1/projects.locations.dataExchanges</td></tr>
+<tr><td>{{product_name_short}} Resource Short Names</td><td>bigqueryanalyticshubdataexchange</td></tr>
+<tr><td>{{product_name_short}} Service Name</td><td>analyticshub.googleapis.com</td></tr>
+<tr><td>{{product_name_short}} Resource Fully Qualified Name</td><td>bigqueryanalyticshubdataexchanges.bigqueryanalyticshub.cnrm.cloud.google.com</td></tr>
+<tr><td>Can Be Referenced by IAMPolicy/IAMPolicyMember</td><td>No</td></tr>
+<tr><td>{{product_name_short}} Default Average Reconcile Interval In Seconds</td><td>600</td></tr>
+</tbody>
+</table>
+ +## Custom Resource Definition Properties + + + +### Spec +#### Schema +```yaml +description: string +discoveryType: string +displayName: string +documentation: string +location: string +primaryContact: string +projectRef: + external: string + kind: string + name: string + namespace: string +resourceID: string +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+<table>
+<thead>
+<tr>
+<th colspan="2">Fields</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><code>description</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Optional. Description of the data exchange. The description must not contain Unicode non-characters as well as C0 and C1 control codes except tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). Default value is an empty string. Max length: 2000 bytes.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>discoveryType</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Optional. Type of discovery on the discovery page for all the listings under this exchange. Updating this field also updates (overwrites) the discovery_type field for all the listings under this exchange.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>displayName</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Required. Human-readable display name of the data exchange. The display name must contain only Unicode letters, numbers (0-9), underscores (_), dashes (-), spaces ( ), ampersands (&) and must not start or end with spaces. Default value is an empty string. Max length: 63 bytes.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>documentation</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Optional. Documentation describing the data exchange.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>location</code><br><i>Required</i></td>
+<td><code>string</code><br>{% verbatim %}Immutable. The name of the location this data exchange.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>primaryContact</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Optional. Email or URL of the primary point of contact of the data exchange. Max Length: 1000 bytes.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>projectRef</code><br><i>Required</i></td>
+<td><code>object</code><br>{% verbatim %}The project that this resource belongs to.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>projectRef.external</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}The `projectID` field of a project, when not managed by Config Connector.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>projectRef.kind</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}The kind of the Project resource; optional but must be `Project` if provided.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>projectRef.name</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}The `name` field of a `Project` resource.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>projectRef.namespace</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}The `namespace` field of a `Project` resource.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>resourceID</code><br><i>Optional</i></td>
+<td><code>string</code><br>{% verbatim %}Immutable. The BigQueryAnalyticsHubDataExchange name. If not given, the metadata.name will be used.{% endverbatim %}</td>
+</tr>
+</tbody>
+</table>
+ + + +### Status +#### Schema +```yaml +conditions: +- lastTransitionTime: string + message: string + reason: string + status: string + type: string +externalRef: string +observedGeneration: integer +observedState: + listingCount: integer +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+<table>
+<thead>
+<tr>
+<th colspan="2">Fields</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><code>conditions</code></td>
+<td><code>list (object)</code><br>{% verbatim %}Conditions represent the latest available observations of the object's current state.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[]</code></td>
+<td><code>object</code><br>{% verbatim %}{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[].lastTransitionTime</code></td>
+<td><code>string</code><br>{% verbatim %}Last time the condition transitioned from one status to another.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[].message</code></td>
+<td><code>string</code><br>{% verbatim %}Human-readable message indicating details about last transition.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[].reason</code></td>
+<td><code>string</code><br>{% verbatim %}Unique, one-word, CamelCase reason for the condition's last transition.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[].status</code></td>
+<td><code>string</code><br>{% verbatim %}Status is the status of the condition. Can be True, False, Unknown.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>conditions[].type</code></td>
+<td><code>string</code><br>{% verbatim %}Type is the type of the condition.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>externalRef</code></td>
+<td><code>string</code><br>{% verbatim %}A unique specifier for the BigQueryAnalyticsHubDataExchange resource in GCP.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>observedGeneration</code></td>
+<td><code>integer</code><br>{% verbatim %}ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>observedState</code></td>
+<td><code>object</code><br>{% verbatim %}ObservedState is the state of the resource as most recently observed in GCP.{% endverbatim %}</td>
+</tr>
+<tr>
+<td><code>observedState.listingCount</code></td>
+<td><code>integer</code><br>{% verbatim %}Number of listings contained in the data exchange.{% endverbatim %}</td>
+</tr>
+</tbody>
+</table>
+ +## Sample YAML(s) + +### Typical Use Case +```yaml +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigqueryanalyticshub.cnrm.cloud.google.com/v1beta1 +kind: BigQueryAnalyticsHubDataExchange +metadata: + name: bigqueryanalyticshubdataexchangesample +spec: + displayName: my_data_exchange + description: example data exchange + primaryContact: a@contact.com + documentation: a documentation + discoveryType: DISCOVERY_TYPE_PRIVATE + location: US + projectRef: + external: ${PROJECT_ID?} +``` + + +Note: If you have any trouble with instantiating the resource, refer to Troubleshoot Config Connector. + +{% endblock %} diff --git a/scripts/generate-google3-docs/resource-reference/overview.md b/scripts/generate-google3-docs/resource-reference/overview.md index 492191526c..3b059de445 100644 --- a/scripts/generate-google3-docs/resource-reference/overview.md +++ b/scripts/generate-google3-docs/resource-reference/overview.md @@ -73,6 +73,10 @@ issues for {{product_name_short}}. {{apigee_x_name}} ApigeeOrganization + + {{analytics_hub_name}} + BigQueryAnalyticsHubDataExchange + {{bigquery_name}} BigQueryDataset diff --git a/scripts/generate-google3-docs/resource-reference/templates/bigqueryanalyticshub_bigqueryanalyticshubdataexchange.tmpl b/scripts/generate-google3-docs/resource-reference/templates/bigqueryanalyticshub_bigqueryanalyticshubdataexchange.tmpl new file mode 100644 index 0000000000..07a8ec39e2 --- /dev/null +++ b/scripts/generate-google3-docs/resource-reference/templates/bigqueryanalyticshub_bigqueryanalyticshubdataexchange.tmpl @@ -0,0 +1,54 @@ +{{template "headercomment.tmpl" .}} + +{% extends "config-connector/_base.html" %} + +{% block page_title %}{{ .Kind}}{% endblock %} +{% block body %} +{{template "alphadisclaimer.tmpl" .}} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +{{template "iamsupport.tmpl" .}} + + + + + +
+<table>
+<thead>
+<tr>
+<th>Property</th>
+<th>Value</th>
+</tr>
+</thead>
+<tbody>
+<tr><td>{{"{{gcp_name_short}}"}} Service Name</td><td>Analytics Hub</td></tr>
+<tr><td>{{"{{gcp_name_short}}"}} Service Documentation</td><td>/bigquery/docs/query-overview</td></tr>
+<tr><td>{{"{{gcp_name_short}}"}} REST Resource Name</td><td>v1.projects.locations.dataExchanges</td></tr>
+<tr><td>{{"{{gcp_name_short}}"}} REST Resource Documentation</td><td>/bigquery/docs/reference/analytics-hub/rest/v1/projects.locations.dataExchanges</td></tr>
+<tr><td>{{"{{product_name_short}}"}} Resource Short Names</td><td>{{ .ShortNames}}</td></tr>
+<tr><td>{{"{{product_name_short}}"}} Service Name</td><td>analyticshub.googleapis.com</td></tr>
+<tr><td>{{"{{product_name_short}}"}} Resource Fully Qualified Name</td><td>{{ .FullyQualifiedName}}</td></tr>
+{{template "iamsupport.tmpl" .}}
+<tr><td>{{"{{product_name_short}}"}} Default Average Reconcile Interval In Seconds</td><td>{{ .DefaultReconcileInterval}}</td></tr>
+</tbody>
+</table>
+ +{{template "resource.tmpl" .}} +{{template "endnote.tmpl" .}} +{% endblock %} diff --git a/scripts/resource-autogen/allowlist/allowlist.go b/scripts/resource-autogen/allowlist/allowlist.go index 75f0c0be98..db1a1000ca 100644 --- a/scripts/resource-autogen/allowlist/allowlist.go +++ b/scripts/resource-autogen/allowlist/allowlist.go @@ -55,7 +55,6 @@ var ( "beyondcorp/google_beyondcorp_app_connector", "beyondcorp/google_beyondcorp_app_gateway", "bigquery/google_bigquery_dataset_access", - "bigquery_analytics_hub/google_bigquery_analytics_hub_data_exchange", "bigquery_analytics_hub/google_bigquery_analytics_hub_listing", "bigquery_connection/google_bigquery_connection", "bigquery_datapolicy/google_bigquery_datapolicy_data_policy", From a165093f02e125d5d2a439086daf8b07c9533bcd Mon Sep 17 00:00:00 2001 From: Joyce Ma Date: Tue, 5 Nov 2024 00:16:59 +0000 Subject: [PATCH 16/31] Quote the strings that contain colon (':') followed by only env var(s) in samples --- .../external-project-level-policy/iam_v1beta1_iampolicy.yaml | 2 +- .../iampolicy/project-level-policy/iam_v1beta1_iampolicy.yaml | 2 +- ...essmanager_v1beta1_privilegedaccessmanagerentitlement.yaml | 2 +- .../generated/resource-docs/iam/iampolicy.md | 4 ++-- .../privilegedaccessmanagerentitlement.md | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/config/samples/resources/iampolicy/external-project-level-policy/iam_v1beta1_iampolicy.yaml b/config/samples/resources/iampolicy/external-project-level-policy/iam_v1beta1_iampolicy.yaml index b6639225de..4546853404 100644 --- a/config/samples/resources/iampolicy/external-project-level-policy/iam_v1beta1_iampolicy.yaml +++ b/config/samples/resources/iampolicy/external-project-level-policy/iam_v1beta1_iampolicy.yaml @@ -32,7 +32,7 @@ spec: # Replace ${GSA_EMAIL?} with the Config Connector service account's # email address. This ensures that the Config Connector service account # can continue to manage the referenced project. - - serviceAccount:${GSA_EMAIL?} + - "serviceAccount:${GSA_EMAIL?}" role: roles/owner - members: - serviceAccount:iampolicy-dep-external-project@iampolicy-dep-external-project.iam.gserviceaccount.com diff --git a/config/samples/resources/iampolicy/project-level-policy/iam_v1beta1_iampolicy.yaml b/config/samples/resources/iampolicy/project-level-policy/iam_v1beta1_iampolicy.yaml index 518585afa0..04220afae6 100644 --- a/config/samples/resources/iampolicy/project-level-policy/iam_v1beta1_iampolicy.yaml +++ b/config/samples/resources/iampolicy/project-level-policy/iam_v1beta1_iampolicy.yaml @@ -32,7 +32,7 @@ spec: # Replace ${GSA_EMAIL?} with the Config Connector service account's # email address. This ensures that the Config Connector service account # can continue to manage the referenced project. 
- - serviceAccount:${GSA_EMAIL?} + - "serviceAccount:${GSA_EMAIL?}" role: roles/owner - members: - serviceAccount:iampolicy-dep-project@iampolicy-dep-project.iam.gserviceaccount.com diff --git a/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml b/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml index 328011ea1d..6d5c3d3955 100644 --- a/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml +++ b/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml @@ -52,4 +52,4 @@ spec: approvers: - principals: # Replace ${GROUP_EMAIL?} with your group email. - - group:${GROUP_EMAIL?} + - "group:${GROUP_EMAIL?}" diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/iam/iampolicy.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/iam/iampolicy.md index 0abad17507..0ce7ba4dac 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/iam/iampolicy.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/iam/iampolicy.md @@ -931,7 +931,7 @@ spec: # Replace ${GSA_EMAIL?} with the Config Connector service account's # email address. This ensures that the Config Connector service account # can continue to manage the referenced project. - - serviceAccount:${GSA_EMAIL?} + - "serviceAccount:${GSA_EMAIL?}" role: roles/owner - members: - serviceAccount:iampolicy-dep-external-project@iampolicy-dep-external-project.iam.gserviceaccount.com @@ -1044,7 +1044,7 @@ spec: # Replace ${GSA_EMAIL?} with the Config Connector service account's # email address. This ensures that the Config Connector service account # can continue to manage the referenced project. - - serviceAccount:${GSA_EMAIL?} + - "serviceAccount:${GSA_EMAIL?}" role: roles/owner - members: - serviceAccount:iampolicy-dep-project@iampolicy-dep-project.iam.gserviceaccount.com diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md index 90a89b71d9..598da25b7a 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md @@ -756,7 +756,7 @@ spec: approvers: - principals: # Replace ${GROUP_EMAIL?} with your group email. 
- - group:${GROUP_EMAIL?} + - "group:${GROUP_EMAIL?}" --- apiVersion: iam.cnrm.cloud.google.com/v1beta1 kind: IAMServiceAccount From 35e747616a3e70292acbf8d8e973042c086c20aa Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Tue, 5 Nov 2024 01:15:59 +0000 Subject: [PATCH 17/31] Address code review comments --- .../v1beta1/firewallpolicyrule_types.go | 7 +- dev/tasks/run-e2e | 2 +- mockgcp/mock_http_roundtrip.go | 3 +- .../firewallpolicyrule_controller.go | 33 +- .../direct/compute/firewallpolicyrule/refs.go | 4 +- ...lpolicyrule-egress-full-direct.golden.yaml | 52 + .../_http.log | 1556 +++++++++++++++++ .../create.yaml | 47 + .../dependencies.yaml | 53 + .../update.yaml | 48 + ...firewallpolicyrule-egress-full.golden.yaml | 1 + .../_http.log | 51 +- ...policyrule-ingress-full-direct.golden.yaml | 52 + .../_http.log | 1556 +++++++++++++++++ .../create.yaml | 47 + .../dependencies.yaml | 53 + .../update.yaml | 48 + ...irewallpolicyrule-ingress-full.golden.yaml | 1 + .../_http.log | 51 +- ...ewallpolicyrule-minimal-direct.golden.yaml | 35 + .../_http.log | 738 ++++++++ .../create.yaml | 31 + .../dependencies.yaml | 23 + .../update.yaml | 31 + ...putefirewallpolicyrule-minimal.golden.yaml | 1 + .../_http.log | 51 +- 26 files changed, 4453 insertions(+), 122 deletions(-) create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/create.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/dependencies.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/update.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/create.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/dependencies.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/update.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/create.yaml create mode 100644 
pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/dependencies.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/update.yaml diff --git a/apis/compute/v1beta1/firewallpolicyrule_types.go b/apis/compute/v1beta1/firewallpolicyrule_types.go index b64b86b02e..f546f0f18f 100644 --- a/apis/compute/v1beta1/firewallpolicyrule_types.go +++ b/apis/compute/v1beta1/firewallpolicyrule_types.go @@ -20,15 +20,10 @@ import ( refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" commonv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/common/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime/schema" ) var ( - ComputeFirewallPolicyRuleGVK = schema.GroupVersionKind{ - Group: GroupVersion.Group, - Version: GroupVersion.Version, - Kind: "ComputeFirewallPolicyRule", - } + ComputeFirewallPolicyRuleGVK = GroupVersion.WithKind("ComputeFirewallPolicyRule") ) // +kcc:proto=google.cloud.compute.v1.FirewallPolicyRuleMatcherLayer4Config diff --git a/dev/tasks/run-e2e b/dev/tasks/run-e2e index 99462cbe78..c649780576 100755 --- a/dev/tasks/run-e2e +++ b/dev/tasks/run-e2e @@ -26,7 +26,7 @@ if [[ -z "${KUBEBUILDER_ASSETS:-}" ]]; then fi if [[ -z "${KCC_USE_DIRECT_RECONCILERS:-}" ]]; then - KCC_USE_DIRECT_RECONCILERS=ComputeFirewallPolicyRule,ComputeForwardingRule,GKEHubFeatureMembership,SecretManagerSecret + KCC_USE_DIRECT_RECONCILERS=ComputeForwardingRule,GKEHubFeatureMembership,SecretManagerSecret fi echo "Using direct controllers: $KCC_USE_DIRECT_RECONCILERS" export KCC_USE_DIRECT_RECONCILERS diff --git a/mockgcp/mock_http_roundtrip.go b/mockgcp/mock_http_roundtrip.go index f5a17ba4c0..50310c41b6 100644 --- a/mockgcp/mock_http_roundtrip.go +++ b/mockgcp/mock_http_roundtrip.go @@ -333,8 +333,7 @@ func (m *mockRoundTripper) prefilterRequest(req *http.Request) error { // I got the "missing form body" error. Ref: https://go.dev/src/net/http/request.go?s=41070:41129 line 1340 // So instead of sending a nil request body, send an empty request body to ensure successful processing of the remove rule request. 
body := &bytes.Buffer{} - b := body.Bytes() - req.Body = io.NopCloser(bytes.NewBuffer(b)) + req.Body = io.NopCloser(body) } return nil } diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go index d8efc515e9..d305c44218 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go @@ -52,7 +52,7 @@ var _ directbase.Model = &firewallPolicyRuleModel{} type firewallPolicyRuleAdapter struct { firewallPolicy string - priority int64 + priority int32 firewallPoliciesClient *gcp.FirewallPoliciesClient desired *krm.ComputeFirewallPolicyRule actual *computepb.FirewallPolicyRule @@ -80,9 +80,6 @@ func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader c return nil, fmt.Errorf("error converting to %T: %w", obj, err) } - // Set label managed-by-cnrm: true - obj.ObjectMeta.Labels["managed-by-cnrm"] = "true" - // Get firewall policy firewallPolicyRef, err := ResolveComputeFirewallPolicy(ctx, reader, obj, obj.Spec.FirewallPolicyRef) if err != nil { @@ -93,7 +90,7 @@ func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader c firewallPolicy := obj.Spec.FirewallPolicyRef.External // Get priority - priority := obj.Spec.Priority + priority := int32(obj.Spec.Priority) firewallPolicyRuleAdapter := &firewallPolicyRuleAdapter{ firewallPolicy: firewallPolicy, @@ -118,7 +115,7 @@ func (m *firewallPolicyRuleModel) AdapterForURL(ctx context.Context, url string) } func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("getting ComputeFirewallPolicyRule", "priority", a.priority) firewallPolicyRule, err := a.get(ctx) @@ -136,14 +133,12 @@ func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { } func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error { - var err error - - err = resolveDependencies(ctx, a.reader, a.desired) + err := resolveDependencies(ctx, a.reader, a.desired) if err != nil { return err } - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("creating ComputeFirewallPolicyRule", "priority", a.priority) mapCtx := &direct.MapContext{} @@ -172,8 +167,7 @@ func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *direct log.V(2).Info("successfully created ComputeFirewallPolicyRule", "priority", a.priority) // Get the created resource - created := &computepb.FirewallPolicyRule{} - created, err = a.get(ctx) + created, err := a.get(ctx) if err != nil { return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) } @@ -191,7 +185,7 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct return err } - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("updating ComputeFirewallPolicyRule", "priority", a.priority) mapCtx := &direct.MapContext{} @@ -209,7 +203,7 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct updateReq := &computepb.PatchRuleFirewallPolicyRequest{ FirewallPolicyRuleResource: firewallPolicyRule, FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(int32(a.priority)), + Priority: direct.PtrTo(a.priority), } op, err := 
a.firewallPoliciesClient.PatchRule(ctx, updateReq) if err != nil { @@ -246,9 +240,7 @@ func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.U return nil, fmt.Errorf("error converting firewallPolicyRule spec to unstructured: %w", err) } - u := &unstructured.Unstructured{ - Object: make(map[string]interface{}), - } + u := &unstructured.Unstructured{} u.SetGroupVersionKind(krm.ComputeFirewallPolicyRuleGVK) if err := unstructured.SetNestedField(u.Object, specObj, "spec"); err != nil { @@ -260,15 +252,14 @@ func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.U // Delete implements the Adapter interface. func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("deleting ComputeFirewallPolicyRule", "priority", a.priority) delReq := &computepb.RemoveRuleFirewallPolicyRequest{ FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(int32(a.priority)), + Priority: direct.PtrTo(a.priority), } op, err := a.firewallPoliciesClient.RemoveRule(ctx, delReq) - if err != nil { return false, fmt.Errorf("deleting ComputeFirewallPolicyRule %d: %w", a.priority, err) } @@ -291,7 +282,7 @@ func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *direct func (a *firewallPolicyRuleAdapter) get(ctx context.Context) (*computepb.FirewallPolicyRule, error) { getReq := &computepb.GetRuleFirewallPolicyRequest{ FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(int32(a.priority)), + Priority: direct.PtrTo(a.priority), } return a.firewallPoliciesClient.GetRule(ctx, getReq) } diff --git a/pkg/controller/direct/compute/firewallpolicyrule/refs.go b/pkg/controller/direct/compute/firewallpolicyrule/refs.go index 6b19ad6a0c..1597cb1c5f 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/refs.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/refs.go @@ -120,7 +120,7 @@ func ResolveComputeNetwork(ctx context.Context, reader client.Reader, src client External: fmt.Sprintf("https://www.googleapis.com/compute/v1/projects/%s/global/networks/%s", projectID, resourceID)}, nil } -func ResolveIAMSetviceAccount(ctx context.Context, reader client.Reader, src client.Object, ref *refs.IAMServiceAccountRef) (*refs.IAMServiceAccountRef, error) { +func ResolveIAMServiceAccount(ctx context.Context, reader client.Reader, src client.Object, ref *refs.IAMServiceAccountRef) (*refs.IAMServiceAccountRef, error) { if ref == nil { return nil, nil } @@ -199,7 +199,7 @@ func resolveDependencies(ctx context.Context, reader client.Reader, obj *krm.Com var targetServiceAccounts []*refs.IAMServiceAccountRef if obj.Spec.TargetServiceAccounts != nil { for _, targetServiceAccount := range obj.Spec.TargetServiceAccounts { - iamServiceAccount, err := ResolveIAMSetviceAccount(ctx, reader, obj, targetServiceAccount) + iamServiceAccount, err := ResolveIAMServiceAccount(ctx, reader, obj, targetServiceAccount) if err != nil { return err } diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml new file mode 100644 index 0000000000..29528ddc81 --- /dev/null +++ 
b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml @@ -0,0 +1,52 @@ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: firewallpolicyrule-${uniqueId} + namespace: ${uniqueId} +spec: + action: allow + direction: EGRESS + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + destAddressGroups: + - organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2 + destFqdns: + - www.github.com + destIPRanges: + - 11.100.0.2/32 + destRegionCodes: + - AU + destThreatIntelligences: + - iplist-known-malicious-ips + - iplist-tor-exit-nodes + layer4Configs: + - ipProtocol: udp + ports: + - "8081" + srcIPRanges: + - 10.100.0.2/32 + priority: 9000 + targetResources: + - name: network-2-${uniqueId} + targetServiceAccounts: + - name: sa-2-${uniqueId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + kind: compute#firewallPolicyRule + observedGeneration: 2 + ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_http.log new file mode 100644 index 0000000000..246637e3be --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_http.log @@ -0,0 +1,1556 @@ +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "description": "A basic folder firewall policy", + "parent": "organizations/${organizationID}", + "shortName": "firewallpolicy-${uniqueId}" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": 
"000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 
'projects/${projectId}/global/networks/network-${uniqueId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'projects/${projectId}/global/networks/network-${uniqueId}' was not found" + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "autoCreateSubnetworks": false, + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "selfLinkWithId": 
"https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'projects/${projectId}/global/networks/network-2-${uniqueId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'projects/${projectId}/global/networks/network-2-${uniqueId}' was not found" + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "autoCreateSubnetworks": false, + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 
terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "sa-${uniqueId}", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 
Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "sa-2-${uniqueId}", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
+ } +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "deny", + "direction": "EGRESS", + "match": { + "destAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + ], + "destFqdns": [ + "www.google.com" + ], + "destIpRanges": [ + "11.100.0.1/32" + ], + "destRegionCodes": [ + "US" + ], + "destThreatIntelligences": [ + "iplist-known-malicious-ips" + ], + "layer4Configs": [ + { + "ipProtocol": "tcp", + "ports": [ + "8080" + ] + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "deny", + "description": "", + "direction": "EGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "destAddressGroups": [ + 
"organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + ], + "destFqdns": [ + "www.google.com" + ], + "destIpRanges": [ + "11.100.0.1/32" + ], + "destRegionCodes": [ + "US" + ], + "destThreatIntelligences": [ + "iplist-known-malicious-ips" + ], + "layer4Configs": [ + { + "ipProtocol": "tcp", + "ports": [ + "8080" + ] + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 4, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "allow", + "direction": "EGRESS", + "match": { + "destAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + ], + "destFqdns": [ + "www.github.com" + ], + "destIpRanges": [ + "11.100.0.2/32" + ], + "destRegionCodes": [ + "AU" + ], + "destThreatIntelligences": [ + "iplist-known-malicious-ips", + "iplist-tor-exit-nodes" + ], + "layer4Configs": [ + { + "ipProtocol": "udp", + "ports": [ + "8081" + ] + } + ], + "srcIpRanges": [ + "10.100.0.2/32" + ] + }, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": 
"https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "allow", + "description": "", + "direction": "EGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "destAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + ], + "destFqdns": [ + "www.github.com" + ], + "destIpRanges": [ + "11.100.0.2/32" + ], + "destRegionCodes": [ + "AU" + ], + "destThreatIntelligences": [ + "iplist-known-malicious-ips", + "iplist-tor-exit-nodes" + ], + "layer4Configs": [ + { + "ipProtocol": "udp", + "ports": [ + "8081" + ] + } + ], + "srcIpRanges": [ + "10.100.0.2/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 4, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": 
"https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." + } +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff 
+X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +DELETE https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + 
+200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +DELETE https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": 
"000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +DELETE https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": 
"2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found" + } +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/create.yaml new file mode 100644 index 0000000000..0507a15183 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/create.yaml @@ -0,0 +1,47 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "deny" + direction: "EGRESS" + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + destAddressGroups: + - "organizations/${TEST_ORG_ID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + destFqdns: + - "www.google.com" + destIPRanges: + - "11.100.0.1/32" + destRegionCodes: + - "US" + destThreatIntelligences: + - "iplist-known-malicious-ips" + layer4Configs: + - ipProtocol: "tcp" + ports: + - "8080" + srcIPRanges: + - "10.100.0.1/32" + priority: 9000 + targetResources: + - name: network-${uniqueId} + targetServiceAccounts: + - name: sa-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/dependencies.yaml new file mode 100644 index 0000000000..ef1c287cb7 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/dependencies.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicy +metadata: + name: firewallpolicy-${uniqueId} +spec: + organizationRef: + external: "organizations/${TEST_ORG_ID}" + shortName: firewallpolicy-${uniqueId} + description: "A basic folder firewall policy" +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: network-${uniqueId} +spec: + routingMode: REGIONAL + autoCreateSubnetworks: false +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: network-2-${uniqueId} +spec: + routingMode: REGIONAL + autoCreateSubnetworks: false +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + name: sa-${uniqueId} +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + name: sa-2-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/update.yaml new file mode 100644 index 0000000000..5604054d61 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/update.yaml @@ -0,0 +1,48 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "allow" + direction: "EGRESS" + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + destAddressGroups: + - "organizations/${TEST_ORG_ID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + destFqdns: + - "www.github.com" + destIPRanges: + - "11.100.0.2/32" + destRegionCodes: + - "AU" + destThreatIntelligences: + - "iplist-known-malicious-ips" + - "iplist-tor-exit-nodes" + layer4Configs: + - ipProtocol: "udp" + ports: + - "8081" + srcIPRanges: + - "10.100.0.2/32" + priority: 9000 + targetResources: + - name: network-2-${uniqueId} + targetServiceAccounts: + - name: sa-2-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml index 4893b524e3..1e0e625729 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_generated_object_computefirewallpolicyrule-egress-full.golden.yaml @@ -3,6 +3,7 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log index 246637e3be..507ffb07c2 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full/_http.log @@ -621,10 +621,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request Cache-Control: private @@ -646,14 +645,14 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "deny", "direction": "EGRESS", + "firewallPolicy": 
"locations/global/firewallPolicies/${firewallPolicyId}", "match": { "destAddressGroups": [ "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" @@ -719,10 +718,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -753,10 +751,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -814,10 +811,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "allow", @@ -887,10 +883,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -921,10 +916,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -983,10 +977,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -1016,10 +1009,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK 
Cache-Control: private @@ -1050,10 +1042,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request Cache-Control: private diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml new file mode 100644 index 0000000000..e9d64550a1 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml @@ -0,0 +1,52 @@ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: firewallpolicyrule-${uniqueId} + namespace: ${uniqueId} +spec: + action: allow + direction: INGRESS + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + destIPRanges: + - 10.100.0.2/32 + layer4Configs: + - ipProtocol: udp + ports: + - "8081" + srcAddressGroups: + - organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2 + srcFqdns: + - www.github.com + srcIPRanges: + - 11.100.0.2/32 + srcRegionCodes: + - AU + srcThreatIntelligences: + - iplist-known-malicious-ips + - iplist-tor-exit-nodes + priority: 9000 + targetResources: + - name: network-2-${uniqueId} + targetServiceAccounts: + - name: sa-2-${uniqueId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + kind: compute#firewallPolicyRule + observedGeneration: 2 + ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_http.log new file mode 100644 index 0000000000..7a13ce286b --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_http.log @@ -0,0 +1,1556 @@ +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "description": "A basic folder firewall policy", + "parent": "organizations/${organizationID}", + "shortName": "firewallpolicy-${uniqueId}" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: 
X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + 
"ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'projects/${projectId}/global/networks/network-${uniqueId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'projects/${projectId}/global/networks/network-${uniqueId}' was not found" + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "autoCreateSubnetworks": false, + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": 
"https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'projects/${projectId}/global/networks/network-2-${uniqueId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'projects/${projectId}/global/networks/network-2-${uniqueId}' was not found" + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "autoCreateSubnetworks": false, + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK 
+Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "insert", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "sa-${uniqueId}", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET 
https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "sa-2-${uniqueId}", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json 
+User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "deny", + "direction": "INGRESS", + "match": { + "destIpRanges": [ + "10.100.0.1/32" + ], + "layer4Configs": [ + { + "ipProtocol": "tcp", + "ports": [ + "8080" + ] + } + ], + "srcAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + ], + "srcFqdns": [ + "www.google.com" + ], + "srcIpRanges": [ + "11.100.0.1/32" + ], + "srcRegionCodes": [ + "US" + ], + "srcThreatIntelligences": [ + "iplist-known-malicious-ips" + ] + }, + "priority": 9000, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager 
+x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "deny", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "10.100.0.1/32" + ], + "layer4Configs": [ + { + "ipProtocol": "tcp", + "ports": [ + "8080" + ] + } + ], + "srcAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + ], + "srcFqdns": [ + "www.google.com" + ], + "srcIpRanges": [ + "11.100.0.1/32" + ], + "srcRegionCodes": [ + "US" + ], + "srcThreatIntelligences": [ + "iplist-known-malicious-ips" + ] + }, + "priority": 9000, + "ruleTupleCount": 4, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "allow", + "direction": "INGRESS", + "match": { + "destIpRanges": [ + "10.100.0.2/32" + ], + "layer4Configs": [ + { + "ipProtocol": "udp", + "ports": [ + "8081" + ] + } + ], + "srcAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + ], + "srcFqdns": [ + "www.github.com" + ], + "srcIpRanges": [ + "11.100.0.2/32" + ], + "srcRegionCodes": [ + "AU" + ], + "srcThreatIntelligences": [ + "iplist-known-malicious-ips", + "iplist-tor-exit-nodes" + ] + }, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": 
"2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "allow", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "10.100.0.2/32" + ], + "layer4Configs": [ + { + "ipProtocol": "udp", + "ports": [ + "8081" + ] + } + ], + "srcAddressGroups": [ + "organizations/${organizationID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + ], + "srcFqdns": [ + "www.github.com" + ], + "srcIpRanges": [ + "11.100.0.2/32" + ], + "srcRegionCodes": [ + "AU" + ], + "srcThreatIntelligences": [ + "iplist-known-malicious-ips", + "iplist-tor-exit-nodes" + ] + }, + "priority": 9000, + "ruleTupleCount": 4, + "targetResources": [ + "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}" + ], + "targetServiceAccounts": [ + "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com" + ] +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": 
"${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." + } +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/sa-${uniqueId}@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: 
google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-2-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +DELETE https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-2-${uniqueId}", + 
"user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "autoCreateSubnetworks": false, + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "kind": "compute#network", + "name": "network-${uniqueId}", + "networkFirewallPolicyEnforcementOrder": "AFTER_CLASSIC_FIREWALL", + "routingConfig": { + "routingMode": "REGIONAL" + }, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}" +} + +--- + +DELETE https://compute.googleapis.com/compute/v1/projects/${projectId}/global/networks/${networkID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "delete", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${networkID}", + "targetLink": "https://www.googleapis.com/compute/v1/projects/${projectId}/global/networks/network-${uniqueId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin 
+Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic folder firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +DELETE https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": 
"000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found" + } +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/create.yaml new file mode 100644 index 0000000000..ee278f172e --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/create.yaml @@ -0,0 +1,47 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "deny" + direction: "INGRESS" + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + srcAddressGroups: + - "organizations/${TEST_ORG_ID}/locations/global/addressGroups/testnetworksecurityaddressgroup" + srcFqdns: + - "www.google.com" + srcIPRanges: + - "11.100.0.1/32" + srcRegionCodes: + - "US" + srcThreatIntelligences: + - "iplist-known-malicious-ips" + layer4Configs: + - ipProtocol: "tcp" + ports: + - "8080" + destIPRanges: + - "10.100.0.1/32" + priority: 9000 + targetResources: + - name: network-${uniqueId} + targetServiceAccounts: + - name: sa-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/dependencies.yaml new file mode 100644 index 0000000000..ef1c287cb7 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/dependencies.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicy +metadata: + name: firewallpolicy-${uniqueId} +spec: + organizationRef: + external: "organizations/${TEST_ORG_ID}" + shortName: firewallpolicy-${uniqueId} + description: "A basic folder firewall policy" +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: network-${uniqueId} +spec: + routingMode: REGIONAL + autoCreateSubnetworks: false +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: network-2-${uniqueId} +spec: + routingMode: REGIONAL + autoCreateSubnetworks: false +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + name: sa-${uniqueId} +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + name: sa-2-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/update.yaml new file mode 100644 index 0000000000..4b0d3ea8cc --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/update.yaml @@ -0,0 +1,48 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "allow" + direction: "INGRESS" + firewallPolicyRef: + name: firewallpolicy-${uniqueId} + match: + srcAddressGroups: + - "organizations/${TEST_ORG_ID}/locations/global/addressGroups/testnetworksecurityaddressgroup-2" + srcFqdns: + - "www.github.com" + srcIPRanges: + - "11.100.0.2/32" + srcRegionCodes: + - "AU" + srcThreatIntelligences: + - "iplist-known-malicious-ips" + - "iplist-tor-exit-nodes" + layer4Configs: + - ipProtocol: "udp" + ports: + - "8081" + destIPRanges: + - "10.100.0.2/32" + priority: 9000 + targetResources: + - name: network-2-${uniqueId} + targetServiceAccounts: + - name: sa-2-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml index 0d2386e0df..348b62a890 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_generated_object_computefirewallpolicyrule-ingress-full.golden.yaml @@ -3,6 +3,7 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log index 7a13ce286b..e19f7aaa01 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full/_http.log @@ -621,10 +621,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request Cache-Control: private @@ -646,14 +645,14 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "deny", "direction": "INGRESS", + 
"firewallPolicy": "locations/global/firewallPolicies/${firewallPolicyId}", "match": { "destIpRanges": [ "10.100.0.1/32" @@ -719,10 +718,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -753,10 +751,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -814,10 +811,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "allow", @@ -887,10 +883,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -921,10 +916,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -983,10 +977,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -1016,10 +1009,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -1050,10 +1042,9 @@ X-Xss-Protection: 0 --- 
-GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request Cache-Control: private diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml new file mode 100644 index 0000000000..c032270873 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml @@ -0,0 +1,35 @@ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: firewallpolicyrule-${uniqueId} + namespace: ${uniqueId} +spec: + action: allow + direction: INGRESS + firewallPolicyRef: + name: firewallpolicyrule-${uniqueId} + match: + layer4Configs: + - ipProtocol: tcp + srcIPRanges: + - 10.100.0.1/32 + priority: 9000 +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + kind: compute#firewallPolicyRule + observedGeneration: 2 + ruleTupleCount: 2 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_http.log new file mode 100644 index 0000000000..c08bdcdb0b --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_http.log @@ -0,0 +1,738 @@ +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +{ + "description": "A basic organization firewall policy", + "parent": "organizations/${organizationID}", + "shortName": "firewallpolicy-${uniqueId}" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "user": "user@example.com" +} + +--- + +GET 
https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json&parentId=organizations%2F${organizationID} +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "createFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic organization firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +GET 
https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." + } +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "deny", + "direction": "INGRESS", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000 +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "addFirewallRuleToFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "deny", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" 
+ } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 2 +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +{ + "action": "allow", + "direction": "INGRESS", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "patchFirewallRuleInFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "action": "allow", + "description": "", + "direction": "INGRESS", + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "tcp" + } + ], + "srcIpRanges": [ + "10.100.0.1/32" + ] + }, + "priority": 9000, + "ruleTupleCount": 2 +} + +--- + +POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff 
+X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: operation=${operationID} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "removeFirewallRuleFromFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +Content-Type: application/json +User-Agent: kcc/controller-manager +x-goog-request-params: firewall_policy=${firewallPolicyId} + +400 Bad Request +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 400, + "message": "Invalid value for field 'priority': '9000'. The firewall policy does not contain a rule at priority 9000." 
+ } +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "creationTimestamp": "2024-04-01T12:34:56.123456Z", + "description": "A basic organization firewall policy", + "displayName": "firewallpolicy-${uniqueId}", + "fingerprint": "abcdef0123A=", + "id": "000000000000000000000", + "kind": "compute#firewallPolicy", + "name": "${firewallPolicyId}", + "parent": "organizations/${organizationID}", + "ruleTupleCount": 8, + "rules": [ + { + "action": "goto_next", + "description": "default egress rule ipv6", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "::/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483644, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule ipv6", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "::/0" + ] + }, + "priority": 2147483645, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default egress rule", + "direction": "EGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "destIpRanges": [ + "0.0.0.0/0" + ], + "layer4Configs": [ + { + "ipProtocol": "all" + } + ] + }, + "priority": 2147483646, + "ruleTupleCount": 2 + }, + { + "action": "goto_next", + "description": "default ingress rule", + "direction": "INGRESS", + "enableLogging": false, + "kind": "compute#firewallPolicyRule", + "match": { + "layer4Configs": [ + { + "ipProtocol": "all" + } + ], + "srcIpRanges": [ + "0.0.0.0/0" + ] + }, + "priority": 2147483647, + "ruleTupleCount": 2 + } + ], + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "selfLinkWithId": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}/${firewallPolicyId}", + "shortName": "firewallpolicy-${uniqueId}" +} + +--- + +DELETE https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 0, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "RUNNING", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json +Content-Type: application/json +User-Agent: 
kcc/controller-manager DeclarativeClientLib/0.0.1 + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "endTime": "2024-04-01T12:34:56.123456Z", + "id": "000000000000000000000", + "insertTime": "2024-04-01T12:34:56.123456Z", + "kind": "compute#operation", + "name": "${operationID}", + "operationType": "deleteFirewallPolicy", + "progress": 100, + "selfLink": "https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}", + "startTime": "2024-04-01T12:34:56.123456Z", + "status": "DONE", + "targetId": "${firewallPolicyId}", + "targetLink": "https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyId}", + "user": "user@example.com" +} + +--- + +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}?alt=json +Content-Type: application/json +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found", + "reason": "notFound" + } + ], + "message": "The resource 'locations/global/firewallPolicies/${firewallPolicyId}' was not found" + } +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/create.yaml new file mode 100644 index 0000000000..649ddbd6aa --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/create.yaml @@ -0,0 +1,31 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "deny" + direction: "INGRESS" + firewallPolicyRef: + name: firewallpolicyrule-${uniqueId} + match: + layer4Configs: + - ipProtocol: "tcp" + srcIPRanges: + - "10.100.0.1/32" + priority: 9000 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/dependencies.yaml new file mode 100644 index 0000000000..b63821d4d4 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/dependencies.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicy +metadata: + name: firewallpolicyrule-${uniqueId} +spec: + organizationRef: + external: "organizations/${TEST_ORG_ID}" + shortName: firewallpolicy-${uniqueId} + description: "A basic organization firewall policy" diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/update.yaml new file mode 100644 index 0000000000..e53acb2b38 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/update.yaml @@ -0,0 +1,31 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeFirewallPolicyRule +metadata: + name: firewallpolicyrule-${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + action: "allow" + direction: "INGRESS" + firewallPolicyRef: + name: firewallpolicyrule-${uniqueId} + match: + layer4Configs: + - ipProtocol: "tcp" + srcIPRanges: + - "10.100.0.1/32" + priority: 9000 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml index 77bb2c7fb2..52d426c96b 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_generated_object_computefirewallpolicyrule-minimal.golden.yaml @@ -3,6 +3,7 @@ kind: ComputeFirewallPolicyRule metadata: annotations: cnrm.cloud.google.com/management-conflict-prevention-policy: none + cnrm.cloud.google.com/state-into-spec: absent finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log index c08bdcdb0b..9fdf50371c 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal/_http.log @@ -177,10 +177,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request Cache-Control: private @@ -202,14 +201,14 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/addRule?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "deny", "direction": "INGRESS", + "firewallPolicy": "locations/global/firewallPolicies/${firewallPolicyId}", "match": { "layer4Configs": [ { @@ -251,10 +250,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} 
+User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -285,10 +283,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -322,10 +319,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/patchRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 { "action": "allow", @@ -370,10 +366,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -404,10 +399,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -441,10 +435,9 @@ X-Xss-Protection: 0 --- -POST https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?priority=9000 +POST https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/removeRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -474,10 +467,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/operations/${operationID} +GET https://www.googleapis.com/compute/v1/locations/global/operations/${operationID}?alt=json Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: operation=${operationID} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 200 OK Cache-Control: private @@ -508,10 +500,9 @@ X-Xss-Protection: 0 --- -GET https://compute.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?priority=9000 +GET https://www.googleapis.com/compute/v1/locations/global/firewallPolicies/${firewallPolicyID}/getRule?alt=json&priority=9000 Content-Type: application/json -User-Agent: kcc/controller-manager -x-goog-request-params: firewall_policy=${firewallPolicyId} +User-Agent: kcc/controller-manager DeclarativeClientLib/0.0.1 400 Bad Request 
Cache-Control: private From 412e8a76f1a2e4d575e2563e855cd313f850ad51 Mon Sep 17 00:00:00 2001 From: justinsb Date: Tue, 5 Nov 2024 08:48:36 -0500 Subject: [PATCH 18/31] tests: add e2e for serviceusage --- dev/ci/periodics/e2e-service-serviceusage | 25 +++++++++++++++++++++++ dev/tasks/create-test-project | 1 + 2 files changed, 26 insertions(+) create mode 100755 dev/ci/periodics/e2e-service-serviceusage diff --git a/dev/ci/periodics/e2e-service-serviceusage b/dev/ci/periodics/e2e-service-serviceusage new file mode 100755 index 0000000000..c107c155fa --- /dev/null +++ b/dev/ci/periodics/e2e-service-serviceusage @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd ${REPO_ROOT} + +export ONLY_TEST_APIGROUPS=serviceusage.cnrm.cloud.google.com + +dev/ci/periodics/_create_project_and_run_e2e diff --git a/dev/tasks/create-test-project b/dev/tasks/create-test-project index 66cba1cd0a..71b6e3e210 100755 --- a/dev/tasks/create-test-project +++ b/dev/tasks/create-test-project @@ -62,6 +62,7 @@ gcloud services enable \ monitoring.googleapis.com \ redis.googleapis.com \ servicenetworking.googleapis.com \ + serviceusage.googleapis.com \ sqladmin.googleapis.com \ workstations.googleapis.com From 5e691710595c8bdd91c57cb251a9a4ab799461ca Mon Sep 17 00:00:00 2001 From: justinsb Date: Tue, 5 Nov 2024 08:51:14 -0500 Subject: [PATCH 19/31] tests: add e2e for apigee --- dev/ci/periodics/e2e-service-apigee | 25 +++++++++++++++++++++++++ dev/tasks/create-test-project | 1 + 2 files changed, 26 insertions(+) create mode 100755 dev/ci/periodics/e2e-service-apigee diff --git a/dev/ci/periodics/e2e-service-apigee b/dev/ci/periodics/e2e-service-apigee new file mode 100755 index 0000000000..5868ffb45f --- /dev/null +++ b/dev/ci/periodics/e2e-service-apigee @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
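+
+# Note: mirrors the serviceusage periodic above; ONLY_TEST_APIGROUPS scopes the
+# e2e run to apigee.cnrm.cloud.google.com before delegating to the shared
+# _create_project_and_run_e2e helper.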
+ +set -o errexit +set -o nounset +set -o pipefail + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd ${REPO_ROOT} + +export ONLY_TEST_APIGROUPS=apigee.cnrm.cloud.google.com + +dev/ci/periodics/_create_project_and_run_e2e diff --git a/dev/tasks/create-test-project b/dev/tasks/create-test-project index 66cba1cd0a..f5529b3f9c 100755 --- a/dev/tasks/create-test-project +++ b/dev/tasks/create-test-project @@ -53,6 +53,7 @@ echo "Setting default project to ${GCP_PROJECT_ID}" gcloud config set project "${GCP_PROJECT_ID}" gcloud services enable \ + apigee.googleapis.com \ compute.googleapis.com \ cloudbuild.googleapis.com \ cloudkms.googleapis.com \ From 4125263eeb48fa055f5419b89a71fa15887ad2c7 Mon Sep 17 00:00:00 2001 From: justinsb Date: Mon, 4 Nov 2024 10:23:33 -0500 Subject: [PATCH 20/31] tests: set time limit for webhook start If there's a port conflict, the webhook may not start. Don't wait forever in this case. --- config/tests/samples/create/harness.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/config/tests/samples/create/harness.go b/config/tests/samples/create/harness.go index 0dbda728d4..581b4ca36c 100644 --- a/config/tests/samples/create/harness.go +++ b/config/tests/samples/create/harness.go @@ -653,6 +653,8 @@ func NewHarness(ctx context.Context, t *testing.T, opts ...HarnessOption) *Harne }() // Wait for the webhook server to start (mgr.Start runs asynchronously) + webhookWaitStart := time.Now() + webhookTimeout := 10 * time.Second for { webhookStarted := mgr.GetWebhookServer().StartedChecker() req := &http.Request{} @@ -660,6 +662,9 @@ func NewHarness(ctx context.Context, t *testing.T, opts ...HarnessOption) *Harne if err == nil { break } + if time.Since(webhookWaitStart) > webhookTimeout { + t.Fatalf("webhook did not start within %v timeout", webhookTimeout) + } t.Logf("waiting for webhook to start (%v)", err) time.Sleep(100 * time.Millisecond) } From 01aed363f7f6ac03852c64e842c7a58346be7992 Mon Sep 17 00:00:00 2001 From: justinsb Date: Thu, 11 Jul 2024 08:06:55 -0400 Subject: [PATCH 21/31] feat: add gemini prompting to controllerbuilder --- dev/tools/controllerbuilder/cmd/root.go | 3 + .../commands/exportcsv/exportcsvcommand.go | 114 ++++++++ .../pkg/commands/exportcsv/prompt.go | 117 ++++++++ .../controllerbuilder/pkg/toolbot/csv.go | 249 ++++++++++++++++++ .../pkg/toolbot/datapoint.go | 105 ++++++++ .../pkg/toolbot/enhancewithprotodefinition.go | 249 ++++++++++++++++++ .../pkg/toolbot/extracttoolmarkers.go | 117 ++++++++ mockgcp/mockkms/cryptokey.go | 10 +- mockgcp/mockkms/cryptokeyversion.go | 10 +- mockgcp/mockkms/keyring.go | 10 +- mockgcp/mocklogging/logbucket.go | 8 +- mockgcp/mocklogging/logmetric.go | 12 +- mockgcp/mocklogging/logview.go | 8 +- mockgcp/mockpubsublite/reservation.go | 6 + mockgcp/mockpubsublite/subscription.go | 6 + mockgcp/mockpubsublite/topic.go | 6 + 16 files changed, 1001 insertions(+), 29 deletions(-) create mode 100644 dev/tools/controllerbuilder/pkg/commands/exportcsv/exportcsvcommand.go create mode 100644 dev/tools/controllerbuilder/pkg/commands/exportcsv/prompt.go create mode 100644 dev/tools/controllerbuilder/pkg/toolbot/csv.go create mode 100644 dev/tools/controllerbuilder/pkg/toolbot/datapoint.go create mode 100644 dev/tools/controllerbuilder/pkg/toolbot/enhancewithprotodefinition.go create mode 100644 dev/tools/controllerbuilder/pkg/toolbot/extracttoolmarkers.go diff --git a/dev/tools/controllerbuilder/cmd/root.go b/dev/tools/controllerbuilder/cmd/root.go index c557ad9b6c..2b1c0e7888 100644 --- 
a/dev/tools/controllerbuilder/cmd/root.go +++ b/dev/tools/controllerbuilder/cmd/root.go @@ -19,6 +19,7 @@ import ( "os" "strings" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/commands/exportcsv" "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/commands/generatemapper" "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/commands/generatetypes" "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/commands/updatetypes" @@ -94,6 +95,8 @@ func Execute() { rootCmd.AddCommand(generatetypes.BuildCommand(&generateOptions)) rootCmd.AddCommand(generatemapper.BuildCommand(&generateOptions)) rootCmd.AddCommand(updatetypes.BuildCommand(&generateOptions)) + rootCmd.AddCommand(exportcsv.BuildCommand(&generateOptions)) + rootCmd.AddCommand(exportcsv.BuildPromptCommand(&generateOptions)) if err := rootCmd.Execute(); err != nil { fmt.Fprintf(os.Stderr, "%v\n", err) diff --git a/dev/tools/controllerbuilder/pkg/commands/exportcsv/exportcsvcommand.go b/dev/tools/controllerbuilder/pkg/commands/exportcsv/exportcsvcommand.go new file mode 100644 index 0000000000..2438451135 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/commands/exportcsv/exportcsvcommand.go @@ -0,0 +1,114 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exportcsv + +import ( + "context" + "fmt" + "os" + "strings" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/options" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/toolbot" + + "github.com/spf13/cobra" +) + +// ExportCSVOptions are the options for the export-csv command. +type ExportCSVOptions struct { + *options.GenerateOptions + + ProtoDir string + SrcDir string + OutputDir string +} + +// BindFlags binds the flags to the command. +func (o *ExportCSVOptions) BindFlags(cmd *cobra.Command) { + cmd.Flags().StringVar(&o.ProtoDir, "proto-dir", o.ProtoDir, "base directory for checkout of proto API definitions") + cmd.Flags().StringVar(&o.SrcDir, "src-dir", o.SrcDir, "base directory for source code") + cmd.Flags().StringVar(&o.OutputDir, "output-dir", o.OutputDir, "base directory for writing CSVs") +} + +// BuildCommand builds the export-csv command. +func BuildCommand(baseOptions *options.GenerateOptions) *cobra.Command { + opt := &ExportCSVOptions{ + GenerateOptions: baseOptions, + } + + cmd := &cobra.Command{ + Use: "export-csv", + Short: "generate CSV from tool annotations", + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + if err := RunExportCSV(ctx, opt); err != nil { + return err + } + return nil + }, + } + + opt.BindFlags(cmd) + + return cmd +} + +// rewriteFilePath rewrites the file path to the user's home directory if it starts with "~". 
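+// For example (illustrative, assuming HOME=/home/user), "~/proto-src" becomes
+// "/home/user/proto-src"; paths without a leading "~" are left unchanged.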
+func rewriteFilePath(p *string) error { + if strings.HasPrefix(*p, "~/") { + homeDir, err := os.UserHomeDir() + if err != nil { + return fmt.Errorf("getting home directory: %w", err) + } + *p = strings.Replace(*p, "~", homeDir, 1) + } + return nil +} + +// RunExportCSV runs the export-csv command. +func RunExportCSV(ctx context.Context, o *ExportCSVOptions) error { + if err := rewriteFilePath(&o.ProtoDir); err != nil { + return err + } + + if o.ProtoDir == "" { + return fmt.Errorf("--proto-dir is required") + } + if o.SrcDir == "" { + return fmt.Errorf("--src-dir is required") + } + if o.OutputDir == "" { + return fmt.Errorf("--output-dir is required") + } + + extractor := &toolbot.ExtractToolMarkers{} + addProtoDefinition, err := toolbot.NewEnhanceWithProtoDefinition(o.ProtoDir) + if err != nil { + return err + } + x, err := toolbot.NewCSVExporter(extractor, addProtoDefinition) + if err != nil { + return err + } + if err := x.VisitCodeDir(ctx, o.SrcDir); err != nil { + return err + } + + if err := x.WriteCSVForAllTools(ctx, o.OutputDir); err != nil { + return err + } + + return nil +} diff --git a/dev/tools/controllerbuilder/pkg/commands/exportcsv/prompt.go b/dev/tools/controllerbuilder/pkg/commands/exportcsv/prompt.go new file mode 100644 index 0000000000..927d4efd87 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/commands/exportcsv/prompt.go @@ -0,0 +1,117 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exportcsv + +import ( + "context" + "fmt" + "io" + "os" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/options" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/toolbot" + "k8s.io/klog/v2" + + "github.com/spf13/cobra" +) + +// PromptOptions are the options for the prompt command. +type PromptOptions struct { + *options.GenerateOptions + + ProtoDir string + SrcDir string +} + +// BindFlags binds the flags to the command. +func (o *PromptOptions) BindFlags(cmd *cobra.Command) { + cmd.Flags().StringVar(&o.SrcDir, "src-dir", o.SrcDir, "base directory for source code") + cmd.Flags().StringVar(&o.ProtoDir, "proto-dir", o.ProtoDir, "base directory for checkout of proto API definitions") +} + +// BuildPromptCommand builds the `prompt` command. +func BuildPromptCommand(baseOptions *options.GenerateOptions) *cobra.Command { + opt := &PromptOptions{ + GenerateOptions: baseOptions, + } + + cmd := &cobra.Command{ + Use: "prompt", + Short: "executes a prompt against Gemini, generating context based on the source code.", + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + if err := RunPrompt(ctx, opt); err != nil { + return err + } + return nil + }, + } + + opt.BindFlags(cmd) + + return cmd +} + +// RunPrompt runs the `prompt` command. 
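+//
+// Illustrative invocation (the binary name and paths here are assumptions,
+// not part of this change):
+//
+//	cat pkg/foo/controller.go | controllerbuilder prompt \
+//	    --proto-dir ~/proto-src --src-dir ./mockgcp
+//
+// The prompt input is read from stdin, and the Gemini client used below
+// expects the GEMINI_API_KEY environment variable to be set.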
+func RunPrompt(ctx context.Context, o *PromptOptions) error { + log := klog.FromContext(ctx) + + if err := rewriteFilePath(&o.ProtoDir); err != nil { + return err + } + + if o.ProtoDir == "" { + return fmt.Errorf("--proto-dir is required") + } + extractor := &toolbot.ExtractToolMarkers{} + addProtoDefinition, err := toolbot.NewEnhanceWithProtoDefinition(o.ProtoDir) + if err != nil { + return err + } + x, err := toolbot.NewCSVExporter(extractor, addProtoDefinition) + if err != nil { + return err + } + + if o.SrcDir != "" { + if err := x.VisitCodeDir(ctx, o.SrcDir); err != nil { + return err + } + } + + b, err := io.ReadAll(os.Stdin) + if err != nil { + return fmt.Errorf("reading from stdin: %w", err) + } + + dataPoints, err := x.BuildDataPoints(ctx, b) + if err != nil { + return err + } + + if len(dataPoints) != 1 { + return fmt.Errorf("expected exactly one data point, got %d", len(dataPoints)) + } + + dataPoint := dataPoints[0] + + log.Info("built data point", "dataPoint", dataPoint) + + if err := x.RunGemini(ctx, dataPoint, os.Stdout); err != nil { + return fmt.Errorf("running LLM inference: %w", err) + + } + return nil +} diff --git a/dev/tools/controllerbuilder/pkg/toolbot/csv.go b/dev/tools/controllerbuilder/pkg/toolbot/csv.go new file mode 100644 index 0000000000..88a1d3d014 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/toolbot/csv.go @@ -0,0 +1,249 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package toolbot + +import ( + "bytes" + "context" + "encoding/csv" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/google/generative-ai-go/genai" + "google.golang.org/api/option" + "k8s.io/apimachinery/pkg/util/sets" + "k8s.io/klog/v2" +) + +// CSVExporter is an exporter that writes CSV files for each tool. +type CSVExporter struct { + enhancers []Enhancer + extractor Extractor + dataPoints []*DataPoint +} + +// Extractor is an interface for extracting data points from source code. +type Extractor interface { + Extract(ctx context.Context, b []byte) ([]*DataPoint, error) +} + +// Enhancer is an interface for enhancing a data point. +// For example, it might add a computed field to the data point, such as the definition of a proto message, +// given the name of the proto message. +type Enhancer interface { + EnhanceDataPoint(ctx context.Context, d *DataPoint) error +} + +// NewCSVExporter creates a new CSVExporter. +func NewCSVExporter(extractor Extractor, enhancers ...Enhancer) (*CSVExporter, error) { + x := &CSVExporter{ + enhancers: enhancers, + extractor: extractor, + } + + return x, nil +} + +// visitGoFile visits a Go file and extracts data points from it. +func (x *CSVExporter) visitGoFile(ctx context.Context, p string) error { + b, err := os.ReadFile(p) + if err != nil { + return fmt.Errorf("reading file %q: %w", p, err) + } + dataPoints, err := x.extractor.Extract(ctx, b) + if err != nil { + return err + } + x.dataPoints = append(x.dataPoints, dataPoints...) 
+ return nil +} + +// VisitCodeDir visits a directory and extracts data points from all Go files in the directory tree. +func (x *CSVExporter) VisitCodeDir(ctx context.Context, srcDir string) error { + if err := filepath.WalkDir(srcDir, func(p string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + switch filepath.Ext(p) { + case ".go": + // OK + default: + return nil + } + // klog.Infof("%v", p) + if err := x.visitGoFile(ctx, p); err != nil { + return fmt.Errorf("processing file %q: %w", p, err) + } + return nil + }); err != nil { + return fmt.Errorf("walking directory tree: %w", err) + } + + return nil +} + +// WriteCSVForAllTools writes CSV files for all tools. +func (x *CSVExporter) WriteCSVForAllTools(ctx context.Context, outputDir string) error { + log := klog.FromContext(ctx) + + for _, dataPoint := range x.dataPoints { + if err := x.EnhanceDataPoint(ctx, dataPoint); err != nil { + return err + } + } + + toolNames := sets.NewString() + for _, dataPoint := range x.dataPoints { + toolNames.Insert(dataPoint.Type) + } + + for _, toolName := range toolNames.List() { + outFilePath := filepath.Join(outputDir, toolName+".csv") + log.Info("writing CSV", "path", outFilePath) + var bb bytes.Buffer + if err := x.writeCSVForTool(ctx, toolName, &bb); err != nil { + return err + } + if err := os.WriteFile(outFilePath, bb.Bytes(), 0644); err != nil { + return fmt.Errorf("writing to file %q: %w", outFilePath, err) + } + } + + return nil +} + +// writeCSVForTool writes a CSV file for a single tool. +func (x *CSVExporter) writeCSVForTool(ctx context.Context, toolName string, out io.Writer) error { + var dataPoints []*DataPoint + for _, dataPoint := range x.dataPoints { + if dataPoint.Type != toolName { + continue + } + dataPoints = append(dataPoints, dataPoint) + } + + columnSet := sets.New[string]() + for _, dataPoint := range dataPoints { + dataPoint.AddCSVColumns(columnSet) + } + + columns := sets.List(columnSet) + + csvFile := csv.NewWriter(out) + + // write the CSV header + csvFile.Write(columns) + + for _, dataPoint := range dataPoints { + if err := dataPoint.WriteCSV(csvFile, columns); err != nil { + return err + } + } + + csvFile.Flush() + + if err := csvFile.Error(); err != nil { + return fmt.Errorf("writing to csv: %w", err) + } + + return nil +} + +// EnhanceDataPoint enhances a data point by running all the registered enhancers. +func (x *CSVExporter) EnhanceDataPoint(ctx context.Context, d *DataPoint) error { + for _, enhancer := range x.enhancers { + if err := enhancer.EnhanceDataPoint(ctx, d); err != nil { + return err + } + } + return nil +} + +// BuildDataPoints extracts data points from a byte slice representing a Go file. +func (x *CSVExporter) BuildDataPoints(ctx context.Context, src []byte) ([]*DataPoint, error) { + dataPoints, err := x.extractor.Extract(ctx, src) + if err != nil { + return nil, err + } + + for _, dataPoint := range dataPoints { + if err := x.EnhanceDataPoint(ctx, dataPoint); err != nil { + return nil, err + } + } + + return dataPoints, nil +} + +// RunGemini runs a prompt against Gemini, generating context based on the source code. 
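+//
+// It assembles a few-shot prompt: every collected data point with the same
+// tool type as the input is serialized first, then the input itself, followed
+// by a trailing "out " marker for the model to complete. The client reads its
+// key from the GEMINI_API_KEY environment variable; candidate responses are
+// currently logged via klog rather than written to out.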
+func (x *CSVExporter) RunGemini(ctx context.Context, input *DataPoint, out io.Writer) error { + client, err := genai.NewClient(ctx, option.WithAPIKey(os.Getenv("GEMINI_API_KEY"))) + if err != nil { + return fmt.Errorf("building gemini client: %w", err) + } + defer client.Close() + + model := client.GenerativeModel("gemini-1.5-pro-002") + + // Some values that are recommended by aistudio + model.SetTemperature(1) + model.SetTopK(40) + model.SetTopP(0.95) + model.SetMaxOutputTokens(8192) + model.ResponseMIMEType = "text/plain" + + var parts []genai.Part + + // We only include data points for the same tool as the input. + for _, dataPoint := range x.dataPoints { + if dataPoint.Type != input.Type { + continue + } + parts = append(parts, dataPoint.ToGenAIParts()...) + } + + // We also include the input data point. + parts = append(parts, input.ToGenAIParts()...) + + // We also include a prompt for Gemini to fill in. + parts = append(parts, genai.Text("out ")) + + resp, err := model.GenerateContent(ctx, parts...) + if err != nil { + return fmt.Errorf("generating content with gemini: %w", err) + } + + // Print the usage metadata (includes token count i.e. cost) + klog.Infof("UsageMetadata: %+v", resp.UsageMetadata) + + for _, candidate := range resp.Candidates { + content := candidate.Content + + for _, part := range content.Parts { + if text, ok := part.(genai.Text); ok { + klog.Infof("TEXT: %+v", text) + } else { + klog.Infof("UNKNOWN: %T %+v", part, part) + } + } + } + + return nil +} diff --git a/dev/tools/controllerbuilder/pkg/toolbot/datapoint.go b/dev/tools/controllerbuilder/pkg/toolbot/datapoint.go new file mode 100644 index 0000000000..0ba1cd053e --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/toolbot/datapoint.go @@ -0,0 +1,105 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package toolbot + +import ( + "encoding/csv" + "fmt" + "strings" + + "github.com/google/generative-ai-go/genai" + "k8s.io/apimachinery/pkg/util/sets" + "k8s.io/klog/v2" +) + +// DataPoint holds the input and output for a tool. +type DataPoint struct { + Type string + Input map[string]string + Output string +} + +// SetInput sets an input value for the data point. +func (p *DataPoint) SetInput(k, v string) { + if p.Input == nil { + p.Input = make(map[string]string) + } + p.Input[k] = v +} + +// AddCSVColumns adds the columns for the data point to the columnSet. +func (p *DataPoint) AddCSVColumns(columnSet sets.Set[string]) { + if p.Output != "" { + columnSet.Insert("out") + } + + for k := range p.Input { + columnSet.Insert("in." + k) + } +} + +// WriteCSV writes the data point to the CSV writer. 
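+//
+// The columns slice is expected to be a superset of this data point's own
+// columns (typically the union built with AddCSVColumns across all data
+// points); a column that is neither "out" nor prefixed with "in." results in
+// an error.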
+func (p *DataPoint) WriteCSV(csvWriter *csv.Writer, columns []string) error { + row := make([]string, len(columns)) + for i, column := range columns { + switch column { + case "out": + row[i] = p.Output + + default: + if strings.HasPrefix(column, "in.") { + row[i] = p.Input[strings.TrimPrefix(column, "in.")] + } else { + return fmt.Errorf("unknown column %q", column) + } + } + } + return csvWriter.Write(row) +} + +// ToGenAIParts converts the data point to the input format for Gemini. +func (p *DataPoint) ToGenAIParts() []genai.Part { + columnSet := sets.NewString() + if p.Output != "" { + columnSet.Insert("out") + } + for k := range p.Input { + columnSet.Insert("in." + k) + } + + var parts []genai.Part + columns := columnSet.List() + + for _, column := range columns { + v := "" + + switch column { + case "out": + v = p.Output + + default: + if strings.HasPrefix(column, "in.") { + v = p.Input[strings.TrimPrefix(column, "in.")] + } else { + klog.Fatalf("unknown column %q", column) + } + } + + s := fmt.Sprintf("%s %s", column, v) + parts = append(parts, genai.Text(s)) + } + + return parts +} diff --git a/dev/tools/controllerbuilder/pkg/toolbot/enhancewithprotodefinition.go b/dev/tools/controllerbuilder/pkg/toolbot/enhancewithprotodefinition.go new file mode 100644 index 0000000000..eb14bfe250 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/toolbot/enhancewithprotodefinition.go @@ -0,0 +1,249 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package toolbot + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "k8s.io/klog/v2" +) + +type protoService struct { + FilePath string + Definition []string +} + +type protoMessage struct { + FilePath string + Definition []string +} + +// EnhanceWithProtoDefinition is an enhancer that adds the definition of a proto message or service to the data point. +type EnhanceWithProtoDefinition struct { + protoDirectory string + messages map[string]*protoMessage +} + +// NewEnhanceWithProtoDefinition creates a new EnhanceWithProtoDefinition. +func NewEnhanceWithProtoDefinition(protoDirectory string) (*EnhanceWithProtoDefinition, error) { + x := &EnhanceWithProtoDefinition{ + protoDirectory: protoDirectory, + messages: make(map[string]*protoMessage), + } + if err := x.findProtoMessages(); err != nil { + return nil, err + } + return x, nil +} + +var _ Enhancer = &EnhanceWithProtoDefinition{} + +// EnhanceDataPoint enhances the data point by adding the definition of the proto message or service. 
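+//
+// A "proto.service" input is resolved against the proto checkout and stored
+// as "proto.service.definition"; a "proto.message" input is looked up in the
+// pre-indexed messages and stored as "proto.message.definition". A missing
+// message is only logged, while a missing service is treated as an error.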
+func (x *EnhanceWithProtoDefinition) EnhanceDataPoint(ctx context.Context, p *DataPoint) error { + service := p.Input["proto.service"] + if service != "" { + protoService, err := x.getProtoForService(ctx, service) + if err != nil { + return fmt.Errorf("getting proto for service %q: %w", service, err) + } + p.SetInput("proto.service.definition", strings.Join(protoService.Definition, "\n")) + } + + message := p.Input["proto.message"] + if message != "" { + protoMessage := x.messages[message] + if protoMessage != nil { + p.SetInput("proto.message.definition", strings.Join(protoMessage.Definition, "\n")) + } else { + klog.Infof("unable to find proto message %q", message) + } + } + + return nil +} + +// getProtoForService gets the proto definition for a service. +func (x *EnhanceWithProtoDefinition) getProtoForService(ctx context.Context, serviceName string) (*protoService, error) { + var matches []*protoService + if err := filepath.WalkDir(x.protoDirectory, func(p string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + switch filepath.Ext(p) { + case ".proto": + // OK + default: + return nil + } + b, err := os.ReadFile(p) + if err != nil { + return fmt.Errorf("reading file %q: %w", p, err) + } + r := bytes.NewReader(b) + br := bufio.NewReader(r) + + packageName := "" + + for { + line, err := br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return fmt.Errorf("scanning file %q: %w", p, err) + } + line = strings.TrimSuffix(line, "\n") + + tokens := strings.Fields(line) + + if len(tokens) >= 2 && tokens[0] == "package" { + packageName = strings.TrimSuffix(tokens[1], ";") + } + + if len(tokens) >= 2 && tokens[0] == "service" { + found := packageName + "." + tokens[1] + + if found != serviceName { + continue + } + + match := &protoService{FilePath: p} + indent := 0 + for { + match.Definition = append(match.Definition, line) + for _, r := range line { + if r == '{' { + indent++ + } + if r == '}' { + indent-- + } + } + if indent == 0 { + break + } + line, err = br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return fmt.Errorf("scanning file %q: %w", p, err) + } + line = strings.TrimSuffix(line, "\n") + } + matches = append(matches, match) + } + } + return nil + }); err != nil { + return nil, fmt.Errorf("walking directory tree: %w", err) + } + + if len(matches) == 0 { + return nil, fmt.Errorf("service %q not found", serviceName) + } + if len(matches) > 1 { + return nil, fmt.Errorf("found multiple services with name %q", serviceName) + } + return matches[0], nil +} + +// findProtoMessages finds all the proto messages in the proto directory. 
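+//
+// It walks every .proto file under protoDirectory, tracks the current
+// "package" declaration, and indexes each "message" block under its
+// fully-qualified name (for example "google.cloud.kms.v1.CryptoKey"),
+// using brace counting to capture the complete definition.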
+func (x *EnhanceWithProtoDefinition) findProtoMessages() error { + if err := filepath.WalkDir(x.protoDirectory, func(p string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + switch filepath.Ext(p) { + case ".proto": + // OK + default: + return nil + } + b, err := os.ReadFile(p) + if err != nil { + return fmt.Errorf("reading file %q: %w", p, err) + } + r := bytes.NewReader(b) + br := bufio.NewReader(r) + + packageName := "" + + for { + line, err := br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return fmt.Errorf("scanning file %q: %w", p, err) + } + line = strings.TrimSuffix(line, "\n") + + tokens := strings.Fields(line) + + if len(tokens) >= 2 && tokens[0] == "package" { + packageName = strings.TrimSuffix(tokens[1], ";") + } + + if len(tokens) >= 2 && tokens[0] == "message" { + messageName := packageName + "." + tokens[1] + + message := &protoMessage{FilePath: p} + indent := 0 + for { + message.Definition = append(message.Definition, line) + for _, r := range line { + if r == '{' { + indent++ + } + if r == '}' { + indent-- + } + } + if indent == 0 { + break + } + line, err = br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return fmt.Errorf("scanning file %q: %w", p, err) + } + line = strings.TrimSuffix(line, "\n") + } + x.messages[messageName] = message + } + } + return nil + }); err != nil { + return fmt.Errorf("walking directory tree: %w", err) + } + + return nil +} diff --git a/dev/tools/controllerbuilder/pkg/toolbot/extracttoolmarkers.go b/dev/tools/controllerbuilder/pkg/toolbot/extracttoolmarkers.go new file mode 100644 index 0000000000..1787bc2508 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/toolbot/extracttoolmarkers.go @@ -0,0 +1,117 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package toolbot + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "strings" + + "k8s.io/klog" +) + +// ExtractToolMarkers extracts tool markers from source code. +type ExtractToolMarkers struct { +} + +// Extract extracts tool markers from source code. 
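+//
+// Two marker styles are recognized. A "// +tool:<name>" comment starts a data
+// point whose inputs come from the immediately following "// key: value"
+// comment lines and whose output is the entire file, for example:
+//
+//	// +tool:mockgcp-support
+//	// proto.service: google.cloud.kms.v1.KeyManagementService
+//	// proto.resource: CryptoKey
+//
+// A "// +kcc:proto=<message>" comment yields a data point whose output is the
+// Go type definition that follows, up to and including its closing brace.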
+func (x *ExtractToolMarkers) Extract(ctx context.Context, src []byte) ([]*DataPoint, error) { + var dataPoints []*DataPoint + + r := bytes.NewReader(src) + br := bufio.NewReader(r) + + for { + line, err := br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return nil, fmt.Errorf("scanning code: %w", err) + } + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "//") { + comment := strings.TrimPrefix(line, "//") + comment = strings.TrimSpace(comment) + if strings.HasPrefix(comment, "+tool:") { + klog.V(2).Infof("found tool line %q", comment) + toolName := strings.TrimPrefix(comment, "+tool:") + dataPoint := &DataPoint{ + Type: toolName, + Output: string(src), + } + + for { + line, err := br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return nil, fmt.Errorf("scanning code: %w", err) + } + line = strings.TrimSpace(line) + if !strings.HasPrefix(line, "//") { + break + } + toolLine := strings.TrimPrefix(line, "//") + toolLine = strings.TrimPrefix(toolLine, " ") + + tokens := strings.SplitN(toolLine, ":", 2) + if len(tokens) == 2 { + dataPoint.SetInput(tokens[0], strings.TrimSpace(tokens[1])) + } else { + return nil, fmt.Errorf("cannot parse tool line %q", toolLine) + } + } + dataPoints = append(dataPoints, dataPoint) + } + + if strings.HasPrefix(comment, "+kcc:proto=") { + klog.V(2).Infof("found tool line %q", comment) + toolName := "kcc-proto" + dataPoint := &DataPoint{ + Type: toolName, + } + + proto := strings.TrimPrefix(comment, "+kcc:proto=") + dataPoint.SetInput("proto.message", proto) + + var bb bytes.Buffer + for { + line, err := br.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return nil, fmt.Errorf("scanning code: %w", err) + } + + bb.WriteString(line) + + s := strings.TrimSpace(line) + if strings.HasPrefix(s, "}") { + break + } + } + dataPoint.Output = bb.String() + dataPoints = append(dataPoints, dataPoint) + } + } + } + return dataPoints, nil +} diff --git a/mockgcp/mockkms/cryptokey.go b/mockgcp/mockkms/cryptokey.go index 36525fa235..f9acb3b50c 100644 --- a/mockgcp/mockkms/cryptokey.go +++ b/mockgcp/mockkms/cryptokey.go @@ -12,11 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -// +mockgcp-support -// apiVersion: kms.cnrm.cloud.google.com/v1beta1 -// kind: KMSCryptoKey -// service: google.cloud.kms.v1.KeyManagementService -// resource: CryptoKey +// +tool:mockgcp-support +// krm.apiVersion: kms.cnrm.cloud.google.com/v1beta1 +// krm.kind: KMSCryptoKey +// proto.service: google.cloud.kms.v1.KeyManagementService +// proto.resource: CryptoKey package mockkms diff --git a/mockgcp/mockkms/cryptokeyversion.go b/mockgcp/mockkms/cryptokeyversion.go index 58542c6208..1d962d5306 100644 --- a/mockgcp/mockkms/cryptokeyversion.go +++ b/mockgcp/mockkms/cryptokeyversion.go @@ -12,11 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
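Editor's sketch (not part of the patch): the header rewrites in this and the following mock files replace the bare +mockgcp-support marker, which the extractor above does not recognize, with the +tool: form and namespaced krm.* / proto.* keys. Fed one of the rewritten headers, Extract produces a single DataPoint of type mockgcp-support whose inputs are those key/value pairs; the driver function below is illustrative only.

package toolbot

import (
	"context"
	"fmt"
)

// exampleExtract runs ExtractToolMarkers over a rewritten mockgcp header.
func exampleExtract(ctx context.Context) error {
	src := []byte(`// +tool:mockgcp-support
// krm.apiVersion: kms.cnrm.cloud.google.com/v1beta1
// krm.kind: KMSCryptoKey
// proto.service: google.cloud.kms.v1.KeyManagementService
// proto.resource: CryptoKey

package mockkms
`)
	x := &ExtractToolMarkers{}
	points, err := x.Extract(ctx, src)
	if err != nil {
		return err
	}
	for _, p := range points {
		// Expected: type=mockgcp-support with inputs such as
		// proto.service=google.cloud.kms.v1.KeyManagementService.
		fmt.Printf("type=%s inputs=%v\n", p.Type, p.Input)
	}
	return nil
}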
-// +mockgcp-support -// apiVersion: kms.cnrm.cloud.google.com/v1beta1 -// kind: KMSCryptoKeyVersion -// service: google.cloud.kms.v1.KeyManagementService -// resource: CryptoKeyVersion +// +tool:mockgcp-support +// krm.apiVersion: kms.cnrm.cloud.google.com/v1beta1 +// krm.kind: KMSCryptoKeyVersion +// proto.service: google.cloud.kms.v1.KeyManagementService +// proto.resource: CryptoKeyVersion package mockkms diff --git a/mockgcp/mockkms/keyring.go b/mockgcp/mockkms/keyring.go index d373883e61..4bc27d883c 100644 --- a/mockgcp/mockkms/keyring.go +++ b/mockgcp/mockkms/keyring.go @@ -12,11 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -// +mockgcp-support -// apiVersion: kms.cnrm.cloud.google.com/v1beta1 -// kind: KMSKeyRing -// service: google.cloud.kms.v1.KeyManagementService -// resource: KeyRing +// +tool:mockgcp-support +// krm.apiVersion: kms.cnrm.cloud.google.com/v1beta1 +// krm.kind: KMSKeyRing +// proto.service: google.cloud.kms.v1.KeyManagementService +// proto.resource: KeyRing package mockkms diff --git a/mockgcp/mocklogging/logbucket.go b/mockgcp/mocklogging/logbucket.go index 1dc4e1c322..a01a1b8144 100644 --- a/mockgcp/mocklogging/logbucket.go +++ b/mockgcp/mocklogging/logbucket.go @@ -13,10 +13,10 @@ // limitations under the License. // +tool:mockgcp-support -// apiVersion: logging.cnrm.cloud.google.com/v1beta1 -// kind: LoggingLogBucket -// service: google.logging.v2.ConfigServiceV2 -// resource: LogBucket +// krm.apiVersion: logging.cnrm.cloud.google.com/v1beta1 +// krm.kind: LoggingLogBucket +// proto.service: google.logging.v2.ConfigServiceV2 +// proto.resource: LogBucket package mocklogging diff --git a/mockgcp/mocklogging/logmetric.go b/mockgcp/mocklogging/logmetric.go index 853a41dda2..17e0e27e42 100644 --- a/mockgcp/mocklogging/logmetric.go +++ b/mockgcp/mocklogging/logmetric.go @@ -13,10 +13,10 @@ // limitations under the License. // +tool:mockgcp-support -// apiVersion: logging.cnrm.cloud.google.com/v1beta1 -// kind: LoggingLogMetric -// service: google.logging.v2.MetricsServiceV2 -// resource: LogMetric +// krm.apiVersion: logging.cnrm.cloud.google.com/v1beta1 +// krm.kind: LoggingLogMetric +// proto.service: google.logging.v2.MetricsServiceV2 +// proto.resource: LogMetric package mocklogging @@ -184,7 +184,7 @@ func (s *MockService) parseLogMetricName(name string) (*logMetricName, error) { } return name, nil - } else { - return nil, status.Errorf(codes.InvalidArgument, "name %q is not valid", name) } + + return nil, status.Errorf(codes.InvalidArgument, "name %q is not valid", name) } diff --git a/mockgcp/mocklogging/logview.go b/mockgcp/mocklogging/logview.go index 78976f7fe2..18bbd2fddf 100644 --- a/mockgcp/mocklogging/logview.go +++ b/mockgcp/mocklogging/logview.go @@ -13,10 +13,10 @@ // limitations under the License. 
// +tool:mockgcp-support -// apiVersion: logging.cnrm.cloud.google.com/v1beta1 -// kind: LoggingLogView -// service: google.logging.v2.ConfigServiceV2 -// resource: LogView +// krm.apiVersion: logging.cnrm.cloud.google.com/v1beta1 +// krm.kind: LoggingLogView +// proto.service: google.logging.v2.ConfigServiceV2 +// proto.resource: LogView package mocklogging diff --git a/mockgcp/mockpubsublite/reservation.go b/mockgcp/mockpubsublite/reservation.go index 6d92c68ee7..474dabba70 100644 --- a/mockgcp/mockpubsublite/reservation.go +++ b/mockgcp/mockpubsublite/reservation.go @@ -12,6 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. +// +tool:mockgcp-support +// krm.apiVersion: pubsublite.cnrm.cloud.google.com/v1beta1 +// krm.kind: PubSubLiteReservation +// proto.service: google.cloud.pubsublite.v1.AdminService +// proto.resource: Reservation + package mockpubsublite import ( diff --git a/mockgcp/mockpubsublite/subscription.go b/mockgcp/mockpubsublite/subscription.go index 72c1b1a31b..c550190848 100644 --- a/mockgcp/mockpubsublite/subscription.go +++ b/mockgcp/mockpubsublite/subscription.go @@ -12,6 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. +// +tool:mockgcp-support +// krm.apiVersion: pubsublite.cnrm.cloud.google.com/v1beta1 +// krm.kind: PubSubLiteSubscription +// proto.service: google.cloud.pubsublite.v1.AdminService +// proto.resource: Subscription + package mockpubsublite import ( diff --git a/mockgcp/mockpubsublite/topic.go b/mockgcp/mockpubsublite/topic.go index 66b153bce5..a37bc02ceb 100644 --- a/mockgcp/mockpubsublite/topic.go +++ b/mockgcp/mockpubsublite/topic.go @@ -12,6 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. +// +tool:mockgcp-support +// krm.apiVersion: pubsublite.cnrm.cloud.google.com/v1beta1 +// krm.kind: PubSubLiteTopic +// proto.service: google.cloud.pubsublite.v1.AdminService +// proto.resource: Topic + package mockpubsublite import ( From 9ea511b149341fb3b9fa453b60729eae490cb4c8 Mon Sep 17 00:00:00 2001 From: justinsb Date: Tue, 5 Nov 2024 10:19:07 -0500 Subject: [PATCH 22/31] tests: improve turbo-e2e tool to run on mac and linux Also fix error handling. --- dev/tasks/turbo-e2e | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/dev/tasks/turbo-e2e b/dev/tasks/turbo-e2e index 5f33984e30..862f30e487 100755 --- a/dev/tasks/turbo-e2e +++ b/dev/tasks/turbo-e2e @@ -30,7 +30,16 @@ export PREBUILT_TEST_BINARY=${REPO_ROOT}/.build/tests-e2e go test -c -o ${PREBUILT_TEST_BINARY} ./tests/e2e echo "Running tests" -dev/tasks/list-tests | sort | uniq | xargs -P 40 -S 1024 -I {} /bin/bash -c "echo {}; RUN_TESTS={} dev/tasks/run-e2e > testlogs/{}/log 2>&1" +EXIT_CODE=0 +if [[ $(uname) == "Darwin" ]]; then + (dev/tasks/list-tests | sort | uniq | xargs -P 40 -S 1024 -I {} /bin/bash -c "echo {}; RUN_TESTS={} dev/tasks/run-e2e > testlogs/{}/log 2>&1") || EXIT_CODE=$? +else + (dev/tasks/list-tests | sort | uniq | xargs -P 40 -I {} /bin/bash -c "echo {}; RUN_TESTS={} dev/tasks/run-e2e > testlogs/{}/log 2>&1") || EXIT_CODE=$? +fi -echo "Checking for failures" -rgrep FAIL testlogs/ +if [[ ${EXIT_CODE} -ne 0 ]]; then + echo "Some tests failed!" 
+ rgrep FAIL testlogs/ +fi + +exit ${EXIT_CODE} \ No newline at end of file From d10694b4789629a7d4c70b0a3cb5672bd2b5d36e Mon Sep 17 00:00:00 2001 From: alex <8968914+acpana@users.noreply.github.com> Date: Tue, 5 Nov 2024 07:57:00 -0800 Subject: [PATCH 23/31] Update release-1.125.md --- docs/releasenotes/release-1.125.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/releasenotes/release-1.125.md b/docs/releasenotes/release-1.125.md index a35b914142..e611008d20 100644 --- a/docs/releasenotes/release-1.125.md +++ b/docs/releasenotes/release-1.125.md @@ -11,6 +11,7 @@ TODO: list contributors with `git log v1.124.0... | grep Merge | grep from | awk ## Resources promoted from alpha to beta: * `RedisCluster` is now a v1beta1 resource. +* `BigQueryAnlayticsHubDataExchange` is now a v1beta1 resource. ## New Resources: From 6c18aaebad6c5dc59598ccb8ce2e254b23762943 Mon Sep 17 00:00:00 2001 From: Jason Vigil Date: Mon, 21 Oct 2024 22:29:23 +0000 Subject: [PATCH 24/31] feat: Promote WorkstationCluster to v1beta1 --- apis/workstations/v1beta1/doc.go | 16 + .../workstations/v1beta1/groupversion_info.go | 33 + apis/workstations/v1beta1/types.generated.go | 162 ++++ .../v1beta1/workstationcluster_types.go | 214 +++++ .../v1beta1/zz_generated.deepcopy.go | 314 ++++++++ ...rs.workstations.cnrm.cloud.google.com.yaml | 346 ++++++++ .../compute_v1beta1_computenetwork.yaml | 21 + .../compute_v1beta1_computesubnetwork.yaml | 23 + ...rkstations_v1beta1_workstationcluster.yaml | 26 + .../compute_v1beta1_computenetwork.yaml | 21 + .../compute_v1beta1_computesubnetwork.yaml | 23 + ...rkstations_v1beta1_workstationcluster.yaml | 37 + config/servicemappings/workstations.yaml | 27 + dev/tools/controllerbuilder/generate.sh | 4 +- .../workstations/{v1alpha1 => v1beta1}/doc.go | 4 +- .../{v1alpha1 => v1beta1}/register.go | 6 +- .../workstationcluster_types.go | 2 +- .../zz_generated.deepcopy.go | 6 +- .../client/clientset/versioned/clientset.go | 16 +- .../versioned/fake/clientset_generated.go | 10 +- .../clientset/versioned/fake/register.go | 4 +- .../clientset/versioned/scheme/register.go | 4 +- .../workstations/{v1alpha1 => v1beta1}/doc.go | 2 +- .../{v1alpha1 => v1beta1}/fake/doc.go | 0 .../fake/fake_workstationcluster.go | 50 +- .../fake/fake_workstations_client.go | 8 +- .../generated_expansion.go | 2 +- .../workstationcluster.go | 42 +- .../workstations_client.go | 36 +- .../workstationcluster_controller.go | 2 +- .../workstationcluster_mappings.go | 2 +- .../workstationcluster_normalize.go | 2 +- pkg/gvks/supportedgvks/gvks_generated.go | 10 + .../snippetgeneration/snippetgeneration.go | 1 + pkg/test/resourcefixture/sets.go | 1 + ...object_workstationcluster-full.golden.yaml | 4 +- .../workstationcluster-full/create.yaml | 2 +- .../workstationcluster-full/update.yaml | 2 +- ...ect_workstationcluster-minimal.golden.yaml | 4 +- .../workstationcluster-minimal/create.yaml | 2 +- .../workstations/workstationcluster.md | 746 ++++++++++++++++++ .../workstations_workstationcluster.tmpl | 53 ++ 42 files changed, 2184 insertions(+), 106 deletions(-) create mode 100644 apis/workstations/v1beta1/doc.go create mode 100644 apis/workstations/v1beta1/groupversion_info.go create mode 100644 apis/workstations/v1beta1/types.generated.go create mode 100644 apis/workstations/v1beta1/workstationcluster_types.go create mode 100644 apis/workstations/v1beta1/zz_generated.deepcopy.go create mode 100644 config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computenetwork.yaml create mode 100644 
config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computesubnetwork.yaml create mode 100644 config/samples/resources/workstationcluster/basic-workstationcluster/workstations_v1beta1_workstationcluster.yaml create mode 100644 config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computenetwork.yaml create mode 100644 config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computesubnetwork.yaml create mode 100644 config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/workstations_v1beta1_workstationcluster.yaml create mode 100644 config/servicemappings/workstations.yaml rename pkg/clients/generated/apis/workstations/{v1alpha1 => v1beta1}/doc.go (93%) rename pkg/clients/generated/apis/workstations/{v1alpha1 => v1beta1}/register.go (93%) rename pkg/clients/generated/apis/workstations/{v1alpha1 => v1beta1}/workstationcluster_types.go (99%) rename pkg/clients/generated/apis/workstations/{v1alpha1 => v1beta1}/zz_generated.deepcopy.go (98%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/doc.go (98%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/fake/doc.go (100%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/fake/fake_workstationcluster.go (70%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/fake/fake_workstations_client.go (76%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/generated_expansion.go (98%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/workstationcluster.go (78%) rename pkg/clients/generated/client/clientset/versioned/typed/workstations/{v1alpha1 => v1beta1}/workstations_client.go (66%) create mode 100644 scripts/generate-google3-docs/resource-reference/generated/resource-docs/workstations/workstationcluster.md create mode 100644 scripts/generate-google3-docs/resource-reference/templates/workstations_workstationcluster.tmpl diff --git a/apis/workstations/v1beta1/doc.go b/apis/workstations/v1beta1/doc.go new file mode 100644 index 0000000000..ab6b6f0c9f --- /dev/null +++ b/apis/workstations/v1beta1/doc.go @@ -0,0 +1,16 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kcc:proto=google.cloud.workstations.v1 +package v1beta1 diff --git a/apis/workstations/v1beta1/groupversion_info.go b/apis/workstations/v1beta1/groupversion_info.go new file mode 100644 index 0000000000..771c4ffa32 --- /dev/null +++ b/apis/workstations/v1beta1/groupversion_info.go @@ -0,0 +1,33 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kubebuilder:object:generate=true +// +groupName=workstations.cnrm.cloud.google.com +package v1beta1 + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects + GroupVersion = schema.GroupVersion{Group: "workstations.cnrm.cloud.google.com", Version: "v1beta1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme +) diff --git a/apis/workstations/v1beta1/types.generated.go b/apis/workstations/v1beta1/types.generated.go new file mode 100644 index 0000000000..cb1944d784 --- /dev/null +++ b/apis/workstations/v1beta1/types.generated.go @@ -0,0 +1,162 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +/* +// +kcc:proto=google.cloud.workstations.v1.WorkstationCluster +type WorkstationCluster struct { + // Full name of this workstation cluster. + Name *string `json:"name,omitempty"` + + // Optional. Human-readable name for this workstation cluster. + DisplayName *string `json:"displayName,omitempty"` + + // Output only. A system-assigned unique identifier for this workstation + // cluster. + Uid *string `json:"uid,omitempty"` + + // Output only. Indicates whether this workstation cluster is currently being + // updated to match its intended state. + Reconciling *bool `json:"reconciling,omitempty"` + + // Optional. Client-specified annotations. + Annotations map[string]string `json:"annotations,omitempty"` + + // Optional. + // [Labels](https://cloud.google.com/workstations/docs/label-resources) that + // are applied to the workstation cluster and that are also propagated to the + // underlying Compute Engine resources. + Labels map[string]string `json:"labels,omitempty"` + + // Output only. Time when this workstation cluster was created. + CreateTime *string `json:"createTime,omitempty"` + + // Output only. Time when this workstation cluster was most recently updated. + UpdateTime *string `json:"updateTime,omitempty"` + + // Output only. Time when this workstation cluster was soft-deleted. + DeleteTime *string `json:"deleteTime,omitempty"` + + // Optional. Checksum computed by the server. May be sent on update and delete + // requests to make sure that the client has an up-to-date value before + // proceeding. + Etag *string `json:"etag,omitempty"` + + // Immutable. 
Name of the Compute Engine network in which instances associated + // with this workstation cluster will be created. + Network *string `json:"network,omitempty"` + + // Immutable. Name of the Compute Engine subnetwork in which instances + // associated with this workstation cluster will be created. Must be part of + // the subnetwork specified for this workstation cluster. + Subnetwork *string `json:"subnetwork,omitempty"` + + // Output only. The private IP address of the control plane for this + // workstation cluster. Workstation VMs need access to this IP address to work + // with the service, so make sure that your firewall rules allow egress from + // the workstation VMs to this address. + ControlPlaneIp *string `json:"controlPlaneIp,omitempty"` + + // Optional. Configuration for private workstation cluster. + PrivateClusterConfig *WorkstationCluster_PrivateClusterConfig `json:"privateClusterConfig,omitempty"` + + // Output only. Whether this workstation cluster is in degraded mode, in which + // case it may require user action to restore full functionality. Details can + // be found in + // [conditions][google.cloud.workstations.v1.WorkstationCluster.conditions]. + Degraded *bool `json:"degraded,omitempty"` + + // Output only. Status conditions describing the workstation cluster's current + // state. + Conditions []Status `json:"conditions,omitempty"` +} + +// +kcc:proto=google.cloud.workstations.v1.WorkstationCluster.PrivateClusterConfig +type WorkstationCluster_PrivateClusterConfig struct { + // Immutable. Whether Workstations endpoint is private. + EnablePrivateEndpoint *bool `json:"enablePrivateEndpoint,omitempty"` + + // Output only. Hostname for the workstation cluster. This field will be + // populated only when private endpoint is enabled. To access workstations + // in the workstation cluster, create a new DNS zone mapping this domain + // name to an internal IP address and a forwarding rule mapping that address + // to the service attachment. + ClusterHostname *string `json:"clusterHostname,omitempty"` + + // Output only. Service attachment URI for the workstation cluster. The + // service attachment is created when private endpoint is enabled. To access + // workstations in the workstation cluster, configure access to the managed + // service using [Private Service + // Connect](https://cloud.google.com/vpc/docs/configure-private-service-connect-services). + ServiceAttachmentUri *string `json:"serviceAttachmentUri,omitempty"` + + // Optional. Additional projects that are allowed to attach to the + // workstation cluster's service attachment. By default, the workstation + // cluster's project and the VPC host project (if different) are allowed. + AllowedProjects []string `json:"allowedProjects,omitempty"` +} + +// +kcc:proto=google.protobuf.Any +type Any struct { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. 
However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + TypeURL *string `json:"typeURL,omitempty"` + + // Must be a valid serialized protocol buffer of the above specified type. + Value []byte `json:"value,omitempty"` +} + +// +kcc:proto=google.rpc.Status +type Status struct { + // The status code, which should be an enum value of + // [google.rpc.Code][google.rpc.Code]. + Code *int32 `json:"code,omitempty"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized + // by the client. + Message *string `json:"message,omitempty"` + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []Any `json:"details,omitempty"` +} +*/ diff --git a/apis/workstations/v1beta1/workstationcluster_types.go b/apis/workstations/v1beta1/workstationcluster_types.go new file mode 100644 index 0000000000..a14d73a9a2 --- /dev/null +++ b/apis/workstations/v1beta1/workstationcluster_types.go @@ -0,0 +1,214 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" +) + +var WorkstationClusterGVK = GroupVersion.WithKind("WorkstationCluster") + +// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! +// NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. + +// WorkstationClusterSpec defines the desired state of WorkstationCluster +// +kcc:proto=google.cloud.workstations.v1.WorkstationCluster +type WorkstationClusterSpec struct { + // Immutable. The Project that this resource belongs to. + // +kubebuilder:validation:XValidation:rule="self == oldSelf",message="ResourceID field is immutable" + ProjectRef refs.ProjectRef `json:"projectRef"` + + // The location of the cluster. 
+ Location string `json:"location,omitempty"` + + // +kubebuilder:validation:XValidation:rule="self == oldSelf",message="ResourceID field is immutable" + // Immutable. + // The WorkstationCluster name. If not given, the metadata.name will be used. + ResourceID *string `json:"resourceID,omitempty"` + + // Optional. Human-readable name for this workstation cluster. + DisplayName *string `json:"displayName,omitempty"` + + // Optional. Client-specified annotations. + Annotations []WorkstationClusterAnnotation `json:"annotations,omitempty"` + + // Optional. + // [Labels](https://cloud.google.com/workstations/docs/label-resources) that + // are applied to the workstation cluster and that are also propagated to the + // underlying Compute Engine resources. + Labels []WorkstationClusterLabel `json:"labels,omitempty"` + + // Immutable. Reference to the Compute Engine network in which instances associated + // with this workstation cluster will be created. + // +required + NetworkRef refs.ComputeNetworkRef `json:"networkRef"` + + // Immutable. Reference to the Compute Engine subnetwork in which instances + // associated with this workstation cluster will be created. Must be part of + // the subnetwork specified for this workstation cluster. + // +required + SubnetworkRef refs.ComputeSubnetworkRef `json:"subnetworkRef"` + + // Optional. Configuration for private workstation cluster. + PrivateClusterConfig *WorkstationCluster_PrivateClusterConfig `json:"privateClusterConfig,omitempty"` +} + +type WorkstationClusterAnnotation struct { + // Key for the annotation. + Key string `json:"key,omitempty"` + + // Value for the annotation. + Value string `json:"value,omitempty"` +} + +type WorkstationClusterLabel struct { + // Key for the annotation. + Key string `json:"key,omitempty"` + + // Value for the annotation. + Value string `json:"value,omitempty"` +} + +// +kcc:proto=google.cloud.workstations.v1.WorkstationCluster.PrivateClusterConfig +type WorkstationCluster_PrivateClusterConfig struct { + // Immutable. Whether Workstations endpoint is private. + EnablePrivateEndpoint *bool `json:"enablePrivateEndpoint,omitempty"` + + // Optional. Additional projects that are allowed to attach to the + // workstation cluster's service attachment. By default, the workstation + // cluster's project and the VPC host project (if different) are allowed. + AllowedProjects []refs.ProjectRef `json:"allowedProjects,omitempty"` +} + +// WorkstationClusterStatus defines the config connector machine state of WorkstationCluster +type WorkstationClusterStatus struct { + /* Conditions represent the latest available observations of the + object's current state. */ + Conditions []v1alpha1.Condition `json:"conditions,omitempty"` + + // ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource. + ObservedGeneration *int64 `json:"observedGeneration,omitempty"` + + // A unique specifier for the WorkstationCluster resource in GCP. + ExternalRef *string `json:"externalRef,omitempty"` + + // ObservedState is the state of the resource as most recently observed in GCP. 
+ ObservedState *WorkstationClusterObservedState `json:"observedState,omitempty"` +} + +// WorkstationClusterSpec defines the desired state of WorkstationCluster +// +kcc:proto=google.cloud.workstations.v1.WorkstationCluster +type WorkstationClusterObservedState struct { + // Output only. A system-assigned unique identifier for this workstation + // cluster. + Uid *string `json:"uid,omitempty"` + + // Output only. Indicates whether this workstation cluster is currently being + // updated to match its intended state. + Reconciling *bool `json:"reconciling,omitempty"` + + // Output only. Time when this workstation cluster was created. + CreateTime *string `json:"createTime,omitempty"` + + // Output only. Time when this workstation cluster was most recently updated. + UpdateTime *string `json:"updateTime,omitempty"` + + // Output only. Time when this workstation cluster was soft-deleted. + DeleteTime *string `json:"deleteTime,omitempty"` + + // Optional. Checksum computed by the server. May be sent on update and delete + // requests to make sure that the client has an up-to-date value before + // proceeding. + Etag *string `json:"etag,omitempty"` + + // Output only. The private IP address of the control plane for this + // workstation cluster. Workstation VMs need access to this IP address to work + // with the service, so make sure that your firewall rules allow egress from + // the workstation VMs to this address. + ControlPlaneIP *string `json:"controlPlaneIP,omitempty"` + + // Output only. Hostname for the workstation cluster. This field will be + // populated only when private endpoint is enabled. To access workstations + // in the workstation cluster, create a new DNS zone mapping this domain + // name to an internal IP address and a forwarding rule mapping that address + // to the service attachment. + ClusterHostname *string `json:"clusterHostname,omitempty"` + + // Output only. Service attachment URI for the workstation cluster. The + // service attachment is created when private endpoint is enabled. To access + // workstations in the workstation cluster, configure access to the managed + // service using [Private Service + // Connect](https://cloud.google.com/vpc/docs/configure-private-service-connect-services). + ServiceAttachmentURI *string `json:"serviceAttachmentUri,omitempty"` + + // Output only. Whether this workstation cluster is in degraded mode, in which + // case it may require user action to restore full functionality. Details can + // be found in + // [conditions][google.cloud.workstations.v1.WorkstationCluster.conditions]. + Degraded *bool `json:"degraded,omitempty"` + + // Output only. Status conditions describing the workstation cluster's current + // state. + GCPConditions []WorkstationClusterGCPCondition `json:"gcpConditions,omitempty"` +} + +// +kcc:proto=google.rpc.Status +type WorkstationClusterGCPCondition struct { + // The status code, which should be an enum value of + // [google.rpc.Code][google.rpc.Code]. + Code *int32 `json:"code,omitempty"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized + // by the client. 
+ Message *string `json:"message,omitempty"` +} + +// +genclient +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// +kubebuilder:resource:categories=gcp +// +kubebuilder:subresource:status +// +kubebuilder:storageversion +// +kubebuilder:metadata:labels="cnrm.cloud.google.com/managed-by-kcc=true";"cnrm.cloud.google.com/system=true" +// +kubebuilder:printcolumn:name="Age",JSONPath=".metadata.creationTimestamp",type="date" +// +kubebuilder:printcolumn:name="Ready",JSONPath=".status.conditions[?(@.type=='Ready')].status",type="string",description="When 'True', the most recent reconcile of the resource succeeded" +// +kubebuilder:printcolumn:name="Status",JSONPath=".status.conditions[?(@.type=='Ready')].reason",type="string",description="The reason for the value in 'Ready'" +// +kubebuilder:printcolumn:name="Status Age",JSONPath=".status.conditions[?(@.type=='Ready')].lastTransitionTime",type="date",description="The last transition time for the value in 'Status'" + +// WorkstationCluster is the Schema for the WorkstationCluster API +// +k8s:openapi-gen=true +type WorkstationCluster struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + Spec WorkstationClusterSpec `json:"spec,omitempty"` + Status WorkstationClusterStatus `json:"status,omitempty"` +} + +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object +// WorkstationClusterList contains a list of WorkstationCluster +type WorkstationClusterList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []WorkstationCluster `json:"items"` +} + +func init() { + SchemeBuilder.Register(&WorkstationCluster{}, &WorkstationClusterList{}) +} diff --git a/apis/workstations/v1beta1/zz_generated.deepcopy.go b/apis/workstations/v1beta1/zz_generated.deepcopy.go new file mode 100644 index 0000000000..3e4ab1982b --- /dev/null +++ b/apis/workstations/v1beta1/zz_generated.deepcopy.go @@ -0,0 +1,314 @@ +//go:build !ignore_autogenerated + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by controller-gen. DO NOT EDIT. + +package v1beta1 + +import ( + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationCluster) DeepCopyInto(out *WorkstationCluster) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationCluster. 
+func (in *WorkstationCluster) DeepCopy() *WorkstationCluster { + if in == nil { + return nil + } + out := new(WorkstationCluster) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *WorkstationCluster) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationClusterAnnotation) DeepCopyInto(out *WorkstationClusterAnnotation) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterAnnotation. +func (in *WorkstationClusterAnnotation) DeepCopy() *WorkstationClusterAnnotation { + if in == nil { + return nil + } + out := new(WorkstationClusterAnnotation) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationClusterGCPCondition) DeepCopyInto(out *WorkstationClusterGCPCondition) { + *out = *in + if in.Code != nil { + in, out := &in.Code, &out.Code + *out = new(int32) + **out = **in + } + if in.Message != nil { + in, out := &in.Message, &out.Message + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterGCPCondition. +func (in *WorkstationClusterGCPCondition) DeepCopy() *WorkstationClusterGCPCondition { + if in == nil { + return nil + } + out := new(WorkstationClusterGCPCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationClusterLabel) DeepCopyInto(out *WorkstationClusterLabel) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterLabel. +func (in *WorkstationClusterLabel) DeepCopy() *WorkstationClusterLabel { + if in == nil { + return nil + } + out := new(WorkstationClusterLabel) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationClusterList) DeepCopyInto(out *WorkstationClusterList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]WorkstationCluster, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterList. +func (in *WorkstationClusterList) DeepCopy() *WorkstationClusterList { + if in == nil { + return nil + } + out := new(WorkstationClusterList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *WorkstationClusterList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *WorkstationClusterObservedState) DeepCopyInto(out *WorkstationClusterObservedState) { + *out = *in + if in.Uid != nil { + in, out := &in.Uid, &out.Uid + *out = new(string) + **out = **in + } + if in.Reconciling != nil { + in, out := &in.Reconciling, &out.Reconciling + *out = new(bool) + **out = **in + } + if in.CreateTime != nil { + in, out := &in.CreateTime, &out.CreateTime + *out = new(string) + **out = **in + } + if in.UpdateTime != nil { + in, out := &in.UpdateTime, &out.UpdateTime + *out = new(string) + **out = **in + } + if in.DeleteTime != nil { + in, out := &in.DeleteTime, &out.DeleteTime + *out = new(string) + **out = **in + } + if in.Etag != nil { + in, out := &in.Etag, &out.Etag + *out = new(string) + **out = **in + } + if in.ControlPlaneIP != nil { + in, out := &in.ControlPlaneIP, &out.ControlPlaneIP + *out = new(string) + **out = **in + } + if in.ClusterHostname != nil { + in, out := &in.ClusterHostname, &out.ClusterHostname + *out = new(string) + **out = **in + } + if in.ServiceAttachmentURI != nil { + in, out := &in.ServiceAttachmentURI, &out.ServiceAttachmentURI + *out = new(string) + **out = **in + } + if in.Degraded != nil { + in, out := &in.Degraded, &out.Degraded + *out = new(bool) + **out = **in + } + if in.GCPConditions != nil { + in, out := &in.GCPConditions, &out.GCPConditions + *out = make([]WorkstationClusterGCPCondition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterObservedState. +func (in *WorkstationClusterObservedState) DeepCopy() *WorkstationClusterObservedState { + if in == nil { + return nil + } + out := new(WorkstationClusterObservedState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationClusterSpec) DeepCopyInto(out *WorkstationClusterSpec) { + *out = *in + out.ProjectRef = in.ProjectRef + if in.ResourceID != nil { + in, out := &in.ResourceID, &out.ResourceID + *out = new(string) + **out = **in + } + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.Annotations != nil { + in, out := &in.Annotations, &out.Annotations + *out = make([]WorkstationClusterAnnotation, len(*in)) + copy(*out, *in) + } + if in.Labels != nil { + in, out := &in.Labels, &out.Labels + *out = make([]WorkstationClusterLabel, len(*in)) + copy(*out, *in) + } + out.NetworkRef = in.NetworkRef + out.SubnetworkRef = in.SubnetworkRef + if in.PrivateClusterConfig != nil { + in, out := &in.PrivateClusterConfig, &out.PrivateClusterConfig + *out = new(WorkstationCluster_PrivateClusterConfig) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterSpec. +func (in *WorkstationClusterSpec) DeepCopy() *WorkstationClusterSpec { + if in == nil { + return nil + } + out := new(WorkstationClusterSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *WorkstationClusterStatus) DeepCopyInto(out *WorkstationClusterStatus) { + *out = *in + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]v1alpha1.Condition, len(*in)) + copy(*out, *in) + } + if in.ObservedGeneration != nil { + in, out := &in.ObservedGeneration, &out.ObservedGeneration + *out = new(int64) + **out = **in + } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } + if in.ObservedState != nil { + in, out := &in.ObservedState, &out.ObservedState + *out = new(WorkstationClusterObservedState) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationClusterStatus. +func (in *WorkstationClusterStatus) DeepCopy() *WorkstationClusterStatus { + if in == nil { + return nil + } + out := new(WorkstationClusterStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkstationCluster_PrivateClusterConfig) DeepCopyInto(out *WorkstationCluster_PrivateClusterConfig) { + *out = *in + if in.EnablePrivateEndpoint != nil { + in, out := &in.EnablePrivateEndpoint, &out.EnablePrivateEndpoint + *out = new(bool) + **out = **in + } + if in.AllowedProjects != nil { + in, out := &in.AllowedProjects, &out.AllowedProjects + *out = make([]refsv1beta1.ProjectRef, len(*in)) + copy(*out, *in) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkstationCluster_PrivateClusterConfig. +func (in *WorkstationCluster_PrivateClusterConfig) DeepCopy() *WorkstationCluster_PrivateClusterConfig { + if in == nil { + return nil + } + out := new(WorkstationCluster_PrivateClusterConfig) + in.DeepCopyInto(out) + return out +} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_workstationclusters.workstations.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_workstationclusters.workstations.cnrm.cloud.google.com.yaml index 0798a5bd8c..552e95981a 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_workstationclusters.workstations.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_workstationclusters.workstations.cnrm.cloud.google.com.yaml @@ -363,6 +363,352 @@ spec: type: object type: object served: true + storage: false + subresources: + status: {} + - additionalPrinterColumns: + - jsonPath: .metadata.creationTimestamp + name: Age + type: date + - description: When 'True', the most recent reconcile of the resource succeeded + jsonPath: .status.conditions[?(@.type=='Ready')].status + name: Ready + type: string + - description: The reason for the value in 'Ready' + jsonPath: .status.conditions[?(@.type=='Ready')].reason + name: Status + type: string + - description: The last transition time for the value in 'Status' + jsonPath: .status.conditions[?(@.type=='Ready')].lastTransitionTime + name: Status Age + type: date + name: v1beta1 + schema: + openAPIV3Schema: + description: WorkstationCluster is the Schema for the WorkstationCluster API + properties: + apiVersion: + description: 'APIVersion defines the versioned schema of this representation + of an object. Servers should convert recognized schemas to the latest + internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' + type: string + kind: + description: 'Kind is a string value representing the REST resource this + object represents. Servers may infer this from the endpoint the client + submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' + type: string + metadata: + type: object + spec: + description: WorkstationClusterSpec defines the desired state of WorkstationCluster + properties: + annotations: + description: Optional. Client-specified annotations. + items: + properties: + key: + description: Key for the annotation. + type: string + value: + description: Value for the annotation. + type: string + type: object + type: array + displayName: + description: Optional. Human-readable name for this workstation cluster. + type: string + labels: + description: Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) + that are applied to the workstation cluster and that are also propagated + to the underlying Compute Engine resources. + items: + properties: + key: + description: Key for the annotation. + type: string + value: + description: Value for the annotation. + type: string + type: object + type: array + location: + description: The location of the cluster. + type: string + networkRef: + description: Immutable. Reference to the Compute Engine network in + which instances associated with this workstation cluster will be + created. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: A reference to an externally managed Compute Network + resource. Should be in the format `projects//global/networks/`. + type: string + name: + description: The `name` field of a `ComputeNetwork` resource. + type: string + namespace: + description: The `namespace` field of a `ComputeNetwork` resource. + type: string + type: object + privateClusterConfig: + description: Optional. Configuration for private workstation cluster. + properties: + allowedProjects: + description: Optional. Additional projects that are allowed to + attach to the workstation cluster's service attachment. By default, + the workstation cluster's project and the VPC host project (if + different) are allowed. + items: + description: The Project that this resource belongs to. + oneOf: + - not: + required: + - external + required: + - name + - kind + - not: + anyOf: + - required: + - name + - required: + - namespace + - required: + - kind + required: + - external + properties: + external: + description: The `projectID` field of a project, when not + managed by Config Connector. + type: string + kind: + description: The kind of the Project resource; optional + but must be `Project` if provided. + type: string + name: + description: The `name` field of a `Project` resource. + type: string + namespace: + description: The `namespace` field of a `Project` resource. + type: string + type: object + type: array + enablePrivateEndpoint: + description: Immutable. Whether Workstations endpoint is private. + type: boolean + type: object + projectRef: + description: Immutable. The Project that this resource belongs to. 
+ oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: The `projectID` field of a project, when not managed + by Config Connector. + type: string + kind: + description: The kind of the Project resource; optional but must + be `Project` if provided. + type: string + name: + description: The `name` field of a `Project` resource. + type: string + namespace: + description: The `namespace` field of a `Project` resource. + type: string + type: object + x-kubernetes-validations: + - message: ResourceID field is immutable + rule: self == oldSelf + resourceID: + description: Immutable. The WorkstationCluster name. If not given, + the metadata.name will be used. + type: string + x-kubernetes-validations: + - message: ResourceID field is immutable + rule: self == oldSelf + subnetworkRef: + description: Immutable. Reference to the Compute Engine subnetwork + in which instances associated with this workstation cluster will + be created. Must be part of the subnetwork specified for this workstation + cluster. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: The ComputeSubnetwork selflink of form "projects/{{project}}/regions/{{region}}/subnetworks/{{name}}", + when not managed by Config Connector. + type: string + name: + description: The `name` field of a `ComputeSubnetwork` resource. + type: string + namespace: + description: The `namespace` field of a `ComputeSubnetwork` resource. + type: string + type: object + required: + - networkRef + - projectRef + - subnetworkRef + type: object + status: + description: WorkstationClusterStatus defines the config connector machine + state of WorkstationCluster + properties: + conditions: + description: Conditions represent the latest available observations + of the object's current state. + items: + properties: + lastTransitionTime: + description: Last time the condition transitioned from one status + to another. + type: string + message: + description: Human-readable message indicating details about + last transition. + type: string + reason: + description: Unique, one-word, CamelCase reason for the condition's + last transition. + type: string + status: + description: Status is the status of the condition. Can be True, + False, Unknown. + type: string + type: + description: Type is the type of the condition. + type: string + type: object + type: array + externalRef: + description: A unique specifier for the WorkstationCluster resource + in GCP. + type: string + observedGeneration: + description: ObservedGeneration is the generation of the resource + that was most recently observed by the Config Connector controller. + If this is equal to metadata.generation, then that means that the + current reported status reflects the most recent desired state of + the resource. + format: int64 + type: integer + observedState: + description: ObservedState is the state of the resource as most recently + observed in GCP. + properties: + clusterHostname: + description: Output only. Hostname for the workstation cluster. + This field will be populated only when private endpoint is enabled. + To access workstations in the workstation cluster, create a + new DNS zone mapping this domain name to an internal IP address + and a forwarding rule mapping that address to the service attachment. 
+ type: string + controlPlaneIP: + description: Output only. The private IP address of the control + plane for this workstation cluster. Workstation VMs need access + to this IP address to work with the service, so make sure that + your firewall rules allow egress from the workstation VMs to + this address. + type: string + createTime: + description: Output only. Time when this workstation cluster was + created. + type: string + degraded: + description: Output only. Whether this workstation cluster is + in degraded mode, in which case it may require user action to + restore full functionality. Details can be found in [conditions][google.cloud.workstations.v1.WorkstationCluster.conditions]. + type: boolean + deleteTime: + description: Output only. Time when this workstation cluster was + soft-deleted. + type: string + etag: + description: Optional. Checksum computed by the server. May be + sent on update and delete requests to make sure that the client + has an up-to-date value before proceeding. + type: string + gcpConditions: + description: Output only. Status conditions describing the workstation + cluster's current state. + items: + properties: + code: + description: The status code, which should be an enum value + of [google.rpc.Code][google.rpc.Code]. + format: int32 + type: integer + message: + description: A developer-facing error message, which should + be in English. Any user-facing error message should be + localized and sent in the [google.rpc.Status.details][google.rpc.Status.details] + field, or localized by the client. + type: string + type: object + type: array + reconciling: + description: Output only. Indicates whether this workstation cluster + is currently being updated to match its intended state. + type: boolean + serviceAttachmentUri: + description: Output only. Service attachment URI for the workstation + cluster. The service attachment is created when private endpoint + is enabled. To access workstations in the workstation cluster, + configure access to the managed service using [Private Service + Connect](https://cloud.google.com/vpc/docs/configure-private-service-connect-services). + type: string + uid: + description: Output only. A system-assigned unique identifier + for this workstation cluster. + type: string + updateTime: + description: Output only. Time when this workstation cluster was + most recently updated. + type: string + type: object + type: object + type: object + served: true storage: true subresources: status: {} diff --git a/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computenetwork.yaml b/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computenetwork.yaml new file mode 100644 index 0000000000..224bcddf5b --- /dev/null +++ b/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computenetwork.yaml @@ -0,0 +1,21 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: computenetwork-dep +spec: + routingMode: GLOBAL + autoCreateSubnetworks: false \ No newline at end of file diff --git a/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computesubnetwork.yaml b/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computesubnetwork.yaml new file mode 100644 index 0000000000..7e489aa7fb --- /dev/null +++ b/config/samples/resources/workstationcluster/basic-workstationcluster/compute_v1beta1_computesubnetwork.yaml @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeSubnetwork +metadata: + name: computesubnetwork-dep +spec: + ipCidrRange: 10.0.0.0/24 + region: us-west1 + networkRef: + name: computenetwork-dep \ No newline at end of file diff --git a/config/samples/resources/workstationcluster/basic-workstationcluster/workstations_v1beta1_workstationcluster.yaml b/config/samples/resources/workstationcluster/basic-workstationcluster/workstations_v1beta1_workstationcluster.yaml new file mode 100644 index 0000000000..6670642d06 --- /dev/null +++ b/config/samples/resources/workstationcluster/basic-workstationcluster/workstations_v1beta1_workstationcluster.yaml @@ -0,0 +1,26 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 +kind: WorkstationCluster +metadata: + name: workstationcluster-sample +spec: + projectRef: + external: "projects/${PROJECT_NUMBER1}" + location: us-west1 + networkRef: + name: computenetwork-dep + subnetworkRef: + name: computesubnetwork-dep \ No newline at end of file diff --git a/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computenetwork.yaml b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computenetwork.yaml new file mode 100644 index 0000000000..224bcddf5b --- /dev/null +++ b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computenetwork.yaml @@ -0,0 +1,21 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: computenetwork-dep +spec: + routingMode: GLOBAL + autoCreateSubnetworks: false \ No newline at end of file diff --git a/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computesubnetwork.yaml b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computesubnetwork.yaml new file mode 100644 index 0000000000..7e489aa7fb --- /dev/null +++ b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/compute_v1beta1_computesubnetwork.yaml @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeSubnetwork +metadata: + name: computesubnetwork-dep +spec: + ipCidrRange: 10.0.0.0/24 + region: us-west1 + networkRef: + name: computenetwork-dep \ No newline at end of file diff --git a/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/workstations_v1beta1_workstationcluster.yaml b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/workstations_v1beta1_workstationcluster.yaml new file mode 100644 index 0000000000..2e859ac147 --- /dev/null +++ b/config/samples/resources/workstationcluster/workstationcluster-with-privateclusterconfig/workstations_v1beta1_workstationcluster.yaml @@ -0,0 +1,37 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 +kind: WorkstationCluster +metadata: + name: workstationcluster-sample +spec: + projectRef: + external: "projects/${PROJECT_NUMBER1}" + location: us-west1 + displayName: workstationcluster-sample-displayname + annotations: + - key: a-key1 + value: a-value1 + labels: + - key: l-key1 + value: l-value1 + networkRef: + name: computenetwork-dep + subnetworkRef: + name: computesubnetwork-dep + privateClusterConfig: + enablePrivateEndpoint: true + allowedProjects: + - external: "projects/${PROJECT_NUMBER1}" diff --git a/config/servicemappings/workstations.yaml b/config/servicemappings/workstations.yaml new file mode 100644 index 0000000000..fc7f2ad2d4 --- /dev/null +++ b/config/servicemappings/workstations.yaml @@ -0,0 +1,27 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: core.cnrm.cloud.google.com/v1alpha1 +kind: ServiceMapping +metadata: + name: workstations.cnrm.cloud.google.com + namespace: cnrm-system +spec: + name: Workstations + version: v1beta1 + serviceHostName: "workstations.googleapis.com" + resources: + - name: google_workstations_workstationcluster + kind: WorkstationCluster + direct: true \ No newline at end of file diff --git a/dev/tools/controllerbuilder/generate.sh b/dev/tools/controllerbuilder/generate.sh index 952e14090a..47f8e30f13 100755 --- a/dev/tools/controllerbuilder/generate.sh +++ b/dev/tools/controllerbuilder/generate.sh @@ -211,7 +211,7 @@ go run . generate-types \ go run . generate-types \ --proto-source-path ../proto-to-mapper/build/googleapis.pb \ --service google.cloud.workstations.v1 \ - --api-version workstations.cnrm.cloud.google.com/v1alpha1 \ + --api-version workstations.cnrm.cloud.google.com/v1beta1 \ --output-api ${APIS_DIR} \ --kind WorkstationCluster \ --proto-resource WorkstationCluster @@ -219,7 +219,7 @@ go run . generate-types \ go run . generate-mapper \ --proto-source-path ../proto-to-mapper/build/googleapis.pb \ --service google.cloud.workstations.v1 \ - --api-version workstations.cnrm.cloud.google.com/v1alpha1 \ + --api-version workstations.cnrm.cloud.google.com/v1beta1 \ --api-go-package-path github.com/GoogleCloudPlatform/k8s-config-connector/apis \ --output-dir ${OUTPUT_MAPPER} \ --api-dir ${APIS_DIR} diff --git a/pkg/clients/generated/apis/workstations/v1alpha1/doc.go b/pkg/clients/generated/apis/workstations/v1beta1/doc.go similarity index 93% rename from pkg/clients/generated/apis/workstations/v1alpha1/doc.go rename to pkg/clients/generated/apis/workstations/v1beta1/doc.go index 50ff66e39d..880d61a7ed 100644 --- a/pkg/clients/generated/apis/workstations/v1alpha1/doc.go +++ b/pkg/clients/generated/apis/workstations/v1beta1/doc.go @@ -28,11 +28,11 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -// Package v1alpha1 contains API Schema definitions for the workstations v1alpha1 API group. +// Package v1beta1 contains API Schema definitions for the workstations v1beta1 API group. 
// +k8s:openapi-gen=true // +k8s:deepcopy-gen=package,register // +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/workstations // +k8s:defaulter-gen=TypeMeta // +groupName=workstations.cnrm.cloud.google.com -package v1alpha1 +package v1beta1 diff --git a/pkg/clients/generated/apis/workstations/v1alpha1/register.go b/pkg/clients/generated/apis/workstations/v1beta1/register.go similarity index 93% rename from pkg/clients/generated/apis/workstations/v1alpha1/register.go rename to pkg/clients/generated/apis/workstations/v1beta1/register.go index 960b60e844..d31c460155 100644 --- a/pkg/clients/generated/apis/workstations/v1alpha1/register.go +++ b/pkg/clients/generated/apis/workstations/v1beta1/register.go @@ -28,13 +28,13 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -// Package v1alpha1 contains API Schema definitions for the workstations v1alpha1 API group. +// Package v1beta1 contains API Schema definitions for the workstations v1beta1 API group. // +k8s:openapi-gen=true // +k8s:deepcopy-gen=package,register // +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/workstations // +k8s:defaulter-gen=TypeMeta // +groupName=workstations.cnrm.cloud.google.com -package v1alpha1 +package v1beta1 import ( "reflect" @@ -45,7 +45,7 @@ import ( var ( // SchemeGroupVersion is the group version used to register these objects. - SchemeGroupVersion = schema.GroupVersion{Group: "workstations.cnrm.cloud.google.com", Version: "v1alpha1"} + SchemeGroupVersion = schema.GroupVersion{Group: "workstations.cnrm.cloud.google.com", Version: "v1beta1"} // SchemeBuilder is used to add go types to the GroupVersionKind scheme. SchemeBuilder = &scheme.Builder{GroupVersion: SchemeGroupVersion} diff --git a/pkg/clients/generated/apis/workstations/v1alpha1/workstationcluster_types.go b/pkg/clients/generated/apis/workstations/v1beta1/workstationcluster_types.go similarity index 99% rename from pkg/clients/generated/apis/workstations/v1alpha1/workstationcluster_types.go rename to pkg/clients/generated/apis/workstations/v1beta1/workstationcluster_types.go index edc0542f28..6841bfe6c7 100644 --- a/pkg/clients/generated/apis/workstations/v1alpha1/workstationcluster_types.go +++ b/pkg/clients/generated/apis/workstations/v1beta1/workstationcluster_types.go @@ -28,7 +28,7 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -package v1alpha1 +package v1beta1 import ( "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" diff --git a/pkg/clients/generated/apis/workstations/v1alpha1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/workstations/v1beta1/zz_generated.deepcopy.go similarity index 98% rename from pkg/clients/generated/apis/workstations/v1alpha1/zz_generated.deepcopy.go rename to pkg/clients/generated/apis/workstations/v1beta1/zz_generated.deepcopy.go index e835d3fad2..4b81cc1c72 100644 --- a/pkg/clients/generated/apis/workstations/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/workstations/v1beta1/zz_generated.deepcopy.go @@ -22,10 +22,10 @@ // Code generated by deepcopy-gen. DO NOT EDIT. 
-package v1alpha1 +package v1beta1 import ( - k8sv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" + v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" runtime "k8s.io/apimachinery/pkg/runtime" ) @@ -148,7 +148,7 @@ func (in *WorkstationClusterStatus) DeepCopyInto(out *WorkstationClusterStatus) *out = *in if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions - *out = make([]k8sv1alpha1.Condition, len(*in)) + *out = make([]v1alpha1.Condition, len(*in)) copy(*out, *in) } if in.ExternalRef != nil { diff --git a/pkg/clients/generated/client/clientset/versioned/clientset.go b/pkg/clients/generated/client/clientset/versioned/clientset.go index 6c1153d140..136ead487d 100644 --- a/pkg/clients/generated/client/clientset/versioned/clientset.go +++ b/pkg/clients/generated/client/clientset/versioned/clientset.go @@ -146,7 +146,7 @@ import ( vertexaiv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/vertexai/v1beta1" vpcaccessv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/vpcaccess/v1beta1" workflowsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workflows/v1alpha1" - workstationsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1" + workstationsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1" discovery "k8s.io/client-go/discovery" rest "k8s.io/client-go/rest" flowcontrol "k8s.io/client-go/util/flowcontrol" @@ -275,7 +275,7 @@ type Interface interface { VertexaiV1beta1() vertexaiv1beta1.VertexaiV1beta1Interface VpcaccessV1beta1() vpcaccessv1beta1.VpcaccessV1beta1Interface WorkflowsV1alpha1() workflowsv1alpha1.WorkflowsV1alpha1Interface - WorkstationsV1alpha1() workstationsv1alpha1.WorkstationsV1alpha1Interface + WorkstationsV1beta1() workstationsv1beta1.WorkstationsV1beta1Interface } // Clientset contains the clients for groups. 
@@ -402,7 +402,7 @@ type Clientset struct { vertexaiV1beta1 *vertexaiv1beta1.VertexaiV1beta1Client vpcaccessV1beta1 *vpcaccessv1beta1.VpcaccessV1beta1Client workflowsV1alpha1 *workflowsv1alpha1.WorkflowsV1alpha1Client - workstationsV1alpha1 *workstationsv1alpha1.WorkstationsV1alpha1Client + workstationsV1beta1 *workstationsv1beta1.WorkstationsV1beta1Client } // AccesscontextmanagerV1beta1 retrieves the AccesscontextmanagerV1beta1Client @@ -1010,9 +1010,9 @@ func (c *Clientset) WorkflowsV1alpha1() workflowsv1alpha1.WorkflowsV1alpha1Inter return c.workflowsV1alpha1 } -// WorkstationsV1alpha1 retrieves the WorkstationsV1alpha1Client -func (c *Clientset) WorkstationsV1alpha1() workstationsv1alpha1.WorkstationsV1alpha1Interface { - return c.workstationsV1alpha1 +// WorkstationsV1beta1 retrieves the WorkstationsV1beta1Client +func (c *Clientset) WorkstationsV1beta1() workstationsv1beta1.WorkstationsV1beta1Interface { + return c.workstationsV1beta1 } // Discovery retrieves the DiscoveryClient @@ -1543,7 +1543,7 @@ func NewForConfigAndClient(c *rest.Config, httpClient *http.Client) (*Clientset, if err != nil { return nil, err } - cs.workstationsV1alpha1, err = workstationsv1alpha1.NewForConfigAndClient(&configShallowCopy, httpClient) + cs.workstationsV1beta1, err = workstationsv1beta1.NewForConfigAndClient(&configShallowCopy, httpClient) if err != nil { return nil, err } @@ -1689,7 +1689,7 @@ func New(c rest.Interface) *Clientset { cs.vertexaiV1beta1 = vertexaiv1beta1.New(c) cs.vpcaccessV1beta1 = vpcaccessv1beta1.New(c) cs.workflowsV1alpha1 = workflowsv1alpha1.New(c) - cs.workstationsV1alpha1 = workstationsv1alpha1.New(c) + cs.workstationsV1beta1 = workstationsv1beta1.New(c) cs.DiscoveryClient = discovery.NewDiscoveryClient(c) return &cs diff --git a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go index 72077fce72..276f3f952a 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go @@ -265,8 +265,8 @@ import ( fakevpcaccessv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/vpcaccess/v1beta1/fake" workflowsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workflows/v1alpha1" fakeworkflowsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workflows/v1alpha1/fake" - workstationsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1" - fakeworkstationsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake" + workstationsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1" + fakeworkstationsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/discovery" @@ -929,7 +929,7 @@ func (c *Clientset) WorkflowsV1alpha1() workflowsv1alpha1.WorkflowsV1alpha1Inter return &fakeworkflowsv1alpha1.FakeWorkflowsV1alpha1{Fake: &c.Fake} } -// WorkstationsV1alpha1 retrieves the 
WorkstationsV1alpha1Client -func (c *Clientset) WorkstationsV1alpha1() workstationsv1alpha1.WorkstationsV1alpha1Interface { - return &fakeworkstationsv1alpha1.FakeWorkstationsV1alpha1{Fake: &c.Fake} +// WorkstationsV1beta1 retrieves the WorkstationsV1beta1Client +func (c *Clientset) WorkstationsV1beta1() workstationsv1beta1.WorkstationsV1beta1Interface { + return &fakeworkstationsv1beta1.FakeWorkstationsV1beta1{Fake: &c.Fake} } diff --git a/pkg/clients/generated/client/clientset/versioned/fake/register.go b/pkg/clients/generated/client/clientset/versioned/fake/register.go index 80b32ac689..6de26447dd 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/register.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/register.go @@ -143,7 +143,7 @@ import ( vertexaiv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/vertexai/v1beta1" vpcaccessv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/vpcaccess/v1beta1" workflowsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workflows/v1alpha1" - workstationsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1alpha1" + workstationsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" schema "k8s.io/apimachinery/pkg/runtime/schema" @@ -276,7 +276,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ vertexaiv1beta1.AddToScheme, vpcaccessv1beta1.AddToScheme, workflowsv1alpha1.AddToScheme, - workstationsv1alpha1.AddToScheme, + workstationsv1beta1.AddToScheme, } // AddToScheme adds all types of this clientset into the given scheme. This allows composition diff --git a/pkg/clients/generated/client/clientset/versioned/scheme/register.go b/pkg/clients/generated/client/clientset/versioned/scheme/register.go index 6644207546..78b538c2e2 100644 --- a/pkg/clients/generated/client/clientset/versioned/scheme/register.go +++ b/pkg/clients/generated/client/clientset/versioned/scheme/register.go @@ -143,7 +143,7 @@ import ( vertexaiv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/vertexai/v1beta1" vpcaccessv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/vpcaccess/v1beta1" workflowsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workflows/v1alpha1" - workstationsv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1alpha1" + workstationsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" schema "k8s.io/apimachinery/pkg/runtime/schema" @@ -276,7 +276,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ vertexaiv1beta1.AddToScheme, vpcaccessv1beta1.AddToScheme, workflowsv1alpha1.AddToScheme, - workstationsv1alpha1.AddToScheme, + workstationsv1beta1.AddToScheme, } // AddToScheme adds all types of this clientset into the given scheme. 
This allows composition diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/doc.go similarity index 98% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/doc.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/doc.go index d3dac805d0..41dbecdb4a 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/doc.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/doc.go @@ -20,4 +20,4 @@ // Code generated by client-gen. DO NOT EDIT. // This package has the automatically generated typed clients. -package v1alpha1 +package v1beta1 diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/doc.go similarity index 100% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/doc.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/doc.go diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstationcluster.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstationcluster.go similarity index 70% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstationcluster.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstationcluster.go index 83c071ff1c..aa80744c32 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstationcluster.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstationcluster.go @@ -24,7 +24,7 @@ package fake import ( "context" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" types "k8s.io/apimachinery/pkg/types" @@ -34,29 +34,29 @@ import ( // FakeWorkstationClusters implements WorkstationClusterInterface type FakeWorkstationClusters struct { - Fake *FakeWorkstationsV1alpha1 + Fake *FakeWorkstationsV1beta1 ns string } -var workstationclustersResource = v1alpha1.SchemeGroupVersion.WithResource("workstationclusters") +var workstationclustersResource = v1beta1.SchemeGroupVersion.WithResource("workstationclusters") -var workstationclustersKind = v1alpha1.SchemeGroupVersion.WithKind("WorkstationCluster") +var workstationclustersKind = v1beta1.SchemeGroupVersion.WithKind("WorkstationCluster") // Get takes name of the workstationCluster, and returns the corresponding workstationCluster object, and an error if there is any. -func (c *FakeWorkstationClusters) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.WorkstationCluster, err error) { +func (c *FakeWorkstationClusters) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.WorkstationCluster, err error) { obj, err := c.Fake. 
- Invokes(testing.NewGetAction(workstationclustersResource, c.ns, name), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewGetAction(workstationclustersResource, c.ns, name), &v1beta1.WorkstationCluster{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.WorkstationCluster), err + return obj.(*v1beta1.WorkstationCluster), err } // List takes label and field selectors, and returns the list of WorkstationClusters that match those selectors. -func (c *FakeWorkstationClusters) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.WorkstationClusterList, err error) { +func (c *FakeWorkstationClusters) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.WorkstationClusterList, err error) { obj, err := c.Fake. - Invokes(testing.NewListAction(workstationclustersResource, workstationclustersKind, c.ns, opts), &v1alpha1.WorkstationClusterList{}) + Invokes(testing.NewListAction(workstationclustersResource, workstationclustersKind, c.ns, opts), &v1beta1.WorkstationClusterList{}) if obj == nil { return nil, err @@ -66,8 +66,8 @@ func (c *FakeWorkstationClusters) List(ctx context.Context, opts v1.ListOptions) if label == nil { label = labels.Everything() } - list := &v1alpha1.WorkstationClusterList{ListMeta: obj.(*v1alpha1.WorkstationClusterList).ListMeta} - for _, item := range obj.(*v1alpha1.WorkstationClusterList).Items { + list := &v1beta1.WorkstationClusterList{ListMeta: obj.(*v1beta1.WorkstationClusterList).ListMeta} + for _, item := range obj.(*v1beta1.WorkstationClusterList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } @@ -83,43 +83,43 @@ func (c *FakeWorkstationClusters) Watch(ctx context.Context, opts v1.ListOptions } // Create takes the representation of a workstationCluster and creates it. Returns the server's representation of the workstationCluster, and an error, if there is any. -func (c *FakeWorkstationClusters) Create(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.CreateOptions) (result *v1alpha1.WorkstationCluster, err error) { +func (c *FakeWorkstationClusters) Create(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.CreateOptions) (result *v1beta1.WorkstationCluster, err error) { obj, err := c.Fake. - Invokes(testing.NewCreateAction(workstationclustersResource, c.ns, workstationCluster), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewCreateAction(workstationclustersResource, c.ns, workstationCluster), &v1beta1.WorkstationCluster{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.WorkstationCluster), err + return obj.(*v1beta1.WorkstationCluster), err } // Update takes the representation of a workstationCluster and updates it. Returns the server's representation of the workstationCluster, and an error, if there is any. -func (c *FakeWorkstationClusters) Update(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (result *v1alpha1.WorkstationCluster, err error) { +func (c *FakeWorkstationClusters) Update(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (result *v1beta1.WorkstationCluster, err error) { obj, err := c.Fake. 
- Invokes(testing.NewUpdateAction(workstationclustersResource, c.ns, workstationCluster), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewUpdateAction(workstationclustersResource, c.ns, workstationCluster), &v1beta1.WorkstationCluster{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.WorkstationCluster), err + return obj.(*v1beta1.WorkstationCluster), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). -func (c *FakeWorkstationClusters) UpdateStatus(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (*v1alpha1.WorkstationCluster, error) { +func (c *FakeWorkstationClusters) UpdateStatus(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (*v1beta1.WorkstationCluster, error) { obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(workstationclustersResource, "status", c.ns, workstationCluster), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewUpdateSubresourceAction(workstationclustersResource, "status", c.ns, workstationCluster), &v1beta1.WorkstationCluster{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.WorkstationCluster), err + return obj.(*v1beta1.WorkstationCluster), err } // Delete takes name of the workstationCluster and deletes it. Returns an error if one occurs. func (c *FakeWorkstationClusters) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { _, err := c.Fake. - Invokes(testing.NewDeleteActionWithOptions(workstationclustersResource, c.ns, name, opts), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewDeleteActionWithOptions(workstationclustersResource, c.ns, name, opts), &v1beta1.WorkstationCluster{}) return err } @@ -128,17 +128,17 @@ func (c *FakeWorkstationClusters) Delete(ctx context.Context, name string, opts func (c *FakeWorkstationClusters) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { action := testing.NewDeleteCollectionAction(workstationclustersResource, c.ns, listOpts) - _, err := c.Fake.Invokes(action, &v1alpha1.WorkstationClusterList{}) + _, err := c.Fake.Invokes(action, &v1beta1.WorkstationClusterList{}) return err } // Patch applies the patch and returns the patched workstationCluster. -func (c *FakeWorkstationClusters) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkstationCluster, err error) { +func (c *FakeWorkstationClusters) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.WorkstationCluster, err error) { obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(workstationclustersResource, c.ns, name, pt, data, subresources...), &v1alpha1.WorkstationCluster{}) + Invokes(testing.NewPatchSubresourceAction(workstationclustersResource, c.ns, name, pt, data, subresources...), &v1beta1.WorkstationCluster{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.WorkstationCluster), err + return obj.(*v1beta1.WorkstationCluster), err } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstations_client.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstations_client.go similarity index 76% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstations_client.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstations_client.go index dd7381345e..4b357b336b 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/fake/fake_workstations_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/fake/fake_workstations_client.go @@ -22,22 +22,22 @@ package fake import ( - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1" rest "k8s.io/client-go/rest" testing "k8s.io/client-go/testing" ) -type FakeWorkstationsV1alpha1 struct { +type FakeWorkstationsV1beta1 struct { *testing.Fake } -func (c *FakeWorkstationsV1alpha1) WorkstationClusters(namespace string) v1alpha1.WorkstationClusterInterface { +func (c *FakeWorkstationsV1beta1) WorkstationClusters(namespace string) v1beta1.WorkstationClusterInterface { return &FakeWorkstationClusters{c, namespace} } // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. -func (c *FakeWorkstationsV1alpha1) RESTClient() rest.Interface { +func (c *FakeWorkstationsV1beta1) RESTClient() rest.Interface { var ret *rest.RESTClient return ret } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/generated_expansion.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/generated_expansion.go similarity index 98% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/generated_expansion.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/generated_expansion.go index bad8e07430..d36d8e7953 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/generated_expansion.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/generated_expansion.go @@ -19,6 +19,6 @@ // Code generated by client-gen. DO NOT EDIT. 
-package v1alpha1 +package v1beta1 type WorkstationClusterExpansion interface{} diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstationcluster.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstationcluster.go similarity index 78% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstationcluster.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstationcluster.go index 2def453898..7c6e41d077 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstationcluster.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstationcluster.go @@ -19,13 +19,13 @@ // Code generated by client-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 import ( "context" "time" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1beta1" scheme "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" @@ -41,15 +41,15 @@ type WorkstationClustersGetter interface { // WorkstationClusterInterface has methods to work with WorkstationCluster resources. type WorkstationClusterInterface interface { - Create(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.CreateOptions) (*v1alpha1.WorkstationCluster, error) - Update(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (*v1alpha1.WorkstationCluster, error) - UpdateStatus(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (*v1alpha1.WorkstationCluster, error) + Create(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.CreateOptions) (*v1beta1.WorkstationCluster, error) + Update(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (*v1beta1.WorkstationCluster, error) + UpdateStatus(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (*v1beta1.WorkstationCluster, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error - Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.WorkstationCluster, error) - List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.WorkstationClusterList, error) + Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.WorkstationCluster, error) + List(ctx context.Context, opts v1.ListOptions) (*v1beta1.WorkstationClusterList, error) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) - Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkstationCluster, err error) + Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.WorkstationCluster, err error) WorkstationClusterExpansion } @@ -60,7 +60,7 @@ type workstationClusters struct { } // newWorkstationClusters returns a WorkstationClusters -func newWorkstationClusters(c *WorkstationsV1alpha1Client, namespace string) 
*workstationClusters { +func newWorkstationClusters(c *WorkstationsV1beta1Client, namespace string) *workstationClusters { return &workstationClusters{ client: c.RESTClient(), ns: namespace, @@ -68,8 +68,8 @@ func newWorkstationClusters(c *WorkstationsV1alpha1Client, namespace string) *wo } // Get takes name of the workstationCluster, and returns the corresponding workstationCluster object, and an error if there is any. -func (c *workstationClusters) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.WorkstationCluster, err error) { - result = &v1alpha1.WorkstationCluster{} +func (c *workstationClusters) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.WorkstationCluster, err error) { + result = &v1beta1.WorkstationCluster{} err = c.client.Get(). Namespace(c.ns). Resource("workstationclusters"). @@ -81,12 +81,12 @@ func (c *workstationClusters) Get(ctx context.Context, name string, options v1.G } // List takes label and field selectors, and returns the list of WorkstationClusters that match those selectors. -func (c *workstationClusters) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.WorkstationClusterList, err error) { +func (c *workstationClusters) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.WorkstationClusterList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } - result = &v1alpha1.WorkstationClusterList{} + result = &v1beta1.WorkstationClusterList{} err = c.client.Get(). Namespace(c.ns). Resource("workstationclusters"). @@ -113,8 +113,8 @@ func (c *workstationClusters) Watch(ctx context.Context, opts v1.ListOptions) (w } // Create takes the representation of a workstationCluster and creates it. Returns the server's representation of the workstationCluster, and an error, if there is any. -func (c *workstationClusters) Create(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.CreateOptions) (result *v1alpha1.WorkstationCluster, err error) { - result = &v1alpha1.WorkstationCluster{} +func (c *workstationClusters) Create(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.CreateOptions) (result *v1beta1.WorkstationCluster, err error) { + result = &v1beta1.WorkstationCluster{} err = c.client.Post(). Namespace(c.ns). Resource("workstationclusters"). @@ -126,8 +126,8 @@ func (c *workstationClusters) Create(ctx context.Context, workstationCluster *v1 } // Update takes the representation of a workstationCluster and updates it. Returns the server's representation of the workstationCluster, and an error, if there is any. -func (c *workstationClusters) Update(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (result *v1alpha1.WorkstationCluster, err error) { - result = &v1alpha1.WorkstationCluster{} +func (c *workstationClusters) Update(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (result *v1beta1.WorkstationCluster, err error) { + result = &v1beta1.WorkstationCluster{} err = c.client.Put(). Namespace(c.ns). Resource("workstationclusters"). @@ -141,8 +141,8 @@ func (c *workstationClusters) Update(ctx context.Context, workstationCluster *v1 // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *workstationClusters) UpdateStatus(ctx context.Context, workstationCluster *v1alpha1.WorkstationCluster, opts v1.UpdateOptions) (result *v1alpha1.WorkstationCluster, err error) { - result = &v1alpha1.WorkstationCluster{} +func (c *workstationClusters) UpdateStatus(ctx context.Context, workstationCluster *v1beta1.WorkstationCluster, opts v1.UpdateOptions) (result *v1beta1.WorkstationCluster, err error) { + result = &v1beta1.WorkstationCluster{} err = c.client.Put(). Namespace(c.ns). Resource("workstationclusters"). @@ -183,8 +183,8 @@ func (c *workstationClusters) DeleteCollection(ctx context.Context, opts v1.Dele } // Patch applies the patch and returns the patched workstationCluster. -func (c *workstationClusters) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.WorkstationCluster, err error) { - result = &v1alpha1.WorkstationCluster{} +func (c *workstationClusters) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.WorkstationCluster, err error) { + result = &v1beta1.WorkstationCluster{} err = c.client.Patch(pt). Namespace(c.ns). Resource("workstationclusters"). diff --git a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstations_client.go b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstations_client.go similarity index 66% rename from pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstations_client.go rename to pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstations_client.go index a509798797..b8c14966d0 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1alpha1/workstations_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/workstations/v1beta1/workstations_client.go @@ -19,34 +19,34 @@ // Code generated by client-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 import ( "net/http" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/workstations/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" rest "k8s.io/client-go/rest" ) -type WorkstationsV1alpha1Interface interface { +type WorkstationsV1beta1Interface interface { RESTClient() rest.Interface WorkstationClustersGetter } -// WorkstationsV1alpha1Client is used to interact with features provided by the workstations.cnrm.cloud.google.com group. -type WorkstationsV1alpha1Client struct { +// WorkstationsV1beta1Client is used to interact with features provided by the workstations.cnrm.cloud.google.com group. +type WorkstationsV1beta1Client struct { restClient rest.Interface } -func (c *WorkstationsV1alpha1Client) WorkstationClusters(namespace string) WorkstationClusterInterface { +func (c *WorkstationsV1beta1Client) WorkstationClusters(namespace string) WorkstationClusterInterface { return newWorkstationClusters(c, namespace) } -// NewForConfig creates a new WorkstationsV1alpha1Client for the given config. +// NewForConfig creates a new WorkstationsV1beta1Client for the given config. // NewForConfig is equivalent to NewForConfigAndClient(c, httpClient), // where httpClient was generated with rest.HTTPClientFor(c). 
-func NewForConfig(c *rest.Config) (*WorkstationsV1alpha1Client, error) { +func NewForConfig(c *rest.Config) (*WorkstationsV1beta1Client, error) { config := *c if err := setConfigDefaults(&config); err != nil { return nil, err @@ -58,9 +58,9 @@ func NewForConfig(c *rest.Config) (*WorkstationsV1alpha1Client, error) { return NewForConfigAndClient(&config, httpClient) } -// NewForConfigAndClient creates a new WorkstationsV1alpha1Client for the given config and http client. +// NewForConfigAndClient creates a new WorkstationsV1beta1Client for the given config and http client. // Note the http client provided takes precedence over the configured transport values. -func NewForConfigAndClient(c *rest.Config, h *http.Client) (*WorkstationsV1alpha1Client, error) { +func NewForConfigAndClient(c *rest.Config, h *http.Client) (*WorkstationsV1beta1Client, error) { config := *c if err := setConfigDefaults(&config); err != nil { return nil, err @@ -69,12 +69,12 @@ func NewForConfigAndClient(c *rest.Config, h *http.Client) (*WorkstationsV1alpha if err != nil { return nil, err } - return &WorkstationsV1alpha1Client{client}, nil + return &WorkstationsV1beta1Client{client}, nil } -// NewForConfigOrDie creates a new WorkstationsV1alpha1Client for the given config and +// NewForConfigOrDie creates a new WorkstationsV1beta1Client for the given config and // panics if there is an error in the config. -func NewForConfigOrDie(c *rest.Config) *WorkstationsV1alpha1Client { +func NewForConfigOrDie(c *rest.Config) *WorkstationsV1beta1Client { client, err := NewForConfig(c) if err != nil { panic(err) @@ -82,13 +82,13 @@ func NewForConfigOrDie(c *rest.Config) *WorkstationsV1alpha1Client { return client } -// New creates a new WorkstationsV1alpha1Client for the given RESTClient. -func New(c rest.Interface) *WorkstationsV1alpha1Client { - return &WorkstationsV1alpha1Client{c} +// New creates a new WorkstationsV1beta1Client for the given RESTClient. +func New(c rest.Interface) *WorkstationsV1beta1Client { + return &WorkstationsV1beta1Client{c} } func setConfigDefaults(config *rest.Config) error { - gv := v1alpha1.SchemeGroupVersion + gv := v1beta1.SchemeGroupVersion config.GroupVersion = &gv config.APIPath = "/apis" config.NegotiatedSerializer = scheme.Codecs.WithoutConversion() @@ -102,7 +102,7 @@ func setConfigDefaults(config *rest.Config) error { // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. 
-func (c *WorkstationsV1alpha1Client) RESTClient() rest.Interface { +func (c *WorkstationsV1beta1Client) RESTClient() rest.Interface { if c == nil { return nil } diff --git a/pkg/controller/direct/workstations/workstationcluster_controller.go b/pkg/controller/direct/workstations/workstationcluster_controller.go index dded7c979f..35b1018323 100644 --- a/pkg/controller/direct/workstations/workstationcluster_controller.go +++ b/pkg/controller/direct/workstations/workstationcluster_controller.go @@ -20,7 +20,7 @@ import ( "reflect" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/directbase" diff --git a/pkg/controller/direct/workstations/workstationcluster_mappings.go b/pkg/controller/direct/workstations/workstationcluster_mappings.go index 9928074fcf..6cf03cd33e 100644 --- a/pkg/controller/direct/workstations/workstationcluster_mappings.go +++ b/pkg/controller/direct/workstations/workstationcluster_mappings.go @@ -19,7 +19,7 @@ import ( status "google.golang.org/genproto/googleapis/rpc/status" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" ) diff --git a/pkg/controller/direct/workstations/workstationcluster_normalize.go b/pkg/controller/direct/workstations/workstationcluster_normalize.go index f256363d2f..654b2eb19c 100644 --- a/pkg/controller/direct/workstations/workstationcluster_normalize.go +++ b/pkg/controller/direct/workstations/workstationcluster_normalize.go @@ -20,7 +20,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/workstations/v1beta1" ) func NormalizeWorkstationCluster(ctx context.Context, kube client.Reader, obj *krm.WorkstationCluster) error { diff --git a/pkg/gvks/supportedgvks/gvks_generated.go b/pkg/gvks/supportedgvks/gvks_generated.go index 17274db582..c7d626d735 100644 --- a/pkg/gvks/supportedgvks/gvks_generated.go +++ b/pkg/gvks/supportedgvks/gvks_generated.go @@ -4414,4 +4414,14 @@ var SupportedGVKs = map[schema.GroupVersionKind]GVKMetadata{ "cnrm.cloud.google.com/managed-by-kcc": "true", "cnrm.cloud.google.com/system": "true", }, + }, + { + Group: "workstations.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "WorkstationCluster", + }: { + Labels: map[string]string{ + "cnrm.cloud.google.com/managed-by-kcc": "true", + "cnrm.cloud.google.com/system": "true", + }, }} diff --git a/pkg/snippet/snippetgeneration/snippetgeneration.go b/pkg/snippet/snippetgeneration/snippetgeneration.go index 6a8891aaa0..81c7ba4df7 100644 --- a/pkg/snippet/snippetgeneration/snippetgeneration.go +++ b/pkg/snippet/snippetgeneration/snippetgeneration.go @@ -113,6 +113,7 @@ var preferredSampleForResource = map[string]string{ "vpcaccessconnector": "cidr-connector", 
"vertexaidataset": "vertexai-dataset-encryptionkey", "vertexaiendpoint": "vertexai-endpoint-network", + "workstationcluster": "basic-workstationcluster", } type Snippet struct { diff --git a/pkg/test/resourcefixture/sets.go b/pkg/test/resourcefixture/sets.go index 38d26911ed..3d5d2eb4ec 100644 --- a/pkg/test/resourcefixture/sets.go +++ b/pkg/test/resourcefixture/sets.go @@ -95,6 +95,7 @@ func IsPureDirectResource(gk schema.GroupKind) bool { "PrivilegedAccessManagerEntitlement", "RedisCluster", "BigQueryAnalyticsHubDataExchange", + "WorkstationCluster", } return slices.Contains(pureDirectResources, gk.Kind) } diff --git a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/_generated_object_workstationcluster-full.golden.yaml b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/_generated_object_workstationcluster-full.golden.yaml index a994215717..caaec82d5e 100644 --- a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/_generated_object_workstationcluster-full.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/_generated_object_workstationcluster-full.golden.yaml @@ -1,6 +1,8 @@ -apiVersion: workstations.cnrm.cloud.google.com/v1alpha1 +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 kind: WorkstationCluster metadata: + annotations: + cnrm.cloud.google.com/management-conflict-prevention-policy: none finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/create.yaml b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/create.yaml index 683fa2a672..d1f1736bf3 100644 --- a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: workstations.cnrm.cloud.google.com/v1alpha1 +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 kind: WorkstationCluster metadata: name: workstationcluster-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/update.yaml b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/update.yaml index e5257c0ce3..6593569a23 100644 --- a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-full/update.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: workstations.cnrm.cloud.google.com/v1alpha1 +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 kind: WorkstationCluster metadata: name: workstationcluster-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/_generated_object_workstationcluster-minimal.golden.yaml b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/_generated_object_workstationcluster-minimal.golden.yaml index fe45a18f71..a3f9889d0d 100644 --- a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/_generated_object_workstationcluster-minimal.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/_generated_object_workstationcluster-minimal.golden.yaml @@ -1,6 +1,8 @@ -apiVersion: workstations.cnrm.cloud.google.com/v1alpha1 +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 kind: WorkstationCluster metadata: + annotations: + cnrm.cloud.google.com/management-conflict-prevention-policy: none finalizers: - cnrm.cloud.google.com/finalizer - cnrm.cloud.google.com/deletion-defender diff --git a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/create.yaml b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/create.yaml index f94a28f345..a9dab4c53a 100644 --- a/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/workstations/workstationcluster/workstationcluster-minimal/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: workstations.cnrm.cloud.google.com/v1alpha1 +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 kind: WorkstationCluster metadata: name: workstationcluster-${uniqueId} diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/workstations/workstationcluster.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/workstations/workstationcluster.md new file mode 100644 index 0000000000..5478efca0f --- /dev/null +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/workstations/workstationcluster.md @@ -0,0 +1,746 @@ +{# AUTOGENERATED. DO NOT EDIT. #} + +{% extends "config-connector/_base.html" %} + +{% block page_title %}WorkstationCluster{% endblock %} +{% block body %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Property | Value
{{gcp_name_short}} Service Name | Cloud Workstations
{{gcp_name_short}} Service Documentation | /workstations/docs/
{{gcp_name_short}} REST Resource Name | v1.projects.locations.workstationClusters
{{gcp_name_short}} REST Resource Documentation | /workstations/docs/reference/rest/v1/projects.locations.workstationClusters
{{product_name_short}} Resource Short Names | workstationcluster
{{product_name_short}} Service Name | workstations.googleapis.com
{{product_name_short}} Resource Fully Qualified Name | workstationclusters.workstations.cnrm.cloud.google.com
Can Be Referenced by IAMPolicy/IAMPolicyMember | No
{{product_name_short}} Default Average Reconcile Interval In Seconds | 600
+ +## Custom Resource Definition Properties + + + +### Spec +#### Schema +```yaml +annotations: +- key: string + value: string +displayName: string +labels: +- key: string + value: string +location: string +networkRef: + external: string + name: string + namespace: string +privateClusterConfig: + allowedProjects: + - external: string + kind: string + name: string + namespace: string + enablePrivateEndpoint: boolean +projectRef: + external: string + kind: string + name: string + namespace: string +resourceID: string +subnetworkRef: + external: string + name: string + namespace: string +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Fields
+

annotations

+

Optional

+
+

list (object)

+

{% verbatim %}Optional. Client-specified annotations.{% endverbatim %}

+
+

annotations[]

+

Optional

+
+

object

+

{% verbatim %}{% endverbatim %}

+
+

annotations[].key

+

Optional

+
+

string

+

{% verbatim %}Key for the annotation.{% endverbatim %}

+
+

annotations[].value

+

Optional

+
+

string

+

{% verbatim %}Value for the annotation.{% endverbatim %}

+
+

displayName

+

Optional

+
+

string

+

{% verbatim %}Optional. Human-readable name for this workstation cluster.{% endverbatim %}

+
+

labels

+

Optional

+
+

list (object)

+

{% verbatim %}Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) that are applied to the workstation cluster and that are also propagated to the underlying Compute Engine resources.{% endverbatim %}

+
+

labels[]

+

Optional

+
+

object

+

{% verbatim %}{% endverbatim %}

+
+

labels[].key

+

Optional

+
+

string

+

{% verbatim %}Key for the label.{% endverbatim %}

+
+

labels[].value

+

Optional

+
+

string

+

{% verbatim %}Value for the label.{% endverbatim %}

+
+

location

+

Optional

+
+

string

+

{% verbatim %}The location of the cluster.{% endverbatim %}

+
+

networkRef

+

Required*

+
+

object

+

{% verbatim %}Immutable. Reference to the Compute Engine network in which instances associated with this workstation cluster will be created.{% endverbatim %}

+
+

networkRef.external

+

Optional

+
+

string

+

{% verbatim %}A reference to an externally managed Compute Network resource. Should be in the format `projects/{{project}}/global/networks/{{name}}`.{% endverbatim %}

+
+

networkRef.name

+

Optional

+
+

string

+

{% verbatim %}The `name` field of a `ComputeNetwork` resource.{% endverbatim %}

+
+

networkRef.namespace

+

Optional

+
+

string

+

{% verbatim %}The `namespace` field of a `ComputeNetwork` resource.{% endverbatim %}

+
+

privateClusterConfig

+

Optional

+
+

object

+

{% verbatim %}Optional. Configuration for private workstation cluster.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects

+

Optional

+
+

list (object)

+

{% verbatim %}Optional. Additional projects that are allowed to attach to the workstation cluster's service attachment. By default, the workstation cluster's project and the VPC host project (if different) are allowed.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects[]

+

Optional

+
+

object

+

{% verbatim %}The Project that this resource belongs to.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects[].external

+

Optional

+
+

string

+

{% verbatim %}The `projectID` field of a project, when not managed by Config Connector.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects[].kind

+

Optional

+
+

string

+

{% verbatim %}The kind of the Project resource; optional but must be `Project` if provided.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects[].name

+

Optional

+
+

string

+

{% verbatim %}The `name` field of a `Project` resource.{% endverbatim %}

+
+

privateClusterConfig.allowedProjects[].namespace

+

Optional

+
+

string

+

{% verbatim %}The `namespace` field of a `Project` resource.{% endverbatim %}

+
+

privateClusterConfig.enablePrivateEndpoint

+

Optional

+
+

boolean

+

{% verbatim %}Immutable. Whether Workstations endpoint is private.{% endverbatim %}

+
+

projectRef

+

Required*

+
+

object

+

{% verbatim %}Immutable. The Project that this resource belongs to.{% endverbatim %}

+
+

projectRef.external

+

Optional

+
+

string

+

{% verbatim %}The `projectID` field of a project, when not managed by Config Connector.{% endverbatim %}

+
+

projectRef.kind

+

Optional

+
+

string

+

{% verbatim %}The kind of the Project resource; optional but must be `Project` if provided.{% endverbatim %}

+
+

projectRef.name

+

Optional

+
+

string

+

{% verbatim %}The `name` field of a `Project` resource.{% endverbatim %}

+
+

projectRef.namespace

+

Optional

+
+

string

+

{% verbatim %}The `namespace` field of a `Project` resource.{% endverbatim %}

+
+

resourceID

+

Optional

+
+

string

+

{% verbatim %}Immutable. The WorkstationCluster name. If not given, the metadata.name will be used.{% endverbatim %}

+
+

subnetworkRef

+

Required*

+
+

object

+

{% verbatim %}Immutable. Reference to the Compute Engine subnetwork in which instances associated with this workstation cluster will be created. Must be part of the network specified for this workstation cluster.{% endverbatim %}

+
+

subnetworkRef.external

+

Optional

+
+

string

+

{% verbatim %}The ComputeSubnetwork selflink of form "projects/{{project}}/regions/{{region}}/subnetworks/{{name}}", when not managed by Config Connector.{% endverbatim %}

+
+

subnetworkRef.name

+

Optional

+
+

string

+

{% verbatim %}The `name` field of a `ComputeSubnetwork` resource.{% endverbatim %}

+
+

subnetworkRef.namespace

+

Optional

+
+

string

+

{% verbatim %}The `namespace` field of a `ComputeSubnetwork` resource.{% endverbatim %}

+
+ + +

* Field is required when parent field is specified
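
For illustration only, here is a minimal sketch of the `external` reference form described above; the project, region, network, and subnetwork names are placeholders, not values taken from this change:

```yaml
# Hypothetical names: replace my-project, my-network, us-west1, and my-subnetwork
# with your own project, network, region, and subnetwork.
networkRef:
  external: projects/my-project/global/networks/my-network
subnetworkRef:
  external: projects/my-project/regions/us-west1/subnetworks/my-subnetwork
```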

+ + +### Status +#### Schema +```yaml +conditions: +- lastTransitionTime: string + message: string + reason: string + status: string + type: string +externalRef: string +observedGeneration: integer +observedState: + clusterHostname: string + controlPlaneIP: string + createTime: string + degraded: boolean + deleteTime: string + etag: string + gcpConditions: + - code: integer + message: string + reconciling: boolean + serviceAttachmentUri: string + uid: string + updateTime: string +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Fields
conditions +

list (object)

+

{% verbatim %}Conditions represent the latest available observations of the object's current state.{% endverbatim %}

+
conditions[] +

object

+

{% verbatim %}{% endverbatim %}

+
conditions[].lastTransitionTime +

string

+

{% verbatim %}Last time the condition transitioned from one status to another.{% endverbatim %}

+
conditions[].message +

string

+

{% verbatim %}Human-readable message indicating details about last transition.{% endverbatim %}

+
conditions[].reason +

string

+

{% verbatim %}Unique, one-word, CamelCase reason for the condition's last transition.{% endverbatim %}

+
conditions[].status +

string

+

{% verbatim %}Status is the status of the condition. Can be True, False, Unknown.{% endverbatim %}

+
conditions[].type +

string

+

{% verbatim %}Type is the type of the condition.{% endverbatim %}

+
externalRef +

string

+

{% verbatim %}A unique specifier for the WorkstationCluster resource in GCP.{% endverbatim %}

+
observedGeneration +

integer

+

{% verbatim %}ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource.{% endverbatim %}

+
observedState +

object

+

{% verbatim %}ObservedState is the state of the resource as most recently observed in GCP.{% endverbatim %}

+
observedState.clusterHostname +

string

+

{% verbatim %}Output only. Hostname for the workstation cluster. This field will be populated only when private endpoint is enabled. To access workstations in the workstation cluster, create a new DNS zone mapping this domain name to an internal IP address and a forwarding rule mapping that address to the service attachment.{% endverbatim %}

+
observedState.controlPlaneIP +

string

+

{% verbatim %}Output only. The private IP address of the control plane for this workstation cluster. Workstation VMs need access to this IP address to work with the service, so make sure that your firewall rules allow egress from the workstation VMs to this address.{% endverbatim %}

+
observedState.createTime +

string

+

{% verbatim %}Output only. Time when this workstation cluster was created.{% endverbatim %}

+
observedState.degraded +

boolean

+

{% verbatim %}Output only. Whether this workstation cluster is in degraded mode, in which case it may require user action to restore full functionality. Details can be found in [conditions][google.cloud.workstations.v1.WorkstationCluster.conditions].{% endverbatim %}

+
observedState.deleteTime +

string

+

{% verbatim %}Output only. Time when this workstation cluster was soft-deleted.{% endverbatim %}

+
observedState.etag +

string

+

{% verbatim %}Optional. Checksum computed by the server. May be sent on update and delete requests to make sure that the client has an up-to-date value before proceeding.{% endverbatim %}

+
observedState.gcpConditions +

list (object)

+

{% verbatim %}Output only. Status conditions describing the workstation cluster's current state.{% endverbatim %}

+
observedState.gcpConditions[] +

object

+

{% verbatim %}{% endverbatim %}

+
observedState.gcpConditions[].code +

integer

+

{% verbatim %}The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].{% endverbatim %}

+
observedState.gcpConditions[].message +

string

+

{% verbatim %}A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.{% endverbatim %}

+
observedState.reconciling +

boolean

+

{% verbatim %}Output only. Indicates whether this workstation cluster is currently being updated to match its intended state.{% endverbatim %}

+
observedState.serviceAttachmentUri +

string

+

{% verbatim %}Output only. Service attachment URI for the workstation cluster. The service attachment is created when private endpoint is enabled. To access workstations in the workstation cluster, configure access to the managed service using [Private Service Connect](https://cloud.google.com/vpc/docs/configure-private-service-connect-services).{% endverbatim %}

+
observedState.uid +

string

+

{% verbatim %}Output only. A system-assigned unique identifier for this workstation cluster.{% endverbatim %}

+
observedState.updateTime +

string

+

{% verbatim %}Output only. Time when this workstation cluster was most recently updated.{% endverbatim %}

+
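
As an illustrative sketch only (all values below are invented, and the `externalRef` format is an assumption based on the workstationClusters resource name pattern), a reconciled WorkstationCluster reports status along these lines:

```yaml
# Hypothetical status payload; field names follow the schema above.
status:
  conditions:
  - lastTransitionTime: "2024-01-01T00:00:00Z"
    message: The resource is up to date
    reason: UpToDate
    status: "True"
    type: Ready
  externalRef: projects/my-project/locations/us-west1/workstationClusters/workstationcluster-sample
  observedGeneration: 1
  observedState:
    controlPlaneIP: 10.0.0.2
    reconciling: false
```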
+ +## Sample YAML(s) + +### Basic WorkstationCluster +```yaml +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 +kind: WorkstationCluster +metadata: + name: workstationcluster-sample +spec: + projectRef: + external: "projects/${PROJECT_NUMBER1}" + location: us-west1 + networkRef: + name: computenetwork-dep + subnetworkRef: + name: computesubnetwork-dep +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: computenetwork-dep +spec: + routingMode: GLOBAL + autoCreateSubnetworks: false +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeSubnetwork +metadata: + name: computesubnetwork-dep +spec: + ipCidrRange: 10.0.0.0/24 + region: us-west1 + networkRef: + name: computenetwork-dep +``` + +### WorkstationCluster With Privateclusterconfig +```yaml +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: workstations.cnrm.cloud.google.com/v1beta1 +kind: WorkstationCluster +metadata: + name: workstationcluster-sample +spec: + projectRef: + external: "projects/${PROJECT_NUMBER1}" + location: us-west1 + displayName: workstationcluster-sample-displayname + annotations: + - key: a-key1 + value: a-value1 + labels: + - key: l-key1 + value: l-value1 + networkRef: + name: computenetwork-dep + subnetworkRef: + name: computesubnetwork-dep + privateClusterConfig: + enablePrivateEndpoint: true + allowedProjects: + - external: "projects/${PROJECT_NUMBER1}" +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeNetwork +metadata: + name: computenetwork-dep +spec: + routingMode: GLOBAL + autoCreateSubnetworks: false +--- +apiVersion: compute.cnrm.cloud.google.com/v1beta1 +kind: ComputeSubnetwork +metadata: + name: computesubnetwork-dep +spec: + ipCidrRange: 10.0.0.0/24 + region: us-west1 + networkRef: + name: computenetwork-dep +``` + + +Note: If you have any trouble with instantiating the resource, refer to Troubleshoot Config Connector. 
+ +{% endblock %} diff --git a/scripts/generate-google3-docs/resource-reference/templates/workstations_workstationcluster.tmpl b/scripts/generate-google3-docs/resource-reference/templates/workstations_workstationcluster.tmpl new file mode 100644 index 0000000000..406091b8fa --- /dev/null +++ b/scripts/generate-google3-docs/resource-reference/templates/workstations_workstationcluster.tmpl @@ -0,0 +1,53 @@ +{{template "headercomment.tmpl" .}} + +{% extends "config-connector/_base.html" %} + +{% block page_title %}{{ .Kind}}{% endblock %} +{% block body %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +{{template "iamsupport.tmpl" .}} + + + + + +
Property | Value
{{"{{gcp_name_short}}"}} Service Name | Cloud Workstations
{{"{{gcp_name_short}}"}} Service Documentation | /workstations/docs/
{{"{{gcp_name_short}}"}} REST Resource Name | v1.projects.locations.workstationClusters
{{"{{gcp_name_short}}"}} REST Resource Documentation | /workstations/docs/reference/rest/v1/projects.locations.workstationClusters
{{"{{product_name_short}}"}} Resource Short Names | {{ .ShortNames}}
{{"{{product_name_short}}"}} Service Name | workstations.googleapis.com
{{"{{product_name_short}}"}} Resource Fully Qualified Name | {{ .FullyQualifiedName}}
{{"{{product_name_short}}"}} Default Average Reconcile Interval In Seconds | {{ .DefaultReconcileInterval}}
+ +{{template "resource.tmpl" .}} +{{template "endnote.tmpl" .}} +{% endblock %} From 1c73e78dcb3ad4a99531bcf468b908bff30f277c Mon Sep 17 00:00:00 2001 From: Joyce Ma Date: Tue, 5 Nov 2024 21:16:52 +0000 Subject: [PATCH 25/31] Update field name from 'step' to 'steps' in PrivilegedAccessManagerEntitlement --- .../v1alpha1/types.generated.go | 2 +- .../v1beta1/types.generated.go | 2 +- .../cloudbuild_v1beta1_cloudbuildtrigger.yaml | 2 +- ...edaccessmanager.cnrm.cloud.google.com.yaml | 4 ++-- ...a1_privilegedaccessmanagerentitlement.yaml | 2 +- ...rivilegedaccessmanagerentitlement_types.go | 4 ++-- .../v1beta1/zz_generated.deepcopy.go | 14 ++++++------ ...ssmanagerentitlementfullfolder.golden.yaml | 2 +- .../create.yaml | 2 +- .../update.yaml | 2 +- ...ccessmanagerentitlementfullorg.golden.yaml | 2 +- .../create.yaml | 2 +- .../update.yaml | 2 +- .../privilegedaccessmanagerentitlement.md | 22 +++++++++---------- 14 files changed, 32 insertions(+), 32 deletions(-) diff --git a/apis/privilegedaccessmanager/v1alpha1/types.generated.go b/apis/privilegedaccessmanager/v1alpha1/types.generated.go index 9f230b1001..f8c288c663 100644 --- a/apis/privilegedaccessmanager/v1alpha1/types.generated.go +++ b/apis/privilegedaccessmanager/v1alpha1/types.generated.go @@ -99,7 +99,7 @@ type ManualApprovals struct { // Optional. List of approval steps in this workflow. These steps are followed // in the specified order sequentially. Only 1 step is supported. // +optional - Steps []Step `json:"step,omitempty"` + Steps []Step `json:"steps,omitempty"` } // Step represents a logical step in a manual approval workflow. diff --git a/apis/privilegedaccessmanager/v1beta1/types.generated.go b/apis/privilegedaccessmanager/v1beta1/types.generated.go index 94f25dc7b1..a69ebc2846 100644 --- a/apis/privilegedaccessmanager/v1beta1/types.generated.go +++ b/apis/privilegedaccessmanager/v1beta1/types.generated.go @@ -99,7 +99,7 @@ type ManualApprovals struct { // Optional. List of approval steps in this workflow. These steps are followed // in the specified order sequentially. Only 1 step is supported. // +optional - Steps []Step `json:"step,omitempty"` + Steps []Step `json:"steps,omitempty"` } // Step represents a logical step in a manual approval workflow. diff --git a/config/cloudcodesnippets/cloudbuild_v1beta1_cloudbuildtrigger.yaml b/config/cloudcodesnippets/cloudbuild_v1beta1_cloudbuildtrigger.yaml index 5ea0b3f51f..755acb312f 100644 --- a/config/cloudcodesnippets/cloudbuild_v1beta1_cloudbuildtrigger.yaml +++ b/config/cloudcodesnippets/cloudbuild_v1beta1_cloudbuildtrigger.yaml @@ -27,7 +27,7 @@ insertText: | - \${11:team-a} - \${12:service-b} timeout: \${13:1800s} - step: + steps: - id: \${14:download_zip} name: \${15:gcr.io/cloud-builders/gsutil} args: diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_privilegedaccessmanagerentitlements.privilegedaccessmanager.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_privilegedaccessmanagerentitlements.privilegedaccessmanager.cnrm.cloud.google.com.yaml index 2cbc0f249d..33ed8273e2 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_privilegedaccessmanagerentitlements.privilegedaccessmanager.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_privilegedaccessmanagerentitlements.privilegedaccessmanager.cnrm.cloud.google.com.yaml @@ -87,7 +87,7 @@ spec: description: Optional. 
Whether the approvers need to provide a justification for their actions. type: boolean - step: + steps: description: Optional. List of approval steps in this workflow. These steps are followed in the specified order sequentially. Only 1 step is supported. @@ -454,7 +454,7 @@ spec: description: Optional. Whether the approvers need to provide a justification for their actions. type: boolean - step: + steps: description: Optional. List of approval steps in this workflow. These steps are followed in the specified order sequentially. Only 1 step is supported. diff --git a/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml b/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml index 6d5c3d3955..ea5295f87e 100644 --- a/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml +++ b/config/samples/resources/privilegedaccessmanagerentitlement/folder-level-entitlement/privilegedaccessmanager_v1beta1_privilegedaccessmanagerentitlement.yaml @@ -44,7 +44,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: # Replace ${PROJECT_ID?} with your project ID. diff --git a/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/privilegedaccessmanagerentitlement_types.go b/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/privilegedaccessmanagerentitlement_types.go index fff70a2ab3..4f21a093da 100644 --- a/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/privilegedaccessmanagerentitlement_types.go +++ b/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/privilegedaccessmanagerentitlement_types.go @@ -72,7 +72,7 @@ type EntitlementManualApprovals struct { /* Optional. List of approval steps in this workflow. These steps are followed in the specified order sequentially. Only 1 step is supported. */ // +optional - Step []EntitlementStep `json:"step,omitempty"` + Steps []EntitlementSteps `json:"steps,omitempty"` } type EntitlementNotMandatory struct { @@ -109,7 +109,7 @@ type EntitlementRoleBindings struct { Role string `json:"role"` } -type EntitlementStep struct { +type EntitlementSteps struct { /* Required. How many users from the above list need to approve. If there aren't enough distinct users in the list, then the workflow indefinitely blocks. Should always be greater than 0. 1 is the only supported value. 
*/ ApprovalsNeeded int32 `json:"approvalsNeeded"` diff --git a/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/zz_generated.deepcopy.go index 1a08465893..6a8c4c9441 100644 --- a/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/privilegedaccessmanager/v1beta1/zz_generated.deepcopy.go @@ -145,9 +145,9 @@ func (in *EntitlementManualApprovals) DeepCopyInto(out *EntitlementManualApprova *out = new(bool) **out = **in } - if in.Step != nil { - in, out := &in.Step, &out.Step - *out = make([]EntitlementStep, len(*in)) + if in.Steps != nil { + in, out := &in.Steps, &out.Steps + *out = make([]EntitlementSteps, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -282,7 +282,7 @@ func (in *EntitlementRoleBindings) DeepCopy() *EntitlementRoleBindings { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EntitlementStep) DeepCopyInto(out *EntitlementStep) { +func (in *EntitlementSteps) DeepCopyInto(out *EntitlementSteps) { *out = *in if in.ApproverEmailRecipients != nil { in, out := &in.ApproverEmailRecipients, &out.ApproverEmailRecipients @@ -299,12 +299,12 @@ func (in *EntitlementStep) DeepCopyInto(out *EntitlementStep) { return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntitlementStep. -func (in *EntitlementStep) DeepCopy() *EntitlementStep { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntitlementSteps. +func (in *EntitlementSteps) DeepCopy() *EntitlementSteps { if in == nil { return nil } - out := new(EntitlementStep) + out := new(EntitlementSteps) in.DeepCopyInto(out) return out } diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/_generated_object_privilegedaccessmanagerentitlementfullfolder.golden.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/_generated_object_privilegedaccessmanagerentitlementfullfolder.golden.yaml index 8ea3a2510b..85ab508214 100644 --- a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/_generated_object_privilegedaccessmanagerentitlementfullfolder.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/_generated_object_privilegedaccessmanagerentitlementfullfolder.golden.yaml @@ -21,7 +21,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - gsa-1-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/create.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/create.yaml index f7867dbb5b..222cdf4c9e 100644 --- 
a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/create.yaml @@ -40,7 +40,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: false - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - gsa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/update.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/update.yaml index 02abeeba51..b1d1b73483 100644 --- a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullfolder/update.yaml @@ -41,7 +41,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - gsa-1-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/_generated_object_privilegedaccessmanagerentitlementfullorg.golden.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/_generated_object_privilegedaccessmanagerentitlementfullorg.golden.yaml index b3262d9f23..06b20db417 100644 --- a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/_generated_object_privilegedaccessmanagerentitlementfullorg.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/_generated_object_privilegedaccessmanagerentitlementfullorg.golden.yaml @@ -21,7 +21,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - gsa-1-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/create.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/create.yaml index af1140cf90..fb3d750e24 100644 --- a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/create.yaml @@ -40,7 +40,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: false - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - 
gsa-2-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/update.yaml b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/update.yaml index c94caddcd9..471c9ae31a 100644 --- a/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/privilegedaccessmanager/v1alpha1/privilegedaccessmanagerentitlement/privilegedaccessmanagerentitlementfullorg/update.yaml @@ -41,7 +41,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: - gsa-1-${uniqueId}@${projectId}.iam.gserviceaccount.com diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md index 598da25b7a..9a73baa71c 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/privilegedaccessmanager/privilegedaccessmanagerentitlement.md @@ -77,7 +77,7 @@ additionalNotificationTargets: approvalWorkflow: manualApprovals: requireApproverJustification: boolean - step: + steps: - approvalsNeeded: integer approverEmailRecipients: - string @@ -200,7 +200,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step

+

approvalWorkflow.manualApprovals.steps

Optional

@@ -210,7 +210,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[]

+

approvalWorkflow.manualApprovals.steps[]

Optional

@@ -220,7 +220,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approvalsNeeded

+

approvalWorkflow.manualApprovals.steps[].approvalsNeeded

Required*

@@ -230,7 +230,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approverEmailRecipients

+

approvalWorkflow.manualApprovals.steps[].approverEmailRecipients

Optional

@@ -240,7 +240,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approverEmailRecipients[]

+

approvalWorkflow.manualApprovals.steps[].approverEmailRecipients[]

Optional

@@ -250,7 +250,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approvers

+

approvalWorkflow.manualApprovals.steps[].approvers

Optional

@@ -260,7 +260,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approvers[]

+

approvalWorkflow.manualApprovals.steps[].approvers[]

Optional

@@ -270,7 +270,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approvers[].principals

+

approvalWorkflow.manualApprovals.steps[].approvers[].principals

Required*

@@ -280,7 +280,7 @@ resourceID: string -

approvalWorkflow.manualApprovals.step[].approvers[].principals[]

+

approvalWorkflow.manualApprovals.steps[].approvers[].principals[]

Required*

@@ -748,7 +748,7 @@ spec: approvalWorkflow: manualApprovals: requireApproverJustification: true - step: + steps: - approvalsNeeded: 1 approverEmailRecipients: # Replace ${PROJECT_ID?} with your project ID. From c705bafa7d02a0a99c73fadd49d030122a1d89e4 Mon Sep 17 00:00:00 2001 From: Jingyi Hu Date: Tue, 5 Nov 2024 21:56:17 +0000 Subject: [PATCH 26/31] docs: update release note with cluster mode rate limit feature --- docs/releasenotes/release-1.125.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/releasenotes/release-1.125.md b/docs/releasenotes/release-1.125.md index e611008d20..db4be43de3 100644 --- a/docs/releasenotes/release-1.125.md +++ b/docs/releasenotes/release-1.125.md @@ -21,3 +21,9 @@ TODO: list contributors with `git log v1.124.0... | grep Merge | grep from | awk * PlaceholderKind * Added `spec.placeholder` field. + +## New features: + +* Allow more customization of resource reconciliation in cluster mode + * Added a new `ControllerReconciler` CRD (v1alpha1). See [example](https://github.com/GoogleCloudPlatform/k8s-config-connector/blob/master/operator/config/samples/controller_reconciler_customization_sample.yaml) + * This feature allows users to customize the client-side kube-apiserver request rate limit when Config Connector is runnning in cluster mode. From 4764ad93cd353cc3be68a154a9a8e13bb379f8fd Mon Sep 17 00:00:00 2001 From: xiaoweim Date: Sat, 28 Sep 2024 01:26:47 +0000 Subject: [PATCH 27/31] feat: add direct controller for BigQueryDataset --- apis/bigquery/v1beta1/dataset_reference.go | 187 + apis/bigquery/v1beta1/dataset_types.go | 11 +- apis/bigquery/v1beta1/types.generated.go | 5103 +---------- .../bigquery/v1beta1/zz_generated.deepcopy.go | 7698 +---------------- ...tasets.bigquery.cnrm.cloud.google.com.yaml | 45 +- .../bigquery_v1beta1_bigquerydataset.yaml | 3 +- .../bigquery_v1beta1_bigquerydataset.yaml | 2 + .../bigquery_v1beta1_bigquerydataset.yaml | 1 + .../pkg/codegen/mappergenerator.go | 2 + go.mod | 10 +- go.sum | 31 +- mockgcp/mockbigquery/datasets.go | 1 - .../bigquery/v1beta1/bigquerydataset_types.go | 25 +- .../bigquery/v1beta1/zz_generated.deepcopy.go | 10 +- .../direct/bigquery/v2/mapper.generated.go | 248 + .../bigquerydataset_mappings.go | 330 +- .../bigquerydataset/dataset_controller.go | 315 + .../dataset_externalresource.go | 25 + .../direct/bigquerydataset/utils.go | 57 + pkg/controller/direct/register/register.go | 1 + ..._export_basicbigquerydataset-direct.golden | 25 + ...ct_basicbigquerydataset-direct.golden.yaml | 32 + .../basicbigquerydataset-direct/_http.log | 369 + .../basicbigquerydataset-direct/create.yaml | 23 + .../basicbigquerydataset-direct/update.yaml | 23 + ...nerated_export_basicbigquerydataset.golden | 2 +- ...ed_object_basicbigquerydataset.golden.yaml | 1 + .../basicbigquerydataset/_http.log | 12 +- .../basicbigquerydataset/create.yaml | 1 + .../basicbigquerydataset/update.yaml | 1 + ...t_bigquerydatasetaccessblock-direct.golden | 25 + ...querydatasetaccessblock-direct.golden.yaml | 41 + .../_http.log | 393 + .../create.yaml | 32 + .../update.yaml | 34 + .../bigquerydatasetaccessblock/create.yaml | 2 + .../bigquerydatasetaccessblock/update.yaml | 2 + ..._export_fullybigquerydataset-direct.golden | 30 + ...ct_fullybigquerydataset-direct.golden.yaml | 49 + .../fullybigquerydataset-direct/_http.log | 999 +++ .../fullybigquerydataset-direct/create.yaml | 38 + .../dependencies.yaml | 53 + .../fullybigquerydataset-direct/update.yaml | 42 + .../bigqueryjob/_vcr_cassettes/tf.yaml | 2788 +++--- 
.../bigquery/v1beta1/bigqueryjob/create.yaml | 2 +- .../v1beta1/bigqueryjob/dependencies.yaml | 4 + .../bigquery/v1beta1/bigquerytable/_http.log | 8 +- .../bigquerytable/_vcr_cassettes/tf.yaml | 136 +- .../v1beta1/bigquerytable/dependencies.yaml | 2 + .../_http.log | 8 +- .../dependencies.yaml | 2 + .../bigquerypubsubsubscription/_http.log | 8 +- .../dependencies.yaml | 1 + .../_generated_export_projectid.golden | 2 +- .../_generated_object_projectid.golden.yaml | 1 + .../containerannotations/projectid/_http.log | 12 +- .../projectid/create.yaml | 1 + .../projectid/update.yaml | 1 + .../_generated_export_bigquerydataset.golden | 2 +- ...nerated_object_bigquerydataset.golden.yaml | 1 + .../bigquerydataset/_http.log | 12 +- .../bigquerydataset/create.yaml | 1 + .../bigquerydataset/update.yaml | 1 + .../_http.log | 8 +- .../dependencies.yaml | 1 + ...ated_export_userspecifiedresourceid.golden | 2 +- ...object_userspecifiedresourceid.golden.yaml | 1 + .../userspecifiedresourceid/_http.log | 12 +- .../userspecifiedresourceid/create.yaml | 3 +- .../userspecifiedresourceid/update.yaml | 3 +- ...generated_export_bigquerydataset#01.golden | 2 +- ...ated_object_bigquerydataset#01.golden.yaml | 15 +- .../bigquerydataset/_http.log | 12 +- .../bigquerydataset/create.yaml | 1 + .../bigquerydataset/update.yaml | 1 + .../resource-docs/bigquery/bigquerydataset.md | 28 +- .../resource-docs/bigquery/bigquerytable.md | 1 + .../dataflow/dataflowflextemplatejob.md | 2 + .../pubsub/pubsubsubscription.md | 1 + 79 files changed, 4743 insertions(+), 14677 deletions(-) create mode 100644 apis/bigquery/v1beta1/dataset_reference.go create mode 100644 pkg/controller/direct/bigquery/v2/mapper.generated.go create mode 100644 pkg/controller/direct/bigquerydataset/dataset_controller.go create mode 100644 pkg/controller/direct/bigquerydataset/dataset_externalresource.go create mode 100644 pkg/controller/direct/bigquerydataset/utils.go create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml create mode 100644 
pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml create mode 100644 pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml diff --git a/apis/bigquery/v1beta1/dataset_reference.go b/apis/bigquery/v1beta1/dataset_reference.go new file mode 100644 index 0000000000..27df856488 --- /dev/null +++ b/apis/bigquery/v1beta1/dataset_reference.go @@ -0,0 +1,187 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + "context" + "fmt" + "strings" + + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/k8s" + apierrors "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +var _ refsv1beta1.ExternalNormalizer = &BigQueryDatasetRef{} + +// BigQueryDatasetRef defines the resource reference to BigQueryDataset, which "External" field +// holds the GCP identifier for the KRM object. +type BigQueryDatasetRef struct { + // A reference to an externally managed BigQueryDataset resource. + // Should be in the format "projects//locations//datasets/". + External string `json:"external,omitempty"` + + // The name of a BigQueryDataset resource. + Name string `json:"name,omitempty"` + + // The namespace of a BigQueryDataset resource. + Namespace string `json:"namespace,omitempty"` + + parent *BigQueryDatasetParent +} + +// NormalizedExternal provision the "External" value for other resource that depends on BigQueryDataset. +// If the "External" is given in the other resource's spec.BigQueryDatasetRef, the given value will be used. +// Otherwise, the "Name" and "Namespace" will be used to query the actual BigQueryDataset object from the cluster. 
+func (r *BigQueryDatasetRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error) { + if r.External != "" && r.Name != "" { + return "", fmt.Errorf("cannot specify both name and external on %s reference", BigQueryDatasetGVK.Kind) + } + // From given External + if r.External != "" { + if _, _, err := ParseBigQueryDatasetExternal(r.External); err != nil { + return "", err + } + return r.External, nil + } + + // From the Config Connector object + if r.Namespace == "" { + r.Namespace = otherNamespace + } + key := types.NamespacedName{Name: r.Name, Namespace: r.Namespace} + u := &unstructured.Unstructured{} + u.SetGroupVersionKind(BigQueryDatasetGVK) + if err := reader.Get(ctx, key, u); err != nil { + if apierrors.IsNotFound(err) { + return "", k8s.NewReferenceNotFoundError(u.GroupVersionKind(), key) + } + return "", fmt.Errorf("reading referenced %s %s: %w", BigQueryDatasetGVK, key, err) + } + // Get external from status.externalRef. This is the most trustworthy place. + actualExternalRef, _, err := unstructured.NestedString(u.Object, "status", "externalRef") + if err != nil { + return "", fmt.Errorf("reading status.externalRef: %w", err) + } + if actualExternalRef == "" { + return "", k8s.NewReferenceNotReadyError(u.GroupVersionKind(), key) + } + r.External = actualExternalRef + return r.External, nil +} + +// New builds a BigQueryDatasetRef from the Config Connector BigQueryDataset object. +func NewBigQueryDatasetRef(ctx context.Context, reader client.Reader, obj *BigQueryDataset) (*BigQueryDatasetRef, error) { + id := &BigQueryDatasetRef{} + + // Get Parent + projectRef, err := refsv1beta1.ResolveProject(ctx, reader, obj, obj.Spec.ProjectRef) + if err != nil { + return nil, err + } + projectID := projectRef.ProjectID + if projectID == "" { + return nil, fmt.Errorf("cannot resolve project") + } + location := obj.Spec.Location + id.parent = &BigQueryDatasetParent{ProjectID: projectID, Location: valueOf(location)} + + // Get desired ID + resourceID := valueOf(obj.Spec.ResourceID) + if resourceID == "" { + resourceID = obj.GetName() + } + if resourceID == "" { + return nil, fmt.Errorf("cannot resolve resource ID") + } + + // Use approved External + externalRef := valueOf(obj.Status.ExternalRef) + if externalRef == "" { + id.External = asBigQueryDatasetExternal(id.parent, resourceID) + return id, nil + } + + // Validate desired with actual + actualParent, actualResourceID, err := ParseBigQueryDatasetExternal(externalRef) + if err != nil { + return nil, err + } + if actualParent.ProjectID != projectID { + return nil, fmt.Errorf("spec.projectRef changed, expect %s, got %s", actualParent.ProjectID, projectID) + } + if actualParent.Location != valueOf(location) { + return nil, fmt.Errorf("spec.location changed, expect %s, got %s", actualParent.Location, valueOf(location)) + } + if actualResourceID != resourceID { + return nil, fmt.Errorf("cannot reset `metadata.name` or `spec.resourceID` to %s, since it has already assigned to %s", + resourceID, actualResourceID) + } + id.External = externalRef + id.parent = &BigQueryDatasetParent{ProjectID: projectID, Location: valueOf(location)} + return id, nil +} + +func (r *BigQueryDatasetRef) Parent() (*BigQueryDatasetParent, error) { + if r.parent != nil { + return r.parent, nil + } + if r.External != "" { + parent, _, err := ParseBigQueryDatasetExternal(r.External) + if err != nil { + return nil, err + } + return parent, nil + } + return nil, fmt.Errorf("BigQueryDatasetRef not initialized from 
`NewBigQueryDatasetRef` or `NormalizedExternal`") +} + +type BigQueryDatasetParent struct { + ProjectID string + Location string +} + +func (p *BigQueryDatasetParent) String() string { + return "projects/" + p.ProjectID + "/locations/" + p.Location +} + +func asBigQueryDatasetExternal(parent *BigQueryDatasetParent, resourceID string) (external string) { + return parent.String() + "/datasets/" + resourceID +} + +func ParseBigQueryDatasetExternal(external string) (parent *BigQueryDatasetParent, resourceID string, err error) { + external = strings.TrimPrefix(external, "/") + tokens := strings.Split(external, "/") + if len(tokens) != 6 || tokens[0] != "projects" || tokens[2] != "locations" || tokens[4] != "datasets" { + return nil, "", fmt.Errorf("format of BigQueryDataset external=%q was not known (use projects//locations//datasets/)", external) + } + parent = &BigQueryDatasetParent{ + ProjectID: tokens[1], + Location: tokens[3], + } + resourceID = tokens[5] + return parent, resourceID, nil +} + +func valueOf[T any](t *T) T { + var zeroVal T + if t == nil { + return zeroVal + } + return *t +} diff --git a/apis/bigquery/v1beta1/dataset_types.go b/apis/bigquery/v1beta1/dataset_types.go index 3009c1048e..5555bc2cf4 100644 --- a/apis/bigquery/v1beta1/dataset_types.go +++ b/apis/bigquery/v1beta1/dataset_types.go @@ -25,7 +25,7 @@ var BigQueryDatasetGVK = GroupVersion.WithKind("BigQueryDataset") // NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. // BigQueryDatasetSpec defines the desired state of BigQueryDataset -// +kcc:proto=google.cloud.bigquery.v2.dataset +// +kcc:proto=google.cloud.bigquery.v2.Dataset type BigQueryDatasetSpec struct { // The BigQueryDataset name. If not given, the metadata.name will be used. ResourceID *string `json:"resourceID,omitempty"` @@ -94,15 +94,15 @@ type BigQueryDatasetSpec struct { // The geographic location where the dataset should reside. See // https://cloud.google.com/bigquery/docs/locations for supported // locations. - Location *string `json:"location,omitempty"` + // +required + Location *string `json:"location"` // Optional. Defines the time travel window in hours. The value can be from 48 // to 168 hours (2 to 7 days). The default value is 168 hours if this is not // set. MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty"` - // The project that this resource belongs to. - // optional. + // Optional. The project that this resource belongs to. ProjectRef *refs.ProjectRef `json:"projectRef,omitempty"` // Optional. Updates storage_billing_model for the dataset. @@ -122,6 +122,9 @@ type BigQueryDatasetStatus struct { // Output only. A hash of the resource. Etag *string `json:"etag,omitempty"` + // A unique specifier for the BigQueryAnalyticsHubDataExchangeListing resource in GCP. + ExternalRef *string `json:"externalRef,omitempty"` + // Output only. The date when this dataset was last modified, in milliseconds // since the epoch. 
LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` diff --git a/apis/bigquery/v1beta1/types.generated.go b/apis/bigquery/v1beta1/types.generated.go index dc76ed20c1..42b796f676 100644 --- a/apis/bigquery/v1beta1/types.generated.go +++ b/apis/bigquery/v1beta1/types.generated.go @@ -14,9 +14,7 @@ package v1beta1 -import ( - refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" -) +import refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" // +kcc:proto=google.cloud.bigquery.v2.Access type Access struct { @@ -85,688 +83,30 @@ type Access struct { Dataset *DatasetAccessEntry `json:"dataset,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.AggregationThresholdPolicy -type AggregationThresholdPolicy struct { - // Optional. The threshold for the "aggregation threshold" policy. - Threshold *int64 `json:"threshold,omitempty"` - - // Optional. The privacy unit column(s) associated with this policy. - // For now, only one column per data source object (table, view) is allowed as - // a privacy unit column. - // Representing as a repeated field in metadata for extensibility to - // multiple columns in future. - // Duplicates and Repeated struct fields are not allowed. - // For nested fields, use dot notation ("outer.inner") - PrivacyUnitColumns []string `json:"privacyUnitColumns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.AvroOptions -type AvroOptions struct { - // Optional. If sourceFormat is set to "AVRO", indicates whether to interpret - // logical types as the corresponding BigQuery data type (for example, - // TIMESTAMP), instead of using the raw type (for example, INTEGER). - UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BiEngineReason -type BiEngineReason struct { - // Output only. High-level BI Engine reason for partial or disabled - // acceleration - Code *string `json:"code,omitempty"` - - // Output only. Free form human-readable reason for partial or disabled - // acceleration. - Message *string `json:"message,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BiEngineStatistics -type BiEngineStatistics struct { - // Output only. Specifies which mode of BI Engine acceleration was performed - // (if any). - BiEngineMode *string `json:"biEngineMode,omitempty"` - - // Output only. Specifies which mode of BI Engine acceleration was performed - // (if any). - AccelerationMode *string `json:"accelerationMode,omitempty"` - - // In case of DISABLED or PARTIAL bi_engine_mode, these contain the - // explanatory reasons as to why BI Engine could not accelerate. - // In case the full query was accelerated, this field is not populated. - BiEngineReasons []BiEngineReason `json:"biEngineReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigLakeConfiguration -type BigLakeConfiguration struct { - // Required. The connection specifying the credentials to be used to read and - // write to external storage, such as Cloud Storage. The connection_id can - // have the form `{project}.{location}.{connection_id}` or - // `projects/{project}/locations/{location}/connections/{connection_id}". - ConnectionID *string `json:"connectionID,omitempty"` - - // Required. The fully qualified location prefix of the external folder where - // table data is stored. The '*' wildcard character is not allowed. The URI - // should be in the format `gs://bucket/path_to_table/` - StorageUri *string `json:"storageUri,omitempty"` - - // Required. 
The file format the table data is stored in. - FileFormat *string `json:"fileFormat,omitempty"` - - // Required. The table format the metadata only snapshots are stored in. - TableFormat *string `json:"tableFormat,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableColumn -type BigtableColumn struct { - // [Required] Qualifier of the column. - // Columns in the parent column family that has this exact qualifier are - // exposed as `.` field. - // If the qualifier is valid UTF-8 string, it can be specified in the - // qualifier_string field. Otherwise, a base-64 encoded value must be set to - // qualifier_encoded. - // The column field name is the same as the column qualifier. However, if the - // qualifier is not a valid BigQuery field identifier i.e. does not match - // [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name. - QualifierEncoded *byte `json:"qualifierEncoded,omitempty"` - - // Qualifier string. - QualifierString *string `json:"qualifierString,omitempty"` - - // Optional. If the qualifier is not a valid BigQuery field identifier i.e. - // does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided - // as the column field name and is used as field name in queries. - FieldName *string `json:"fieldName,omitempty"` - - // Optional. The type to convert the value in cells of this column. - // The values are expected to be encoded using HBase Bytes.toBytes function - // when using the BINARY encoding value. - // Following BigQuery types are allowed (case-sensitive): - // - // * BYTES - // * STRING - // * INTEGER - // * FLOAT - // * BOOLEAN - // * JSON - // - // Default type is BYTES. - // 'type' can also be set at the column family level. However, the setting at - // this level takes precedence if 'type' is set at both levels. - Type *string `json:"type,omitempty"` - - // Optional. The encoding of the values when the type is not STRING. - // Acceptable encoding values are: - // TEXT - indicates values are alphanumeric text strings. - // BINARY - indicates values are encoded using HBase Bytes.toBytes family of - // functions. - // 'encoding' can also be set at the column family level. However, the setting - // at this level takes precedence if 'encoding' is set at both levels. - Encoding *string `json:"encoding,omitempty"` - - // Optional. If this is set, only the latest version of value in this column - // are exposed. - // 'onlyReadLatest' can also be set at the column family level. However, the - // setting at this level takes precedence if 'onlyReadLatest' is set at both - // levels. - OnlyReadLatest *bool `json:"onlyReadLatest,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableColumnFamily -type BigtableColumnFamily struct { - // Identifier of the column family. - FamilyID *string `json:"familyID,omitempty"` - - // Optional. The type to convert the value in cells of this column family. - // The values are expected to be encoded using HBase Bytes.toBytes function - // when using the BINARY encoding value. - // Following BigQuery types are allowed (case-sensitive): - // - // * BYTES - // * STRING - // * INTEGER - // * FLOAT - // * BOOLEAN - // * JSON - // - // Default type is BYTES. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying a type for it. - Type *string `json:"type,omitempty"` - - // Optional. The encoding of the values when the type is not STRING. - // Acceptable encoding values are: - // TEXT - indicates values are alphanumeric text strings. 
- // BINARY - indicates values are encoded using HBase Bytes.toBytes family of - // functions. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying an encoding for it. - Encoding *string `json:"encoding,omitempty"` - - // Optional. Lists of columns that should be exposed as individual fields as - // opposed to a list of (column name, value) pairs. - // All columns whose qualifier matches a qualifier in this list can be - // accessed as `.`. - // Other columns can be accessed as a list through - // the `.Column` field. - Columns []BigtableColumn `json:"columns,omitempty"` - - // Optional. If this is set only the latest version of value are exposed for - // all columns in this column family. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying a different setting - // for that column. - OnlyReadLatest *bool `json:"onlyReadLatest,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableOptions -type BigtableOptions struct { - // Optional. List of column families to expose in the table schema along with - // their types. - // This list restricts the column families that can be referenced in queries - // and specifies their value types. - // You can use this list to do type conversions - see the 'type' field for - // more details. - // If you leave this list empty, all column families are present in the table - // schema and their values are read as BYTES. - // During a query only the column families referenced in that query are read - // from Bigtable. - ColumnFamilies []BigtableColumnFamily `json:"columnFamilies,omitempty"` - - // Optional. If field is true, then the column families that are not - // specified in columnFamilies list are not exposed in the table schema. - // Otherwise, they are read with BYTES type values. - // The default value is false. - IgnoreUnspecifiedColumnFamilies *bool `json:"ignoreUnspecifiedColumnFamilies,omitempty"` - - // Optional. If field is true, then the rowkey column families will be read - // and converted to string. Otherwise they are read with BYTES type values and - // users need to manually cast them with CAST if necessary. - // The default value is false. - ReadRowkeyAsString *bool `json:"readRowkeyAsString,omitempty"` - - // Optional. If field is true, then each column family will be read as a - // single JSON column. Otherwise they are read as a repeated cell structure - // containing timestamp/value tuples. The default value is false. - OutputColumnFamiliesAsJson *bool `json:"outputColumnFamiliesAsJson,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CloneDefinition -type CloneDefinition struct { - // Required. Reference describing the ID of the table that was cloned. - BaseTableReference *TableReference `json:"baseTableReference,omitempty"` - - // Required. The time at which the base table was cloned. This value is - // reported in the JSON response using RFC3339 format. - CloneTime *string `json:"cloneTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Clustering -type Clustering struct { - // One or more fields on which data should be clustered. Only top-level, - // non-repeated, simple-type fields are supported. The ordering of the - // clustering fields should be prioritized from most to least important - // for filtering purposes. 
- // - // Additional information on limitations can be found here: - // https://cloud.google.com/bigquery/docs/creating-clustered-tables#limitations - Fields []string `json:"fields,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ColumnReference -type ColumnReference struct { - // Required. The column that composes the foreign key. - ReferencingColumn *string `json:"referencingColumn,omitempty"` - - // Required. The column in the primary key that are referenced by the - // referencing_column. - ReferencedColumn *string `json:"referencedColumn,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ConnectionProperty -type ConnectionProperty struct { - // The key of the property to set. - Key *string `json:"key,omitempty"` - - // The value of the property to set. - Value *string `json:"value,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CopyJobStatistics -type CopyJobStatistics struct { - // Output only. Number of rows copied to the destination table. - CopiedRows *int64 `json:"copiedRows,omitempty"` - - // Output only. Number of logical bytes copied to the destination table. - CopiedLogicalBytes *int64 `json:"copiedLogicalBytes,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CsvOptions -type CsvOptions struct { - // Optional. The separator character for fields in a CSV file. The separator - // is interpreted as a single byte. For files encoded in ISO-8859-1, any - // single character can be used as a separator. For files encoded in UTF-8, - // characters represented in decimal range 1-127 (U+0001-U+007F) can be used - // without any modification. UTF-8 characters encoded with multiple bytes - // (i.e. U+0080 and above) will have only the first byte used for separating - // fields. The remaining bytes will be treated as a part of the field. - // BigQuery also supports the escape sequence "\t" (U+0009) to specify a tab - // separator. The default value is comma (",", U+002C). - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. The number of rows at the top of a CSV file that BigQuery will - // skip when reading the data. The default value is 0. This property is - // useful if you have header rows in the file that should be skipped. - // When autodetect is on, the behavior is the following: - // - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty"` - - // Optional. The value that is used to quote data sections in a CSV file. - // BigQuery converts the string to ISO-8859-1 encoding, and then uses the - // first byte of the encoded string to split the data in its raw, binary - // state. - // The default value is a float64-quote ("). - // If your data does not contain quoted sections, - // set the property value to an empty string. - // If your data contains quoted newline characters, you must also set the - // allowQuotedNewlines property to true. - // To include the specific quote character within a quoted value, precede it - // with an additional matching quote character. 
For example, if you want to - // escape the default character ' " ', use ' "" '. - Quote *string `json:"quote,omitempty"` - - // Optional. Indicates if BigQuery should allow quoted data sections that - // contain newline characters in a CSV file. The default value is false. - AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty"` - - // Optional. Indicates if BigQuery should accept rows that are missing - // trailing optional columns. If true, BigQuery treats missing trailing - // columns as null values. - // If false, records with missing trailing columns are treated as bad records, - // and if there are too many bad records, an invalid error is returned in the - // job result. The default value is false. - AllowJaggedRows *bool `json:"allowJaggedRows,omitempty"` - - // Optional. The character encoding of the data. - // The supported values are UTF-8, ISO-8859-1, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. - // BigQuery decodes the data after the raw, binary data has been split using - // the values of the quote and fieldDelimiter properties. - Encoding *string `json:"encoding,omitempty"` - - // Optional. Indicates if the embedded ASCII control characters (the first 32 - // characters in the ASCII-table, from '\x00' to '\x1F') are preserved. - PreserveAsciiControlCharacters *bool `json:"preserveAsciiControlCharacters,omitempty"` - - // Optional. Specifies a string that represents a null value in a CSV file. - // For example, if you specify "\N", BigQuery interprets "\N" as a null value - // when querying a CSV file. - // The default value is the empty string. If you set this property to a custom - // value, BigQuery throws an error if an empty string is present for all data - // types except for STRING and BYTE. For STRING and BYTE columns, BigQuery - // interprets the empty string as an empty value. - NullMarker *string `json:"nullMarker,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataFormatOptions -type DataFormatOptions struct { - // Optional. Output timestamp as usec int64. Default is false. - UseInt64Timestamp *bool `json:"useInt64Timestamp,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataMaskingStatistics -type DataMaskingStatistics struct { - // Whether any accessed data was protected by the data masking. - DataMaskingApplied *bool `json:"dataMaskingApplied,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataPolicyOption -type DataPolicyOption struct { - // Data policy resource name in the form of - // projects/project_id/locations/location_id/dataPolicies/data_policy_id. - Name *string `json:"name,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Dataset -type Dataset struct { - // Output only. The resource type. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of the resource. - Etag *string `json:"etag,omitempty"` - - // Output only. The fully-qualified unique name of the dataset in the format - // projectId:datasetId. The dataset name without the project name is given in - // the datasetId field. When creating a new dataset, leave this field blank, - // and instead specify the datasetId field. - ID *string `json:"id,omitempty"` - - // Output only. A URL that can be used to access the resource again. You can - // use this URL in Get or Update requests to the resource. - SelfLink *string `json:"selfLink,omitempty"` - - // Required. A reference that identifies the dataset. - DatasetReference *DatasetReference `json:"datasetReference,omitempty"` - - // Optional. 
A descriptive name for the dataset. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. A user-friendly description of the dataset. - Description *string `json:"description,omitempty"` - - // Optional. The default lifetime of all tables in the dataset, in - // milliseconds. The minimum lifetime value is 3600000 milliseconds (one - // hour). To clear an existing default expiration with a PATCH request, set to - // 0. Once this property is set, all newly-created tables in the dataset will - // have an expirationTime property set to the creation time plus the value in - // this property, and changing the value will only affect new tables, not - // existing ones. When the expirationTime for a given table is reached, that - // table will be deleted automatically. - // If a table's expirationTime is modified or removed before the table - // expires, or if you provide an explicit expirationTime when creating a - // table, that value takes precedence over the default expiration time - // indicated by this property. - DefaultTableExpirationMs *int64 `json:"defaultTableExpirationMs,omitempty"` - - // This default partition expiration, expressed in milliseconds. - // - // When new time-partitioned tables are created in a dataset where this - // property is set, the table will inherit this value, propagated as the - // `TimePartitioning.expirationMs` property on the new table. If you set - // `TimePartitioning.expirationMs` explicitly when creating a table, - // the `defaultPartitionExpirationMs` of the containing dataset is ignored. - // - // When creating a partitioned table, if `defaultPartitionExpirationMs` - // is set, the `defaultTableExpirationMs` value is ignored and the table - // will not be inherit a table expiration deadline. - DefaultPartitionExpirationMs *int64 `json:"defaultPartitionExpirationMs,omitempty"` - - // The labels associated with this dataset. You can use these - // to organize and group your datasets. - // You can set this property when inserting or updating a dataset. - // See [Creating and Updating Dataset - // Labels](https://cloud.google.com/bigquery/docs/creating-managing-labels#creating_and_updating_dataset_labels) - // for more information. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. An array of objects that define dataset access for one or more - // entities. You can set this property when inserting or updating a dataset in - // order to control who is allowed to access the data. If unspecified at - // dataset creation time, BigQuery adds default dataset access for the - // following entities: access.specialGroup: projectReaders; access.role: - // READER; access.specialGroup: projectWriters; access.role: WRITER; - // access.specialGroup: projectOwners; access.role: OWNER; - // access.userByEmail: [dataset creator email]; access.role: OWNER; - // If you patch a dataset, then this field is overwritten by the patched - // dataset's access field. To add entities, you must supply the entire - // existing access array in addition to any new entities that you want to add. - Access []Access `json:"access,omitempty"` - - // Output only. The time when this dataset was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The date when this dataset was last modified, in milliseconds - // since the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // The geographic location where the dataset should reside. 
See - // https://cloud.google.com/bigquery/docs/locations for supported - // locations. - Location *string `json:"location,omitempty"` - - // The default encryption key for all tables in the dataset. - // After this property is set, the encryption key of all newly-created tables - // in the dataset is set to this value unless the table creation request or - // query explicitly overrides the key. - DefaultEncryptionConfiguration *EncryptionConfiguration `json:"defaultEncryptionConfiguration,omitempty"` - - // Output only. Reserved for future use. - SatisfiesPzs *bool `json:"satisfiesPzs,omitempty"` - - // Output only. Reserved for future use. - SatisfiesPzi *bool `json:"satisfiesPzi,omitempty"` - - // Output only. Same as `type` in `ListFormatDataset`. - // The type of the dataset, one of: - // - // * DEFAULT - only accessible by owner and authorized accounts, - // * PUBLIC - accessible by everyone, - // * LINKED - linked dataset, - // * EXTERNAL - dataset with definition in external metadata catalog. - Type *string `json:"type,omitempty"` - - // Optional. The source dataset reference when the dataset is of type LINKED. - // For all other dataset types it is not set. This field cannot be updated - // once it is set. Any attempt to update this field using Update and Patch API - // Operations will be ignored. - LinkedDatasetSource *LinkedDatasetSource `json:"linkedDatasetSource,omitempty"` - - // Output only. Metadata about the LinkedDataset. Filled out when the dataset - // type is LINKED. - LinkedDatasetMetadata *LinkedDatasetMetadata `json:"linkedDatasetMetadata,omitempty"` - - // Optional. Reference to a read-only external dataset defined in data - // catalogs outside of BigQuery. Filled out when the dataset type is EXTERNAL. - ExternalDatasetReference *ExternalDatasetReference `json:"externalDatasetReference,omitempty"` - - // Optional. Options defining open source compatible datasets living in the - // BigQuery catalog. Contains metadata of open source database, schema or - // namespace represented by the current dataset. - ExternalCatalogDatasetOptions *ExternalCatalogDatasetOptions `json:"externalCatalogDatasetOptions,omitempty"` - - // Optional. TRUE if the dataset and its table names are case-insensitive, - // otherwise FALSE. By default, this is FALSE, which means the dataset and its - // table names are case-sensitive. This field does not affect routine - // references. - IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty"` - - // Optional. Defines the default collation specification of future tables - // created in the dataset. If a table is created in this dataset without - // table-level default collation, then the table inherits the dataset default - // collation, which is applied to the string fields that do not have explicit - // collation specified. A change to this field affects only tables created - // afterwards, and does not alter the existing tables. - // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - DefaultCollation *string `json:"defaultCollation,omitempty"` - - // Optional. Defines the default rounding mode specification of new tables - // created within this dataset. During table creation, if this field is - // specified, the table within this dataset will inherit the default rounding - // mode of the dataset. Setting the default rounding mode on a table overrides - // this option. Existing tables in the dataset are unaffected. 
- // If columns are defined during that table creation, - // they will immediately inherit the table's default rounding mode, - // unless otherwise specified. - DefaultRoundingMode *string `json:"defaultRoundingMode,omitempty"` - - // Optional. Defines the time travel window in hours. The value can be from 48 - // to 168 hours (2 to 7 days). The default value is 168 hours if this is not - // set. - MaxTimeTravelHours *int64 `json:"maxTimeTravelHours,omitempty"` - - // Output only. Tags for the dataset. To provide tags as inputs, use the - // `resourceTags` field. - Tags []GcpTag `json:"tags,omitempty"` - - // Optional. Updates storage_billing_model for the dataset. - StorageBillingModel *string `json:"storageBillingModel,omitempty"` - - // Optional. Output only. Restriction config for all tables and dataset. If - // set, restrict certain accesses on the dataset and all its tables based on - // the config. See [Data - // egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) - // for more details. - Restrictions *RestrictionConfig `json:"restrictions,omitempty"` - - // Optional. The [tags](https://cloud.google.com/bigquery/docs/tags) attached - // to this dataset. Tag keys are globally unique. Tag key is expected to be in - // the namespaced format, for example "123456789012/environment" where - // 123456789012 is the ID of the parent organization or project resource for - // this tag key. Tag value is expected to be the short name, for example - // "Production". See [Tag - // definitions](https://cloud.google.com/iam/docs/tags-access-control#definitions) - // for more details. - ResourceTags map[string]string `json:"resourceTags,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.DatasetAccessEntry type DatasetAccessEntry struct { // The dataset this entry applies to. - Dataset *DatasetReference `json:"dataset"` + // +required + Dataset *DatasetReference `json:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only // views are supported, but additional target types may be added in the // future. - TargetTypes []string `json:"targetTypes"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DatasetList -type DatasetList struct { - // Output only. The resource type. - // This property always returns the value "bigquery#datasetList" - Kind *string `json:"kind,omitempty"` - - // Output only. A hash value of the results page. You can use this property to - // determine if the page has changed since the last request. - Etag *string `json:"etag,omitempty"` - - // A token that can be used to request the next results page. This property is - // omitted on the final results page. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // An array of the dataset resources in the project. - // Each resource contains basic information. - // For full information about a particular dataset resource, use the Datasets: - // get method. This property is omitted when there are no datasets in the - // project. - Datasets []ListFormatDataset `json:"datasets,omitempty"` - - // A list of skipped locations that were unreachable. For more information - // about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations. Example: "europe-west5" - Unreachable []string `json:"unreachable,omitempty"` + // +required + TargetTypes []string `json:"targetTypes,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.DatasetReference type DatasetReference struct { - // Required. A unique ID for this dataset, without the project name. 
The ID + // A unique Id for this dataset, without the project name. The Id // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). // The maximum length is 1,024 characters. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the project containing this dataset. - ProjectId *string `json:"projectId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DestinationTableProperties -type DestinationTableProperties struct { - // Optional. Friendly name for the destination table. If the table already - // exists, it should be same as the existing friendly name. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. The description for the destination table. - // This will only be used if the destination table is newly created. - // If the table already exists and a value different than the current - // description is provided, the job will fail. - Description *string `json:"description,omitempty"` - - // Optional. The labels associated with this table. You can use these to - // organize and group your tables. This will only be used if the destination - // table is newly created. If the table already exists and labels are - // different than the current labels are provided, the job will fail. - Labels map[string]string `json:"labels,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DifferentialPrivacyPolicy -type DifferentialPrivacyPolicy struct { - // Optional. The maximum epsilon value that a query can consume. If the - // subscriber specifies epsilon as a parameter in a SELECT query, it must be - // less than or equal to this value. The epsilon parameter controls the amount - // of noise that is added to the groups — a higher epsilon means less noise. - MaxEpsilonPerQuery *float64 `json:"maxEpsilonPerQuery,omitempty"` - - // Optional. The delta value that is used per query. Delta represents the - // probability that any row will fail to be epsilon differentially private. - // Indicates the risk associated with exposing aggregate rows in the result of - // a query. - DeltaPerQuery *float64 `json:"deltaPerQuery,omitempty"` - - // Optional. The maximum groups contributed value that is used per query. - // Represents the maximum number of groups to which each protected entity can - // contribute. Changing this value does not improve or worsen privacy. The - // best value for accuracy and utility depends on the query and data. - MaxGroupsContributed *int64 `json:"maxGroupsContributed,omitempty"` - - // Optional. The privacy unit column associated with this policy. Differential - // privacy policies can only have one privacy unit column per data source - // object (table, view). - PrivacyUnitColumn *string `json:"privacyUnitColumn,omitempty"` - - // Optional. The total epsilon budget for all queries against the - // privacy-protected view. Each subscriber query against this view charges the - // amount of epsilon they request in their query. If there is sufficient - // budget, then the subscriber query attempts to complete. It might still fail - // due to other reasons, in which case the charge is refunded. If there is - // insufficient budget the query is rejected. There might be multiple charge - // attempts if a single query references multiple views. In this case there - // must be sufficient budget for all charges or the query is rejected and - // charges are refunded in best effort. 
The budget does not have a refresh - // policy and can only be updated via ALTER VIEW or circumvented by creating a - // new view that can be queried with a fresh budget. - EpsilonBudget *float64 `json:"epsilonBudget,omitempty"` - - // Optional. The total delta budget for all queries against the - // privacy-protected view. Each subscriber query against this view charges the - // amount of delta that is pre-defined by the contributor through the privacy - // policy delta_per_query field. If there is sufficient budget, then the - // subscriber query attempts to complete. It might still fail due to other - // reasons, in which case the charge is refunded. If there is insufficient - // budget the query is rejected. There might be multiple charge attempts if a - // single query references multiple views. In this case there must be - // sufficient budget for all charges or the query is rejected and charges are - // refunded in best effort. The budget does not have a refresh policy and can - // only be updated via ALTER VIEW or circumvented by creating a new view that - // can be queried with a fresh budget. - DeltaBudget *float64 `json:"deltaBudget,omitempty"` - - // Output only. The epsilon budget remaining. If budget is exhausted, no more - // queries are allowed. Note that the budget for queries that are in progress - // is deducted before the query executes. If the query fails or is cancelled - // then the budget is refunded. In this case the amount of budget remaining - // can increase. - EpsilonBudgetRemaining *float64 `json:"epsilonBudgetRemaining,omitempty"` - - // Output only. The delta budget remaining. If budget is exhausted, no more - // queries are allowed. Note that the budget for queries that are in progress - // is deducted before the query executes. If the query fails or is cancelled - // then the budget is refunded. In this case the amount of budget remaining - // can increase. - DeltaBudgetRemaining *float64 `json:"deltaBudgetRemaining,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DmlStats -type DmlStats struct { - // Output only. Number of inserted Rows. Populated by DML INSERT and MERGE - // statements - InsertedRowCount *int64 `json:"insertedRowCount,omitempty"` + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Output only. Number of deleted Rows. populated by DML DELETE, MERGE and - // TRUNCATE statements. - DeletedRowCount *int64 `json:"deletedRowCount,omitempty"` - - // Output only. Number of updated Rows. Populated by DML UPDATE and MERGE - // statements. - UpdatedRowCount *int64 `json:"updatedRowCount,omitempty"` + // The ID of the project containing this dataset. + // +required + ProjectId *string `json:"projectId,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.EncryptionConfiguration @@ -777,141 +117,6 @@ type EncryptionConfiguration struct { KmsKeyRef *refs.KMSCryptoKeyRef `json:"kmsKeyRef,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.ErrorProto -type ErrorProto struct { - // A short error code that summarizes the error. - Reason *string `json:"reason,omitempty"` - - // Specifies where the error occurred, if present. - Location *string `json:"location,omitempty"` - - // Debugging information. This property is internal to Google and should not - // be used. - DebugInfo *string `json:"debugInfo,omitempty"` - - // A human-readable description of the error. 
- Message *string `json:"message,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExplainQueryStage -type ExplainQueryStage struct { - // Human-readable name for the stage. - Name *string `json:"name,omitempty"` - - // Unique ID for the stage within the plan. - ID *int64 `json:"id,omitempty"` - - // Stage start time represented as milliseconds since the epoch. - StartMs *int64 `json:"startMs,omitempty"` - - // Stage end time represented as milliseconds since the epoch. - EndMs *int64 `json:"endMs,omitempty"` - - // IDs for stages that are inputs to this stage. - InputStages []int64 `json:"inputStages,omitempty"` - - // Relative amount of time the average shard spent waiting to be - // scheduled. - WaitRatioAvg *float64 `json:"waitRatioAvg,omitempty"` - - // Milliseconds the average shard spent waiting to be scheduled. - WaitMsAvg *int64 `json:"waitMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent waiting to be - // scheduled. - WaitRatioMax *float64 `json:"waitRatioMax,omitempty"` - - // Milliseconds the slowest shard spent waiting to be scheduled. - WaitMsMax *int64 `json:"waitMsMax,omitempty"` - - // Relative amount of time the average shard spent reading input. - ReadRatioAvg *float64 `json:"readRatioAvg,omitempty"` - - // Milliseconds the average shard spent reading input. - ReadMsAvg *int64 `json:"readMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent reading input. - ReadRatioMax *float64 `json:"readRatioMax,omitempty"` - - // Milliseconds the slowest shard spent reading input. - ReadMsMax *int64 `json:"readMsMax,omitempty"` - - // Relative amount of time the average shard spent on CPU-bound tasks. - ComputeRatioAvg *float64 `json:"computeRatioAvg,omitempty"` - - // Milliseconds the average shard spent on CPU-bound tasks. - ComputeMsAvg *int64 `json:"computeMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent on CPU-bound tasks. - ComputeRatioMax *float64 `json:"computeRatioMax,omitempty"` - - // Milliseconds the slowest shard spent on CPU-bound tasks. - ComputeMsMax *int64 `json:"computeMsMax,omitempty"` - - // Relative amount of time the average shard spent on writing output. - WriteRatioAvg *float64 `json:"writeRatioAvg,omitempty"` - - // Milliseconds the average shard spent on writing output. - WriteMsAvg *int64 `json:"writeMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent on writing output. - WriteRatioMax *float64 `json:"writeRatioMax,omitempty"` - - // Milliseconds the slowest shard spent on writing output. - WriteMsMax *int64 `json:"writeMsMax,omitempty"` - - // Total number of bytes written to shuffle. - ShuffleOutputBytes *int64 `json:"shuffleOutputBytes,omitempty"` - - // Total number of bytes written to shuffle and spilled to disk. - ShuffleOutputBytesSpilled *int64 `json:"shuffleOutputBytesSpilled,omitempty"` - - // Number of records read into the stage. - RecordsRead *int64 `json:"recordsRead,omitempty"` - - // Number of records written by the stage. - RecordsWritten *int64 `json:"recordsWritten,omitempty"` - - // Number of parallel input segments to be processed - ParallelInputs *int64 `json:"parallelInputs,omitempty"` - - // Number of parallel input segments completed. - CompletedParallelInputs *int64 `json:"completedParallelInputs,omitempty"` - - // Current status for this stage. - Status *string `json:"status,omitempty"` - - // List of operations within the stage in dependency order (approximately - // chronological). 
- Steps []ExplainQueryStep `json:"steps,omitempty"` - - // Slot-milliseconds used by the stage. - SlotMs *int64 `json:"slotMs,omitempty"` - - // Output only. Compute mode for this stage. - ComputeMode *string `json:"computeMode,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExplainQueryStep -type ExplainQueryStep struct { - // Machine-readable operation type. - Kind *string `json:"kind,omitempty"` - - // Human-readable description of the step(s). - Substeps []string `json:"substeps,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExportDataStatistics -type ExportDataStatistics struct { - // Number of destination files generated in case of EXPORT DATA - // statement only. - FileCount *int64 `json:"fileCount,omitempty"` - - // [Alpha] Number of destination rows generated in case of EXPORT DATA - // statement only. - RowCount *int64 `json:"rowCount,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.ExternalCatalogDatasetOptions type ExternalCatalogDatasetOptions struct { // Optional. A map of key value pairs defining the parameters and properties @@ -924,240 +129,16 @@ type ExternalCatalogDatasetOptions struct { DefaultStorageLocationUri *string `json:"defaultStorageLocationUri,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.ExternalCatalogTableOptions -type ExternalCatalogTableOptions struct { - // Optional. A map of key value pairs defining the parameters and properties - // of the open source table. Corresponds with hive meta store table - // parameters. Maximum size of 4Mib. - Parameters map[string]string `json:"parameters,omitempty"` - - // Optional. A storage descriptor containing information about the physical - // storage of this table. - StorageDescriptor *StorageDescriptor `json:"storageDescriptor,omitempty"` - - // Optional. The connection specifying the credentials to be used to read - // external storage, such as Azure Blob, Cloud Storage, or S3. The connection - // is needed to read the open source table from BigQuery Engine. The - // connection_id can have the form - // `..` or - // `projects//locations//connections/`. - ConnectionID *string `json:"connectionID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExternalDataConfiguration -type ExternalDataConfiguration struct { - // [Required] The fully-qualified URIs that point to your data in Google - // Cloud. For Google Cloud Storage URIs: - // Each URI can contain one '*' wildcard character and it must come after - // the 'bucket' name. - // Size limits related to load jobs apply to external data sources. - // For Google Cloud Bigtable URIs: - // Exactly one URI can be specified and it has be a fully specified and - // valid HTTPS URL for a Google Cloud Bigtable table. - // For Google Cloud Datastore backups, exactly one URI can be specified. Also, - // the '*' wildcard character is not allowed. - SourceUris []string `json:"sourceUris,omitempty"` - - // Optional. Specifies how source URIs are interpreted for constructing the - // file set to load. By default source URIs are expanded against the - // underlying storage. Other options include specifying manifest files. Only - // applicable to object storage systems. - FileSetSpecType *string `json:"fileSetSpecType,omitempty"` - - // Optional. The schema for the data. - // Schema is required for CSV and JSON formats if autodetect is not on. - // Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, - // Avro, ORC and Parquet formats. - Schema *TableSchema `json:"schema,omitempty"` - - // [Required] The data format. 
- // For CSV files, specify "CSV". - // For Google sheets, specify "GOOGLE_SHEETS". - // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". - // For Avro files, specify "AVRO". - // For Google Cloud Datastore backups, specify "DATASTORE_BACKUP". - // For Apache Iceberg tables, specify "ICEBERG". - // For ORC files, specify "ORC". - // For Parquet files, specify "PARQUET". - // [Beta] For Google Cloud Bigtable, specify "BIGTABLE". - SourceFormat *string `json:"sourceFormat,omitempty"` - - // Optional. The maximum number of bad records that BigQuery can ignore when - // reading data. If the number of bad records exceeds this value, an invalid - // error is returned in the job result. The default value is 0, which requires - // that all records are valid. This setting is ignored for Google Cloud - // Bigtable, Google Cloud Datastore backups, Avro, ORC and Parquet formats. - MaxBadRecords *int32 `json:"maxBadRecords,omitempty"` - - // Try to detect schema and format options automatically. - // Any option specified explicitly will be honored. - Autodetect *bool `json:"autodetect,omitempty"` - - // Optional. Indicates if BigQuery should allow extra values that are not - // represented in the table schema. - // If true, the extra values are ignored. - // If false, records with extra columns are treated as bad records, and if - // there are too many bad records, an invalid error is returned in the job - // result. - // The default value is false. - // The sourceFormat property determines what BigQuery treats as an extra - // value: - // CSV: Trailing columns - // JSON: Named values that don't match any column names - // Google Cloud Bigtable: This setting is ignored. - // Google Cloud Datastore backups: This setting is ignored. - // Avro: This setting is ignored. - // ORC: This setting is ignored. - // Parquet: This setting is ignored. - IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty"` - - // Optional. The compression type of the data source. - // Possible values include GZIP and NONE. The default value is NONE. - // This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore - // backups, Avro, ORC and Parquet - // formats. An empty string is an invalid value. - Compression *string `json:"compression,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to CSV. - CsvOptions *CsvOptions `json:"csvOptions,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to JSON. - JsonOptions *JsonOptions `json:"jsonOptions,omitempty"` - - // Optional. Additional options if sourceFormat is set to BIGTABLE. - BigtableOptions *BigtableOptions `json:"bigtableOptions,omitempty"` - - // Optional. Additional options if sourceFormat is set to GOOGLE_SHEETS. - GoogleSheetsOptions *GoogleSheetsOptions `json:"googleSheetsOptions,omitempty"` - - // Optional. When set, configures hive partitioning support. Not all storage - // formats support hive partitioning -- requesting hive partitioning on an - // unsupported format will lead to an error, as will providing an invalid - // specification. - HivePartitioningOptions *HivePartitioningOptions `json:"hivePartitioningOptions,omitempty"` - - // Optional. The connection specifying the credentials to be used to read - // external storage, such as Azure Blob, Cloud Storage, or S3. The - // connection_id can have the form - // `{project_id}.{location_id};{connection_id}` or - // `projects/{project_id}/locations/{location_id}/connections/{connection_id}`. 
- ConnectionID *string `json:"connectionID,omitempty"` - - // Defines the list of possible SQL data types to which the source decimal - // values are converted. This list and the precision and the scale parameters - // of the decimal field determine the target type. In the order of NUMERIC, - // BIGNUMERIC, and STRING, a - // type is picked if it is in the specified list and if it supports the - // precision and the scale. STRING supports all precision and scale values. - // If none of the listed types supports the precision and the scale, the type - // supporting the widest range in the specified list is picked, and if a value - // exceeds the supported range when reading the data, an error will be thrown. - // - // Example: Suppose the value of this field is ["NUMERIC", "BIGNUMERIC"]. - // If (precision,scale) is: - // - // * (38,9) -> NUMERIC; - // * (39,9) -> BIGNUMERIC (NUMERIC cannot hold 30 integer digits); - // * (38,10) -> BIGNUMERIC (NUMERIC cannot hold 10 fractional digits); - // * (76,38) -> BIGNUMERIC; - // * (77,38) -> BIGNUMERIC (error if value exeeds supported range). - // - // This field cannot contain duplicate types. The order of the types in this - // field is ignored. For example, ["BIGNUMERIC", "NUMERIC"] is the same as - // ["NUMERIC", "BIGNUMERIC"] and NUMERIC always takes precedence over - // BIGNUMERIC. - // - // Defaults to ["NUMERIC", "STRING"] for ORC and ["NUMERIC"] for the other - // file formats. - DecimalTargetTypes []string `json:"decimalTargetTypes,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to AVRO. - AvroOptions *AvroOptions `json:"avroOptions,omitempty"` - - // Optional. Load option to be used together with source_format - // newline-delimited JSON to indicate that a variant of JSON is being loaded. - // To load newline-delimited GeoJSON, specify GEOJSON (and source_format must - // be set to NEWLINE_DELIMITED_JSON). - JsonExtension *string `json:"jsonExtension,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to PARQUET. - ParquetOptions *ParquetOptions `json:"parquetOptions,omitempty"` - - // Optional. ObjectMetadata is used to create Object Tables. Object Tables - // contain a listing of objects (with their metadata) found at the - // source_uris. If ObjectMetadata is set, source_format should be omitted. - // - // Currently SIMPLE is the only supported Object Metadata type. - ObjectMetadata *string `json:"objectMetadata,omitempty"` - - // Optional. When creating an external table, the user can provide a reference - // file with the table schema. This is enabled for the following formats: - // AVRO, PARQUET, ORC. - ReferenceFileSchemaUri *string `json:"referenceFileSchemaUri,omitempty"` - - // Optional. Metadata Cache Mode for the table. Set this to enable caching of - // metadata from external data source. - MetadataCacheMode *string `json:"metadataCacheMode,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.ExternalDatasetReference type ExternalDatasetReference struct { - // Required. External source that backs this dataset. - ExternalSource *string `json:"externalSource"` + // +required. External source that backs this dataset. + ExternalSource *string `json:"externalSource,omitempty"` - // Required. The connection id that is used to access the external_source. + // +required. The connection id that is used to access the external_source. 
// // Format: // projects/{project_id}/locations/{location_id}/connections/{connection_id} - Connection *string `json:"connection"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExternalServiceCost -type ExternalServiceCost struct { - // External service name. - ExternalService *string `json:"externalService,omitempty"` - - // External service cost in terms of bigquery bytes processed. - BytesProcessed *int64 `json:"bytesProcessed,omitempty"` - - // External service cost in terms of bigquery bytes billed. - BytesBilled *int64 `json:"bytesBilled,omitempty"` - - // External service cost in terms of bigquery slot milliseconds. - SlotMs *int64 `json:"slotMs,omitempty"` - - // Non-preemptable reserved slots used for external job. - // For example, reserved slots for Cloua AI Platform job are the VM usages - // converted to BigQuery slot with equivalent mount of price. - ReservedSlotCount *int64 `json:"reservedSlotCount,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignKey -type ForeignKey struct { - // Optional. Set only if the foreign key constraint is named. - Name *string `json:"name,omitempty"` - - // Required. The table that holds the primary key and is referenced by this - // foreign key. - ReferencedTable *TableReference `json:"referencedTable,omitempty"` - - // Required. The columns that compose the foreign key. - ColumnReferences []ColumnReference `json:"columnReferences,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignTypeInfo -type ForeignTypeInfo struct { - // Required. Specifies the system which defines the foreign data type. - TypeSystem *string `json:"typeSystem,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignViewDefinition -type ForeignViewDefinition struct { - // Required. The query that defines the view. - Query *string `json:"query,omitempty"` - - // Optional. Represents the dialect of the query. - Dialect *string `json:"dialect,omitempty"` + Connection *string `json:"connection,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.GcpTag @@ -1170,4038 +151,58 @@ type GcpTag struct { TagValue *string `json:"tagValue,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.GoogleSheetsOptions -type GoogleSheetsOptions struct { - // Optional. The number of rows at the top of a sheet that BigQuery will skip - // when reading the data. The default value is 0. This property is useful if - // you have header rows that should be skipped. When autodetect is on, - // the behavior is the following: - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty"` - - // Optional. Range of a sheet to query from. Only used when non-empty. - // Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id - // For example: sheet1!A1:B20 - Range *string `json:"range,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.LinkedDatasetSource +type LinkedDatasetSource struct { + // The source dataset reference contains project numbers and not project ids. 
+ SourceDataset *DatasetReference `json:"sourceDataset,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.HighCardinalityJoin -type HighCardinalityJoin struct { - // Output only. Count of left input rows. - LeftRows *int64 `json:"leftRows,omitempty"` - - // Output only. Count of right input rows. - RightRows *int64 `json:"rightRows,omitempty"` - - // Output only. Count of the output rows. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. The index of the join operator in the ExplainQueryStep lists. - StepIndex *int32 `json:"stepIndex,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.RestrictionConfig +type RestrictionConfig struct { + // Output only. Specifies the type of dataset/table restriction. + Type *string `json:"type,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.HivePartitioningOptions -type HivePartitioningOptions struct { - // Optional. When set, what mode of hive partitioning to use when reading - // data. The following modes are supported: - // - // * AUTO: automatically infer partition key name(s) and type(s). - // - // * STRINGS: automatically infer partition key name(s). All types are - // strings. - // - // * CUSTOM: partition key schema is encoded in the source URI prefix. - // - // Not all storage formats support hive partitioning. Requesting hive - // partitioning on an unsupported format will lead to an error. - // Currently supported formats are: JSON, CSV, ORC, Avro and Parquet. - Mode *string `json:"mode,omitempty"` - - // Optional. When hive partition detection is requested, a common prefix for - // all source uris must be required. The prefix must end immediately before - // the partition key encoding begins. For example, consider files following - // this data layout: - // - // gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro - // - // gs://bucket/path_to_table/dt=2019-05-31/country=CA/id=3/file.avro - // - // When hive partitioning is requested with either AUTO or STRINGS detection, - // the common prefix can be either of gs://bucket/path_to_table or - // gs://bucket/path_to_table/. - // - // CUSTOM detection requires encoding the partitioning schema immediately - // after the common prefix. For CUSTOM, any of - // - // * gs://bucket/path_to_table/{dt:DATE}/{country:STRING}/{id:INTEGER} - // - // * gs://bucket/path_to_table/{dt:STRING}/{country:STRING}/{id:INTEGER} - // - // * gs://bucket/path_to_table/{dt:DATE}/{country:STRING}/{id:STRING} - // - // would all be valid source URI prefixes. - SourceUriPrefix *string `json:"sourceUriPrefix,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.RoutineReference +type RoutineReference struct { + // The ID of the project containing this routine. + // +required + ProjectId *string `json:"projectId,omitempty"` - // Optional. If set to true, queries over this table require a partition - // filter that can be used for partition elimination to be specified. - // - // Note that this field should only be true when creating a permanent - // external table or querying a temporary external table. - // - // Hive-partitioned loads with require_partition_filter explicitly set to - // true will fail. - RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"` + // The ID of the dataset containing this routine. + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Output only. For permanent external tables, this field is populated with - // the hive partition keys in the order they were inferred. 
The types of the - // partition keys can be deduced by checking the table schema (which will - // include the partition keys). Not every API will populate this field in the - // output. For example, Tables.Get will populate it, but Tables.List will not - // contain this field. - Fields []string `json:"fields,omitempty"` + // The Id of the routine. The Id must contain only + // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum + // length is 256 characters. + // +required + RoutineId *string `json:"routineId,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.IndexUnusedReason -type IndexUnusedReason struct { - // Specifies the high-level reason for the scenario when no search index was - // used. - Code *string `json:"code,omitempty"` - - // Free form human-readable reason for the scenario when no search index was - // used. - Message *string `json:"message,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.TableReference +type TableReference struct { + // The ID of the project containing this table. + // +required + ProjectId *string `json:"projectId,omitempty"` - // Specifies the base table involved in the reason that no search index was - // used. - BaseTable *TableReference `json:"baseTable,omitempty"` + // The ID of the dataset containing this table. + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Specifies the name of the unused search index, if available. - IndexName *string `json:"indexName,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.InputDataChange -type InputDataChange struct { - // Output only. Records read difference percentage compared to a previous run. - RecordsReadDiffPercentage *float64 `json:"recordsReadDiffPercentage,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Job -type Job struct { - // Output only. The type of the resource. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Output only. Opaque ID field of the job. - ID *string `json:"id,omitempty"` - - // Output only. A URL that can be used to access the resource again. - SelfLink *string `json:"selfLink,omitempty"` - - // Output only. Email address of the user who ran the job. - UserEmail *string `json:"userEmail,omitempty"` - - // Required. Describes the job configuration. - Configuration *JobConfiguration `json:"configuration,omitempty"` - - // Optional. Reference describing the unique-per-user name of the job. - JobReference *JobReference `json:"jobReference,omitempty"` - - // Output only. Information about the job, including starting time and ending - // time of the job. - Statistics *JobStatistics `json:"statistics,omitempty"` - - // Output only. The status of this job. Examine this value when polling an - // asynchronous job to see if the job is complete. - Status *JobStatus `json:"status,omitempty"` - - // Output only. [Full-projection-only] String representation of identity of - // requesting party. Populated for both first- and third-party identities. - // Only present for APIs that support third-party identities. - PrincipalSubject *string `json:"principalSubject,omitempty"` - - // Output only. The reason why a Job was created. - // [Preview](https://cloud.google.com/products/#product-launch-stages) - JobCreationReason *JobCreationReason `json:"jobCreationReason,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfiguration -type JobConfiguration struct { - // Output only. The type of the job. Can be QUERY, LOAD, EXTRACT, COPY or - // UNKNOWN. 
- JobType *string `json:"jobType,omitempty"` - - // [Pick one] Configures a query job. - Query *JobConfigurationQuery `json:"query,omitempty"` - - // [Pick one] Configures a load job. - Load *JobConfigurationLoad `json:"load,omitempty"` - - // [Pick one] Copies a table. - Copy *JobConfigurationTableCopy `json:"copy,omitempty"` - - // [Pick one] Configures an extract job. - Extract *JobConfigurationExtract `json:"extract,omitempty"` - - // Optional. If set, don't actually run this job. A valid query will return - // a mostly empty response with some processing statistics, while an invalid - // query will return the same error it would if it wasn't a dry run. Behavior - // of non-query jobs is undefined. - DryRun *bool `json:"dryRun,omitempty"` - - // Optional. Job timeout in milliseconds. If this time limit is exceeded, - // BigQuery will attempt to stop a longer job, but may not always succeed in - // canceling it before the job completes. For example, a job that takes more - // than 60 seconds to complete has a better chance of being stopped than a job - // that takes 10 seconds to complete. - JobTimeoutMs *int64 `json:"jobTimeoutMs,omitempty"` - - // The labels associated with this job. You can use these to organize and - // group your jobs. - // Label keys and values can be no longer than 63 characters, can only contain - // lowercase letters, numeric characters, underscores and dashes. - // International characters are allowed. Label values are optional. Label - // keys must start with a letter and each label in the list must have a - // different key. - Labels map[string]string `json:"labels,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationExtract -type JobConfigurationExtract struct { - // A reference to the table being exported. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // A reference to the model being exported. - SourceModel *ModelReference `json:"sourceModel,omitempty"` - - // [Pick one] A list of fully-qualified Google Cloud Storage URIs where the - // extracted table should be written. - DestinationUris []string `json:"destinationUris,omitempty"` - - // Optional. Whether to print out a header row in the results. - // Default is true. Not applicable when extracting models. - PrintHeader *bool `json:"printHeader,omitempty"` - - // Optional. When extracting data in CSV format, this defines the - // delimiter to use between fields in the exported data. - // Default is ','. Not applicable when extracting models. - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. The exported file format. Possible values include CSV, - // NEWLINE_DELIMITED_JSON, PARQUET, or AVRO for tables and ML_TF_SAVED_MODEL - // or ML_XGBOOST_BOOSTER for models. The default value for tables is CSV. - // Tables with nested or repeated fields cannot be exported as CSV. The - // default value for models is ML_TF_SAVED_MODEL. - DestinationFormat *string `json:"destinationFormat,omitempty"` - - // Optional. The compression type to use for exported files. Possible values - // include DEFLATE, GZIP, NONE, SNAPPY, and ZSTD. The default value is NONE. - // Not all compression formats are support for all file formats. DEFLATE is - // only supported for Avro. ZSTD is only supported for Parquet. Not applicable - // when extracting models. - Compression *string `json:"compression,omitempty"` - - // Whether to use logical types when extracting to AVRO format. Not applicable - // when extracting models. 
- UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` - - // Optional. Model extract options only applicable when extracting models. - ModelExtractOptions *JobConfigurationExtract_ModelExtractOptions `json:"modelExtractOptions,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationExtract.ModelExtractOptions -type JobConfigurationExtract_ModelExtractOptions struct { - // The 1-based ID of the trial to be exported from a hyperparameter tuning - // model. If not specified, the trial with id = - // [Model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId - // is exported. This field is ignored for models not trained with - // hyperparameter tuning. - TrialID *int64 `json:"trialID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationLoad -type JobConfigurationLoad struct { - // [Required] The fully-qualified URIs that point to your data in Google - // Cloud. - // For Google Cloud Storage URIs: - // Each URI can contain one '*' wildcard character and it must come after - // the 'bucket' name. Size limits related to load jobs apply to external - // data sources. - // For Google Cloud Bigtable URIs: - // Exactly one URI can be specified and it has be a fully specified and - // valid HTTPS URL for a Google Cloud Bigtable table. - // For Google Cloud Datastore backups: - // Exactly one URI can be specified. Also, the '*' wildcard character is not - // allowed. - SourceUris []string `json:"sourceUris,omitempty"` - - // Optional. Specifies how source URIs are interpreted for constructing the - // file set to load. By default, source URIs are expanded against the - // underlying storage. You can also specify manifest files to control how the - // file set is constructed. This option is only applicable to object storage - // systems. - FileSetSpecType *string `json:"fileSetSpecType,omitempty"` - - // Optional. The schema for the destination table. The schema can be - // omitted if the destination table already exists, or if you're loading data - // from Google Cloud Datastore. - Schema *TableSchema `json:"schema,omitempty"` - - // [Required] The destination table to load the data into. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // Optional. [Experimental] Properties with which to create the destination - // table if it is new. - DestinationTableProperties *DestinationTableProperties `json:"destinationTableProperties,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // data, removes the constraints and uses the schema from the load job. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. - // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. 
- // - // The default value is WRITE_APPEND. - // Each action is atomic and only occurs if BigQuery is able to complete the - // job successfully. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Optional. Specifies a string that represents a null value in a CSV file. - // For example, if you specify "\N", BigQuery interprets "\N" as a null value - // when loading a CSV file. - // The default value is the empty string. If you set this property to a custom - // value, BigQuery throws an error if an empty string is present for all data - // types except for STRING and BYTE. For STRING and BYTE columns, BigQuery - // interprets the empty string as an empty value. - NullMarker *string `json:"nullMarker,omitempty"` - - // Optional. The separator character for fields in a CSV file. The separator - // is interpreted as a single byte. For files encoded in ISO-8859-1, any - // single character can be used as a separator. For files encoded in UTF-8, - // characters represented in decimal range 1-127 (U+0001-U+007F) can be used - // without any modification. UTF-8 characters encoded with multiple bytes - // (i.e. U+0080 and above) will have only the first byte used for separating - // fields. The remaining bytes will be treated as a part of the field. - // BigQuery also supports the escape sequence "\t" (U+0009) to specify a tab - // separator. The default value is comma (",", U+002C). - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. The number of rows at the top of a CSV file that BigQuery will - // skip when loading the data. The default value is 0. This property is useful - // if you have header rows in the file that should be skipped. When autodetect - // is on, the behavior is the following: - // - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int32 `json:"skipLeadingRows,omitempty"` - - // Optional. The character encoding of the data. - // The supported values are UTF-8, ISO-8859-1, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. BigQuery decodes the data after - // the raw, binary data has been split using the values of the `quote` and - // `fieldDelimiter` properties. - // - // If you don't specify an encoding, or if you specify a UTF-8 encoding when - // the CSV file is not UTF-8 encoded, BigQuery attempts to convert the data to - // UTF-8. Generally, your data loads successfully, but it may not match - // byte-for-byte what you expect. To avoid this, specify the correct encoding - // by using the `--encoding` flag. - // - // If BigQuery can't convert a character other than the ASCII `0` character, - // BigQuery converts the character to the standard Unicode replacement - // character: �. - Encoding *string `json:"encoding,omitempty"` - - // Optional. The value that is used to quote data sections in a CSV file. 
- // BigQuery converts the string to ISO-8859-1 encoding, and then uses the
- // first byte of the encoded string to split the data in its raw, binary
- // state.
- // The default value is a double quote ('"').
- // If your data does not contain quoted sections, set the property value to an
- // empty string.
- // If your data contains quoted newline characters, you must also set the
- // allowQuotedNewlines property to true.
- // To include the specific quote character within a quoted value, precede it
- // with an additional matching quote character. For example, if you want to
- // escape the default character ' " ', use ' "" '.
- // @default "
- Quote *string `json:"quote,omitempty"`
-
- // Optional. The maximum number of bad records that BigQuery can ignore when
- // running the job. If the number of bad records exceeds this value, an
- // invalid error is returned in the job result.
- // The default value is 0, which requires that all records are valid.
- // This is only supported for CSV and NEWLINE_DELIMITED_JSON file formats.
- MaxBadRecords *int32 `json:"maxBadRecords,omitempty"`
-
- // Indicates if BigQuery should allow quoted data sections that contain
- // newline characters in a CSV file. The default value is false.
- AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty"`
-
- // Optional. The format of the data files.
- // For CSV files, specify "CSV". For datastore backups,
- // specify "DATASTORE_BACKUP". For newline-delimited JSON,
- // specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO".
- // For parquet, specify "PARQUET". For orc, specify "ORC".
- // The default value is CSV.
- SourceFormat *string `json:"sourceFormat,omitempty"`
-
- // Optional. Accept rows that are missing trailing optional columns.
- // The missing values are treated as nulls.
- // If false, records with missing trailing columns are treated as bad records,
- // and if there are too many bad records, an invalid error is returned in the
- // job result.
- // The default value is false.
- // Only applicable to CSV, ignored for other formats.
- AllowJaggedRows *bool `json:"allowJaggedRows,omitempty"`
-
- // Optional. Indicates if BigQuery should allow extra values that are not
- // represented in the table schema.
- // If true, the extra values are ignored.
- // If false, records with extra columns are treated as bad records, and if
- // there are too many bad records, an invalid error is returned in the job
- // result. The default value is false.
- // The sourceFormat property determines what BigQuery treats as an extra
- // value:
- // CSV: Trailing columns
- // JSON: Named values that don't match any column names in the table schema
- // Avro, Parquet, ORC: Fields in the file schema that don't exist in the
- // table schema.
- IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty"`
-
- // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity
- // properties to load into BigQuery from a Cloud Datastore backup. Property
- // names are case sensitive and must be top-level properties. If no properties
- // are specified, BigQuery loads all properties. If any named property isn't
- // found in the Cloud Datastore backup, an invalid error is returned in the
- // job result.
- ProjectionFields []string `json:"projectionFields,omitempty"`
-
- // Optional. Indicates if we should automatically infer the options and
- // schema for CSV and JSON sources.
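The CSV options above (quote, maxBadRecords, allowQuotedNewlines, skipLeadingRows, sourceFormat) combine into a single load configuration. A minimal sketch of how such a configuration could be populated with these generated types; the import path, the alias bqkrm, the ptr helper, and the example values are assumptions for illustration and are not part of this patch:

// Illustrative sketch only; the import path and ptr helper are assumed,
// not part of this patch.
package example

import bqkrm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" // assumed path

func ptr[T any](v T) *T { return &v }

// A CSV load that skips one header row, keeps the default double-quote
// character, tolerates quoted newlines, and allows up to 10 bad records.
var csvLoad = bqkrm.JobConfigurationLoad{
    SourceUris:          []string{"gs://example-bucket/data/*.csv"},
    SourceFormat:        ptr("CSV"),
    FieldDelimiter:      ptr(","),
    Quote:               ptr(`"`),
    AllowQuotedNewlines: ptr(true),
    SkipLeadingRows:     ptr(int32(1)),
    MaxBadRecords:       ptr(int32(10)),
}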
- Autodetect *bool `json:"autodetect,omitempty"` - - // Allows the schema of the destination table to be updated as a side effect - // of the load job if a schema is autodetected or supplied in the job - // configuration. - // Schema update options are supported in two cases: - // when writeDisposition is WRITE_APPEND; - // when writeDisposition is WRITE_TRUNCATE and the destination table is a - // partition of a table, specified by partition decorators. For normal tables, - // WRITE_TRUNCATE will always overwrite the schema. - // One or more of the following values are specified: - // - // * ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. - // * ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original - // schema to nullable. - SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty"` - - // Time-based partitioning specification for the destination table. Only one - // of timePartitioning and rangePartitioning should be specified. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // Range partitioning specification for the destination table. - // Only one of timePartitioning and rangePartitioning should be specified. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the destination table. - Clustering *Clustering `json:"clustering,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys) - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Optional. If sourceFormat is set to "AVRO", indicates whether to interpret - // logical types as the corresponding BigQuery data type (for example, - // TIMESTAMP), instead of using the raw type (for example, INTEGER). - UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` - - // Optional. The user can provide a reference file with the reader schema. - // This file is only loaded if it is part of source URIs, but is not loaded - // otherwise. It is enabled for the following formats: AVRO, PARQUET, ORC. - ReferenceFileSchemaUri *string `json:"referenceFileSchemaUri,omitempty"` - - // Optional. When set, configures hive partitioning support. - // Not all storage formats support hive partitioning -- requesting hive - // partitioning on an unsupported format will lead to an error, as will - // providing an invalid specification. - HivePartitioningOptions *HivePartitioningOptions `json:"hivePartitioningOptions,omitempty"` - - // Defines the list of possible SQL data types to which the source decimal - // values are converted. This list and the precision and the scale parameters - // of the decimal field determine the target type. In the order of NUMERIC, - // BIGNUMERIC, and STRING, a - // type is picked if it is in the specified list and if it supports the - // precision and the scale. STRING supports all precision and scale values. - // If none of the listed types supports the precision and the scale, the type - // supporting the widest range in the specified list is picked, and if a value - // exceeds the supported range when reading the data, an error will be thrown. - // - // Example: Suppose the value of this field is ["NUMERIC", "BIGNUMERIC"]. 
- // If (precision,scale) is:
- //
- // * (38,9) -> NUMERIC;
- // * (39,9) -> BIGNUMERIC (NUMERIC cannot hold 30 integer digits);
- // * (38,10) -> BIGNUMERIC (NUMERIC cannot hold 10 fractional digits);
- // * (76,38) -> BIGNUMERIC;
- // * (77,38) -> BIGNUMERIC (error if value exceeds supported range).
- //
- // This field cannot contain duplicate types. The order of the types in this
- // field is ignored. For example, ["BIGNUMERIC", "NUMERIC"] is the same as
- // ["NUMERIC", "BIGNUMERIC"] and NUMERIC always takes precedence over
- // BIGNUMERIC.
- //
- // Defaults to ["NUMERIC", "STRING"] for ORC and ["NUMERIC"] for the other
- // file formats.
- DecimalTargetTypes []string `json:"decimalTargetTypes,omitempty"`
-
- // Optional. Load option to be used together with source_format
- // newline-delimited JSON to indicate that a variant of JSON is being loaded.
- // To load newline-delimited GeoJSON, specify GEOJSON (and source_format must
- // be set to NEWLINE_DELIMITED_JSON).
- JsonExtension *string `json:"jsonExtension,omitempty"`
-
- // Optional. Additional properties to set if sourceFormat is set to PARQUET.
- ParquetOptions *ParquetOptions `json:"parquetOptions,omitempty"`
-
- // Optional. When sourceFormat is set to "CSV", this indicates whether the
- // embedded ASCII control characters (the first 32 characters in the
- // ASCII-table, from
- // '\x00' to '\x1F') are preserved.
- PreserveAsciiControlCharacters *bool `json:"preserveAsciiControlCharacters,omitempty"`
-
- // Optional. Connection properties which can modify the load job behavior.
- // Currently, only the 'session_id' connection property is supported, and is
- // used to resolve _SESSION appearing as the dataset id.
- ConnectionProperties []ConnectionProperty `json:"connectionProperties,omitempty"`
-
- // Optional. If this property is true, the job creates a new session using a
- // randomly generated session_id. To continue using a created session with
- // subsequent queries, pass the existing session identifier as a
- // `ConnectionProperty` value. The session identifier is returned as part of
- // the `SessionInfo` message within the query statistics.
- //
- // The new session's location will be set to `Job.JobReference.location` if it
- // is present, otherwise it's set to the default location based on existing
- // routing logic.
- CreateSession *bool `json:"createSession,omitempty"`
-
- // Optional. Character map supported for column names in CSV/Parquet loads.
- // Defaults to STRICT and can be overridden by Project Config Service. Using
- // this option with unsupported load formats will result in an error.
- ColumnNameCharacterMap *string `json:"columnNameCharacterMap,omitempty"`
-
- // Optional. [Experimental] Configures the load job to copy files directly to
- // the destination BigLake managed table, bypassing file content reading and
- // rewriting.
- //
- // Copying files only is supported when all the following are true:
- //
- // * `source_uris` are located in the same Cloud Storage location as the
- // destination table's `storage_uri` location.
- // * `source_format` is `PARQUET`.
- // * `destination_table` is an existing BigLake managed table. The table's
- // schema does not have flexible column names. The table's columns do not
- // have type parameters other than precision and scale.
- // * No options other than the above are specified.
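The decimalTargetTypes rule above is essentially a small selection algorithm: walk NUMERIC, BIGNUMERIC, STRING in that order, take the first listed type whose range covers the column's (precision, scale), and otherwise fall back to the widest listed type. A self-contained sketch of that rule, not part of this patch; the exact digit limits are inferred from the examples in the comment:

// pickDecimalTargetType sketches the documented selection rule. The capacity
// checks (NUMERIC: <=9 fractional and <=29 integer digits; BIGNUMERIC: <=38
// fractional and <=38 integer digits) are inferred from the examples above.
func pickDecimalTargetType(precision, scale int, listed []string) string {
    fits := map[string]bool{
        "NUMERIC":    scale <= 9 && precision-scale <= 29,
        "BIGNUMERIC": scale <= 38 && precision-scale <= 38,
        "STRING":     true, // STRING supports all precision and scale values.
    }
    has := map[string]bool{}
    for _, t := range listed {
        has[t] = true
    }
    // In the order NUMERIC, BIGNUMERIC, STRING, pick the first listed type that fits.
    for _, t := range []string{"NUMERIC", "BIGNUMERIC", "STRING"} {
        if has[t] && fits[t] {
            return t
        }
    }
    // Otherwise pick the listed type with the widest range; values outside its
    // range fail at read time, e.g. (77,38) with ["NUMERIC", "BIGNUMERIC"].
    for _, t := range []string{"STRING", "BIGNUMERIC", "NUMERIC"} {
        if has[t] {
            return t
        }
    }
    return ""
}

With ["NUMERIC", "BIGNUMERIC"], this returns NUMERIC for (38,9) and BIGNUMERIC for (39,9), (38,10), (76,38), and (77,38), matching the examples in the comment.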
- CopyFilesOnly *bool `json:"copyFilesOnly,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationQuery -type JobConfigurationQuery struct { - // [Required] SQL query text to execute. The useLegacySql field can be used - // to indicate whether the query uses legacy SQL or GoogleSQL. - Query *string `json:"query,omitempty"` - - // Optional. Describes the table where the query results should be stored. - // This property must be set for large results that exceed the maximum - // response size. For queries that produce anonymous (cached) results, this - // field will be populated by BigQuery. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // TODO: map type string message for external_table_definitions - - // Describes user-defined function resources used in the query. - UserDefinedFunctionResources []UserDefinedFunctionResource `json:"userDefinedFunctionResources,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // data, removes the constraints, and uses the schema from the query result. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. - // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. - // - // The default value is WRITE_EMPTY. Each action is atomic and only occurs if - // BigQuery is able to complete the job successfully. Creation, truncation and - // append actions occur as one atomic update upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Optional. Specifies the default dataset to use for unqualified - // table names in the query. This setting does not alter behavior of - // unqualified dataset names. Setting the system variable - // `@@dataset_id` achieves the same behavior. See - // https://cloud.google.com/bigquery/docs/reference/system-variables for more - // information on system variables. - DefaultDataset *DatasetReference `json:"defaultDataset,omitempty"` - - // Optional. Specifies a priority for the query. Possible values include - // INTERACTIVE and BATCH. The default value is INTERACTIVE. - Priority *string `json:"priority,omitempty"` - - // Optional. If true and query uses legacy SQL dialect, allows the query - // to produce arbitrarily large result tables at a slight cost in performance. - // Requires destinationTable to be set. - // For GoogleSQL queries, this flag is ignored and large results are - // always allowed. However, you must still set destinationTable when result - // size exceeds the allowed maximum response size. - AllowLargeResults *bool `json:"allowLargeResults,omitempty"` - - // Optional. Whether to look for the result in the query cache. The query - // cache is a best-effort cache that will be flushed whenever tables in the - // query are modified. 
Moreover, the query cache is only available when a - // query does not have a destination table specified. The default value is - // true. - UseQueryCache *bool `json:"useQueryCache,omitempty"` - - // Optional. If true and query uses legacy SQL dialect, flattens all nested - // and repeated fields in the query results. - // allowLargeResults must be true if this is set to false. - // For GoogleSQL queries, this flag is ignored and results are never - // flattened. - FlattenResults *bool `json:"flattenResults,omitempty"` - - // Limits the bytes billed for this job. Queries that will have - // bytes billed beyond this limit will fail (without incurring a charge). - // If unspecified, this will be set to your project default. - MaximumBytesBilled *int64 `json:"maximumBytesBilled,omitempty"` - - // Optional. Specifies whether to use BigQuery's legacy SQL dialect for this - // query. The default value is true. If set to false, the query will use - // BigQuery's GoogleSQL: - // https://cloud.google.com/bigquery/sql-reference/ - // - // When useLegacySql is set to false, the value of flattenResults is ignored; - // query will be run as if flattenResults is false. - UseLegacySql *bool `json:"useLegacySql,omitempty"` - - // GoogleSQL only. Set to POSITIONAL to use positional (?) query parameters - // or to NAMED to use named (@myparam) query parameters in this query. - ParameterMode *string `json:"parameterMode,omitempty"` - - // Query parameters for GoogleSQL queries. - QueryParameters []QueryParameter `json:"queryParameters,omitempty"` - - // Output only. System variables for GoogleSQL queries. A system variable is - // output if the variable is settable and its value differs from the system - // default. - // "@@" prefix is not included in the name of the System variables. - SystemVariables *SystemVariables `json:"systemVariables,omitempty"` - - // Allows the schema of the destination table to be updated as a side effect - // of the query job. Schema update options are supported in two cases: - // when writeDisposition is WRITE_APPEND; - // when writeDisposition is WRITE_TRUNCATE and the destination table is a - // partition of a table, specified by partition decorators. For normal tables, - // WRITE_TRUNCATE will always overwrite the schema. - // One or more of the following values are specified: - // - // * ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. - // * ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original - // schema to nullable. - SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty"` - - // Time-based partitioning specification for the destination table. Only one - // of timePartitioning and rangePartitioning should be specified. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // Range partitioning specification for the destination table. - // Only one of timePartitioning and rangePartitioning should be specified. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the destination table. - Clustering *Clustering `json:"clustering,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys) - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Options controlling the execution of scripts. - ScriptOptions *ScriptOptions `json:"scriptOptions,omitempty"` - - // Connection properties which can modify the query behavior. 
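For comparison with the load configuration sketched earlier, a query job combines the dispositions above with query-specific settings such as priority, cache usage, and a bytes-billed cap. A minimal illustrative literal, reusing the assumed bqkrm import alias and ptr helper from that earlier sketch (both assumptions, not part of this patch):

// Illustrative sketch only; bqkrm and ptr are the assumptions introduced earlier.
var batchQuery = bqkrm.JobConfigurationQuery{
    Query:              ptr("SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare`"),
    UseLegacySql:       ptr(false),          // run as GoogleSQL
    Priority:           ptr("BATCH"),        // queue instead of running interactively
    UseQueryCache:      ptr(true),           // default; only effective when no destination table is set
    MaximumBytesBilled: ptr(int64(1 << 30)), // fail the query if it would bill more than 1 GiB
}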
- ConnectionProperties []ConnectionProperty `json:"connectionProperties,omitempty"` - - // If this property is true, the job creates a new session using a randomly - // generated session_id. To continue using a created session with - // subsequent queries, pass the existing session identifier as a - // `ConnectionProperty` value. The session identifier is returned as part of - // the `SessionInfo` message within the query statistics. - // - // The new session's location will be set to `Job.JobReference.location` if it - // is present, otherwise it's set to the default location based on existing - // routing logic. - CreateSession *bool `json:"createSession,omitempty"` - - // Optional. Whether to run the query as continuous or a regular query. - // Continuous query is currently in experimental stage and not ready for - // general usage. - Continuous *bool `json:"continuous,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationTableCopy -type JobConfigurationTableCopy struct { - // [Pick one] Source table to copy. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // [Pick one] Source tables to copy. - SourceTables []TableReference `json:"sourceTables,omitempty"` - - // [Required] The destination table. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // table data and uses the schema and table constraints from the source table. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. - // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. - // - // The default value is WRITE_EMPTY. Each action is atomic and only occurs if - // BigQuery is able to complete the job successfully. Creation, truncation and - // append actions occur as one atomic update upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Optional. Supported operation types in table copy job. - OperationType *string `json:"operationType,omitempty"` - - // Optional. The time when the destination table expires. Expired tables will - // be deleted and their storage reclaimed. - DestinationExpirationTime *string `json:"destinationExpirationTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobCreationReason -type JobCreationReason struct { - // Output only. Specifies the high level reason why a Job was created. - Code *string `json:"code,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobList -type JobList struct { - // A hash of this page of results. 
- Etag *string `json:"etag,omitempty"` - - // The resource type of the response. - Kind *string `json:"kind,omitempty"` - - // A token to request the next page of results. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // List of jobs that were requested. - Jobs []ListFormatJob `json:"jobs,omitempty"` - - // A list of skipped locations that were unreachable. For more information - // about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations. Example: "europe-west5" - Unreachable []string `json:"unreachable,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobReference -type JobReference struct { - // Required. The ID of the project containing this job. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the job. The ID must contain only letters (a-z, A-Z), - // numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 - // characters. - JobID *string `json:"jobID,omitempty"` - - // Optional. The geographic location of the job. The default value is US. - // - // For more information about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations - Location *string `json:"location,omitempty"` - - // This field should not be used. - LocationAlternative []string `json:"locationAlternative,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics -type JobStatistics struct { - // Output only. Creation time of this job, in milliseconds since the epoch. - // This field will be present on all jobs. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. Start time of this job, in milliseconds since the epoch. - // This field will be present when the job transitions from the PENDING state - // to either RUNNING or DONE. - StartTime *int64 `json:"startTime,omitempty"` - - // Output only. End time of this job, in milliseconds since the epoch. This - // field will be present whenever a job is in the DONE state. - EndTime *int64 `json:"endTime,omitempty"` - - // Output only. Total bytes processed for the job. - TotalBytesProcessed *int64 `json:"totalBytesProcessed,omitempty"` - - // Output only. [TrustedTester] Job progress (0.0 -> 1.0) for LOAD and - // EXTRACT jobs. - CompletionRatio *float64 `json:"completionRatio,omitempty"` - - // Output only. Quotas which delayed this job's start time. - QuotaDeferments []string `json:"quotaDeferments,omitempty"` - - // Output only. Statistics for a query job. - Query *JobStatistics2 `json:"query,omitempty"` - - // Output only. Statistics for a load job. - Load *JobStatistics3 `json:"load,omitempty"` - - // Output only. Statistics for an extract job. - Extract *JobStatistics4 `json:"extract,omitempty"` - - // Output only. Statistics for a copy job. - Copy *CopyJobStatistics `json:"copy,omitempty"` - - // Output only. Slot-milliseconds for the job. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Output only. Name of the primary reservation assigned to this job. Note - // that this could be different than reservations reported in the reservation - // usage field if parent reservations were used to execute this job. - ReservationID *string `json:"reservationID,omitempty"` - - // Output only. Number of child jobs executed. - NumChildJobs *int64 `json:"numChildJobs,omitempty"` - - // Output only. If this is a child job, specifies the job ID of the parent. - ParentJobID *string `json:"parentJobID,omitempty"` - - // Output only. 
If this a child job of a script, specifies information about - // the context of this job within the script. - ScriptStatistics *ScriptStatistics `json:"scriptStatistics,omitempty"` - - // Output only. Statistics for row-level security. Present only for query and - // extract jobs. - RowLevelSecurityStatistics *RowLevelSecurityStatistics `json:"rowLevelSecurityStatistics,omitempty"` - - // Output only. Statistics for data-masking. Present only for query and - // extract jobs. - DataMaskingStatistics *DataMaskingStatistics `json:"dataMaskingStatistics,omitempty"` - - // Output only. [Alpha] Information of the multi-statement transaction if this - // job is part of one. - // - // This property is only expected on a child job or a job that is in a - // session. A script parent job is not part of the transaction started in the - // script. - TransactionInfo *JobStatistics_TransactionInfo `json:"transactionInfo,omitempty"` - - // Output only. Information of the session if this job is part of one. - SessionInfo *SessionInfo `json:"sessionInfo,omitempty"` - - // Output only. The duration in milliseconds of the execution of the final - // attempt of this job, as BigQuery may internally re-attempt to execute the - // job. - FinalExecutionDurationMs *int64 `json:"finalExecutionDurationMs,omitempty"` - - // Output only. Name of edition corresponding to the reservation for this job - // at the time of this update. - Edition *string `json:"edition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics.TransactionInfo -type JobStatistics_TransactionInfo struct { - // Output only. [Alpha] Id of the transaction. - TransactionID *string `json:"transactionID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics2 -type JobStatistics2 struct { - // Output only. Describes execution plan for the query. - QueryPlan []ExplainQueryStage `json:"queryPlan,omitempty"` - - // Output only. The original estimate of bytes processed for the job. - EstimatedBytesProcessed *int64 `json:"estimatedBytesProcessed,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` - - // Output only. Total number of partitions processed from all partitioned - // tables referenced in the job. - TotalPartitionsProcessed *int64 `json:"totalPartitionsProcessed,omitempty"` - - // Output only. Total bytes processed for the job. - TotalBytesProcessed *int64 `json:"totalBytesProcessed,omitempty"` - - // Output only. For dry-run jobs, totalBytesProcessed is an estimate and this - // field specifies the accuracy of the estimate. Possible values can be: - // UNKNOWN: accuracy of the estimate is unknown. - // PRECISE: estimate is precise. - // LOWER_BOUND: estimate is lower bound of what the query would cost. - // UPPER_BOUND: estimate is upper bound of what the query would cost. - TotalBytesProcessedAccuracy *string `json:"totalBytesProcessedAccuracy,omitempty"` - - // Output only. If the project is configured to use on-demand pricing, - // then this field contains the total bytes billed for the job. - // If the project is configured to use flat-rate pricing, then you are - // not billed for bytes and this field is informational only. - TotalBytesBilled *int64 `json:"totalBytesBilled,omitempty"` - - // Output only. Billing tier for the job. This is a BigQuery-specific concept - // which is not related to the Google Cloud notion of "free tier". 
The value - // here is a measure of the query's resource consumption relative to the - // amount of data scanned. For on-demand queries, the limit is 100, and all - // queries within this limit are billed at the standard on-demand rates. - // On-demand queries that exceed this limit will fail with a - // billingTierLimitExceeded error. - BillingTier *int32 `json:"billingTier,omitempty"` - - // Output only. Slot-milliseconds for the job. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Output only. Whether the query result was fetched from the query cache. - CacheHit *bool `json:"cacheHit,omitempty"` - - // Output only. Referenced tables for the job. Queries that reference more - // than 50 tables will not have a complete list. - ReferencedTables []TableReference `json:"referencedTables,omitempty"` - - // Output only. Referenced routines for the job. - ReferencedRoutines []RoutineReference `json:"referencedRoutines,omitempty"` - - // Output only. The schema of the results. Present only for successful dry - // run of non-legacy SQL queries. - Schema *TableSchema `json:"schema,omitempty"` - - // Output only. The number of rows affected by a DML statement. Present - // only for DML statements INSERT, UPDATE or DELETE. - NumDmlAffectedRows *int64 `json:"numDmlAffectedRows,omitempty"` - - // Output only. Detailed statistics for DML statements INSERT, UPDATE, DELETE, - // MERGE or TRUNCATE. - DmlStats *DmlStats `json:"dmlStats,omitempty"` - - // Output only. GoogleSQL only: list of undeclared query - // parameters detected during a dry run validation. - UndeclaredQueryParameters []QueryParameter `json:"undeclaredQueryParameters,omitempty"` - - // Output only. The type of query statement, if valid. - // Possible values: - // - // * `SELECT`: - // [`SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_list) - // statement. - // * `ASSERT`: - // [`ASSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/debugging-statements#assert) - // statement. - // * `INSERT`: - // [`INSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement) - // statement. - // * `UPDATE`: - // [`UPDATE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#update_statement) - // statement. - // * `DELETE`: - // [`DELETE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) - // statement. - // * `MERGE`: - // [`MERGE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) - // statement. - // * `CREATE_TABLE`: [`CREATE - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement) - // statement, without `AS SELECT`. - // * `CREATE_TABLE_AS_SELECT`: [`CREATE TABLE AS - // SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#query_statement) - // statement. - // * `CREATE_VIEW`: [`CREATE - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_view_statement) - // statement. - // * `CREATE_MODEL`: [`CREATE - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_model_statement) - // statement. - // * `CREATE_MATERIALIZED_VIEW`: [`CREATE MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_materialized_view_statement) - // statement. 
- // * `CREATE_FUNCTION`: [`CREATE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) - // statement. - // * `CREATE_TABLE_FUNCTION`: [`CREATE TABLE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_function_statement) - // statement. - // * `CREATE_PROCEDURE`: [`CREATE - // PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_procedure) - // statement. - // * `CREATE_ROW_ACCESS_POLICY`: [`CREATE ROW ACCESS - // POLICY`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_row_access_policy_statement) - // statement. - // * `CREATE_SCHEMA`: [`CREATE - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) - // statement. - // * `CREATE_SNAPSHOT_TABLE`: [`CREATE SNAPSHOT - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_snapshot_table_statement) - // statement. - // * `CREATE_SEARCH_INDEX`: [`CREATE SEARCH - // INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_search_index_statement) - // statement. - // * `DROP_TABLE`: [`DROP - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement) - // statement. - // * `DROP_EXTERNAL_TABLE`: [`DROP EXTERNAL - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_external_table_statement) - // statement. - // * `DROP_VIEW`: [`DROP - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_statement) - // statement. - // * `DROP_MODEL`: [`DROP - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model) - // statement. - // * `DROP_MATERIALIZED_VIEW`: [`DROP MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_materialized_view_statement) - // statement. - // * `DROP_FUNCTION` : [`DROP - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_function_statement) - // statement. - // * `DROP_TABLE_FUNCTION` : [`DROP TABLE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_function) - // statement. - // * `DROP_PROCEDURE`: [`DROP - // PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedure_statement) - // statement. - // * `DROP_SEARCH_INDEX`: [`DROP SEARCH - // INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_index) - // statement. - // * `DROP_SCHEMA`: [`DROP - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_statement) - // statement. - // * `DROP_SNAPSHOT_TABLE`: [`DROP SNAPSHOT - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot_table_statement) - // statement. - // * `DROP_ROW_ACCESS_POLICY`: [`DROP [ALL] ROW ACCESS - // POLICY|POLICIES`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_access_policy_statement) - // statement. 
- // * `ALTER_TABLE`: [`ALTER - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_set_options_statement) - // statement. - // * `ALTER_VIEW`: [`ALTER - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_set_options_statement) - // statement. - // * `ALTER_MATERIALIZED_VIEW`: [`ALTER MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_materialized_view_set_options_statement) - // statement. - // * `ALTER_SCHEMA`: [`ALTER - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema_set_options_statement) - // statement. - // * `SCRIPT`: - // [`SCRIPT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language). - // * `TRUNCATE_TABLE`: [`TRUNCATE - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement) - // statement. - // * `CREATE_EXTERNAL_TABLE`: [`CREATE EXTERNAL - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_external_table_statement) - // statement. - // * `EXPORT_DATA`: [`EXPORT - // DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#export_data_statement) - // statement. - // * `EXPORT_MODEL`: [`EXPORT - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model) - // statement. - // * `LOAD_DATA`: [`LOAD - // DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#load_data_statement) - // statement. - // * `CALL`: - // [`CALL`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#call) - // statement. - StatementType *string `json:"statementType,omitempty"` - - // Output only. The DDL operation performed, possibly - // dependent on the pre-existence of the DDL target. - DdlOperationPerformed *string `json:"ddlOperationPerformed,omitempty"` - - // Output only. The DDL target table. Present only for - // CREATE/DROP TABLE/VIEW and DROP ALL ROW ACCESS POLICIES queries. - DdlTargetTable *TableReference `json:"ddlTargetTable,omitempty"` - - // Output only. The table after rename. Present only for ALTER TABLE RENAME TO - // query. - DdlDestinationTable *TableReference `json:"ddlDestinationTable,omitempty"` - - // Output only. The DDL target row access policy. Present only for - // CREATE/DROP ROW ACCESS POLICY queries. - DdlTargetRowAccessPolicy *RowAccessPolicyReference `json:"ddlTargetRowAccessPolicy,omitempty"` - - // Output only. The number of row access policies affected by a DDL statement. - // Present only for DROP ALL ROW ACCESS POLICIES queries. - DdlAffectedRowAccessPolicyCount *int64 `json:"ddlAffectedRowAccessPolicyCount,omitempty"` - - // Output only. [Beta] The DDL target routine. Present only for - // CREATE/DROP FUNCTION/PROCEDURE queries. - DdlTargetRoutine *RoutineReference `json:"ddlTargetRoutine,omitempty"` - - // Output only. The DDL target dataset. Present only for CREATE/ALTER/DROP - // SCHEMA(dataset) queries. - DdlTargetDataset *DatasetReference `json:"ddlTargetDataset,omitempty"` - - // Output only. Statistics of a BigQuery ML training job. - MlStatistics *MlStatistics `json:"mlStatistics,omitempty"` - - // Output only. Stats for EXPORT DATA statement. - ExportDataStatistics *ExportDataStatistics `json:"exportDataStatistics,omitempty"` - - // Output only. 
Job cost breakdown as bigquery internal cost and external - // service costs. - ExternalServiceCosts []ExternalServiceCost `json:"externalServiceCosts,omitempty"` - - // Output only. BI Engine specific Statistics. - BiEngineStatistics *BiEngineStatistics `json:"biEngineStatistics,omitempty"` - - // Output only. Statistics for a LOAD query. - LoadQueryStatistics *LoadQueryStatistics `json:"loadQueryStatistics,omitempty"` - - // Output only. Referenced table for DCL statement. - DclTargetTable *TableReference `json:"dclTargetTable,omitempty"` - - // Output only. Referenced view for DCL statement. - DclTargetView *TableReference `json:"dclTargetView,omitempty"` - - // Output only. Referenced dataset for DCL statement. - DclTargetDataset *DatasetReference `json:"dclTargetDataset,omitempty"` - - // Output only. Search query specific statistics. - SearchStatistics *SearchStatistics `json:"searchStatistics,omitempty"` - - // Output only. Vector Search query specific statistics. - VectorSearchStatistics *VectorSearchStatistics `json:"vectorSearchStatistics,omitempty"` - - // Output only. Performance insights. - PerformanceInsights *PerformanceInsights `json:"performanceInsights,omitempty"` - - // Output only. Query optimization information for a QUERY job. - QueryInfo *QueryInfo `json:"queryInfo,omitempty"` - - // Output only. Statistics of a Spark procedure job. - SparkStatistics *SparkStatistics `json:"sparkStatistics,omitempty"` - - // Output only. Total bytes transferred for cross-cloud queries such as Cross - // Cloud Transfer and CREATE TABLE AS SELECT (CTAS). - TransferredBytes *int64 `json:"transferredBytes,omitempty"` - - // Output only. Statistics of materialized views of a query job. - MaterializedViewStatistics *MaterializedViewStatistics `json:"materializedViewStatistics,omitempty"` - - // Output only. Statistics of metadata cache usage in a query for BigLake - // tables. - MetadataCacheStatistics *MetadataCacheStatistics `json:"metadataCacheStatistics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics3 -type JobStatistics3 struct { - // Output only. Number of source files in a load job. - InputFiles *int64 `json:"inputFiles,omitempty"` - - // Output only. Number of bytes of source data in a load job. - InputFileBytes *int64 `json:"inputFileBytes,omitempty"` - - // Output only. Number of rows imported in a load job. - // Note that while an import job is in the running state, this - // value may change. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. Size of the loaded data in bytes. Note - // that while a load job is in the running state, this value may change. - OutputBytes *int64 `json:"outputBytes,omitempty"` - - // Output only. The number of bad records encountered. Note that if the job - // has failed because of more bad records encountered than the maximum - // allowed in the load job configuration, then this number can be less than - // the total number of bad records present in the input data. - BadRecords *int64 `json:"badRecords,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics4 -type JobStatistics4 struct { - // Output only. Number of files per destination URI or URI pattern - // specified in the extract configuration. These values will be in the same - // order as the URIs specified in the 'destinationUris' field. 
- DestinationUriFileCounts []int64 `json:"destinationUriFileCounts,omitempty"` - - // Output only. Number of user bytes extracted into the result. This is the - // byte count as computed by BigQuery for billing purposes - // and doesn't have any relationship with the number of actual - // result bytes extracted in the desired format. - InputBytes *int64 `json:"inputBytes,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatus -type JobStatus struct { - // Output only. Final error result of the job. If present, indicates that the - // job has completed and was unsuccessful. - ErrorResult *ErrorProto `json:"errorResult,omitempty"` - - // Output only. The first errors encountered during the running of the job. - // The final message includes the number of errors that caused the process to - // stop. Errors here do not necessarily mean that the job has not completed or - // was unsuccessful. - Errors []ErrorProto `json:"errors,omitempty"` - - // Output only. Running state of the job. Valid states include 'PENDING', - // 'RUNNING', and 'DONE'. - State *string `json:"state,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JoinRestrictionPolicy -type JoinRestrictionPolicy struct { - // Optional. Specifies if a join is required or not on queries for the view. - // Default is JOIN_CONDITION_UNSPECIFIED. - JoinCondition *string `json:"joinCondition,omitempty"` - - // Optional. The only columns that joins are allowed on. - // This field is must be specified for join_conditions JOIN_ANY and JOIN_ALL - // and it cannot be set for JOIN_BLOCKED. - JoinAllowedColumns []string `json:"joinAllowedColumns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JsonOptions -type JsonOptions struct { - // Optional. The character encoding of the data. - // The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. - Encoding *string `json:"encoding,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.LinkedDatasetSource -type LinkedDatasetSource struct { - // The source dataset reference contains project numbers and not project ids. - SourceDataset *DatasetReference `json:"sourceDataset,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatDataset -type ListFormatDataset struct { - // The resource type. - // This property always returns the value "bigquery#dataset" - Kind *string `json:"kind,omitempty"` - - // The fully-qualified, unique, opaque ID of the dataset. - ID *string `json:"id,omitempty"` - - // The dataset reference. - // Use this property to access specific parts of the dataset's ID, such as - // project ID or dataset ID. - DatasetReference *DatasetReference `json:"datasetReference,omitempty"` - - // The labels associated with this dataset. - // You can use these to organize and group your datasets. - Labels map[string]string `json:"labels,omitempty"` - - // An alternate name for the dataset. The friendly name is purely - // decorative in nature. - FriendlyName *string `json:"friendlyName,omitempty"` - - // The geographic location where the dataset resides. - Location *string `json:"location,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatJob -type ListFormatJob struct { - // Unique opaque ID of the job. - ID *string `json:"id,omitempty"` - - // The resource type. - Kind *string `json:"kind,omitempty"` - - // Unique opaque ID of the job. 
- JobReference *JobReference `json:"jobReference,omitempty"`
-
- // Running state of the job. When the state is DONE, errorResult can be
- // checked to determine whether the job succeeded or failed.
- State *string `json:"state,omitempty"`
-
- // A result object that will be present only if the job has failed.
- ErrorResult *ErrorProto `json:"errorResult,omitempty"`
-
- // Output only. Information about the job, including starting time and ending
- // time of the job.
- Statistics *JobStatistics `json:"statistics,omitempty"`
-
- // Required. Describes the job configuration.
- Configuration *JobConfiguration `json:"configuration,omitempty"`
-
- // [Full-projection-only] Describes the status of this job.
- Status *JobStatus `json:"status,omitempty"`
-
- // [Full-projection-only] Email address of the user who ran the job.
- UserEmail *string `json:"userEmail,omitempty"`
-
- // [Full-projection-only] String representation of identity of requesting
- // party. Populated for both first- and third-party identities. Only present
- // for APIs that support third-party identities.
- PrincipalSubject *string `json:"principalSubject,omitempty"`
-}
-
- // +kcc:proto=google.cloud.bigquery.v2.ListFormatTable
-type ListFormatTable struct {
- // The resource type.
- Kind *string `json:"kind,omitempty"`
-
- // An opaque ID of the table.
- ID *string `json:"id,omitempty"`
-
- // A reference uniquely identifying the table.
- TableReference *TableReference `json:"tableReference,omitempty"`
-
- // The user-friendly name for this table.
- FriendlyName *string `json:"friendlyName,omitempty"`
-
- // The type of table.
- Type *string `json:"type,omitempty"`
-
- // The time-based partitioning for this table.
- TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"`
-
- // The range partitioning for this table.
- RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"`
-
- // Clustering specification for this table, if configured.
- Clustering *Clustering `json:"clustering,omitempty"`
-
- // The labels associated with this table. You can use these to organize
- // and group your tables.
- Labels map[string]string `json:"labels,omitempty"`
-
- // Additional details for a view.
- View *ListFormatView `json:"view,omitempty"`
-
- // Output only. The time when this table was created, in milliseconds since
- // the epoch.
- CreationTime *int64 `json:"creationTime,omitempty"`
-
- // The time when this table expires, in milliseconds since the
- // epoch. If not present, the table will persist indefinitely. Expired tables
- // will be deleted and their storage reclaimed.
- ExpirationTime *int64 `json:"expirationTime,omitempty"`
-
- // Optional. If set to true, queries including this table must specify a
- // partition filter. This filter is used for partition elimination.
- RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"`
-}
-
- // +kcc:proto=google.cloud.bigquery.v2.ListFormatView
-type ListFormatView struct {
- // True if view is defined in legacy SQL dialect,
- // false if in GoogleSQL.
- UseLegacySql *bool `json:"useLegacySql,omitempty"`
-
- // Specifies the privacy policy for the view.
- PrivacyPolicy *PrivacyPolicy `json:"privacyPolicy,omitempty"`
-}
-
- // +kcc:proto=google.cloud.bigquery.v2.LoadQueryStatistics
-type LoadQueryStatistics struct {
- // Output only. Number of source files in a LOAD query.
- InputFiles *int64 `json:"inputFiles,omitempty"`
-
- // Output only. Number of bytes of source data in a LOAD query.
- InputFileBytes *int64 `json:"inputFileBytes,omitempty"` - - // Output only. Number of rows imported in a LOAD query. - // Note that while a LOAD query is in the running state, this value may - // change. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. Size of the loaded data in bytes. Note that while a LOAD query - // is in the running state, this value may change. - OutputBytes *int64 `json:"outputBytes,omitempty"` - - // Output only. The number of bad records encountered while processing a LOAD - // query. Note that if the job has failed because of more bad records - // encountered than the maximum allowed in the load job configuration, then - // this number can be less than the total number of bad records present in the - // input data. - BadRecords *int64 `json:"badRecords,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedView -type MaterializedView struct { - // The candidate materialized view. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Whether the materialized view is chosen for the query. - // - // A materialized view can be chosen to rewrite multiple parts of the same - // query. If a materialized view is chosen to rewrite any part of the query, - // then this field is true, even if the materialized view was not chosen to - // rewrite others parts. - Chosen *bool `json:"chosen,omitempty"` - - // If present, specifies a best-effort estimation of the bytes saved by using - // the materialized view rather than its base tables. - EstimatedBytesSaved *int64 `json:"estimatedBytesSaved,omitempty"` - - // If present, specifies the reason why the materialized view was not chosen - // for the query. - RejectedReason *string `json:"rejectedReason,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewDefinition -type MaterializedViewDefinition struct { - // Required. A query whose results are persisted. - Query *string `json:"query,omitempty"` - - // Output only. The time when this materialized view was last refreshed, in - // milliseconds since the epoch. - LastRefreshTime *int64 `json:"lastRefreshTime,omitempty"` - - // Optional. Enable automatic refresh of the materialized view when the base - // table is updated. The default value is "true". - EnableRefresh *bool `json:"enableRefresh,omitempty"` - - // Optional. The maximum frequency at which this materialized view will be - // refreshed. The default value is "1800000" (30 minutes). - RefreshIntervalMs *uint64 `json:"refreshIntervalMs,omitempty"` - - // Optional. This option declares the intention to construct a materialized - // view that isn't refreshed incrementally. - AllowNonIncrementalDefinition *bool `json:"allowNonIncrementalDefinition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewStatistics -type MaterializedViewStatistics struct { - // Materialized views considered for the query job. Only certain materialized - // views are used. For a detailed list, see the child message. - // - // If many materialized views are considered, then the list might be - // incomplete. - MaterializedView []MaterializedView `json:"materializedView,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewStatus -type MaterializedViewStatus struct { - // Output only. Refresh watermark of materialized view. The base tables' data - // were collected into the materialized view cache until this time. - RefreshWatermark *string `json:"refreshWatermark,omitempty"` - - // Output only. Error result of the last automatic refresh. 
If present, - // indicates that the last automatic refresh was unsuccessful. - LastRefreshStatus *ErrorProto `json:"lastRefreshStatus,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MetadataCacheStatistics -type MetadataCacheStatistics struct { - // Set for the Metadata caching eligible tables referenced in the query. - TableMetadataCacheUsage []TableMetadataCacheUsage `json:"tableMetadataCacheUsage,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MlStatistics -type MlStatistics struct { - // Output only. Maximum number of iterations specified as max_iterations in - // the 'CREATE MODEL' query. The actual number of iterations may be less than - // this number due to early stop. - MaxIterations *int64 `json:"maxIterations,omitempty"` - - // Results for all completed iterations. - // Empty for [hyperparameter tuning - // jobs](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview). - IterationResults []Model_TrainingRun_IterationResult `json:"iterationResults,omitempty"` - - // Output only. The type of the model that is being trained. - ModelType *string `json:"modelType,omitempty"` - - // Output only. Training type of the job. - TrainingType *string `json:"trainingType,omitempty"` - - // Output only. Trials of a [hyperparameter tuning - // job](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // sorted by trial_id. - HparamTrials []Model_HparamTuningTrial `json:"hparamTrials,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model -type Model struct { - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Required. Unique identifier for this model. - ModelReference *ModelReference `json:"modelReference,omitempty"` - - // Output only. The time when this model was created, in millisecs since the - // epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The time when this model was last modified, in millisecs since - // the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // Optional. A user-friendly description of this model. - Description *string `json:"description,omitempty"` - - // Optional. A descriptive name for this model. - FriendlyName *string `json:"friendlyName,omitempty"` - - // The labels associated with this model. You can use these to organize - // and group your models. Label keys and values can be no longer - // than 63 characters, can only contain lowercase letters, numeric - // characters, underscores and dashes. International characters are allowed. - // Label values are optional. Label keys must start with a letter and each - // label in the list must have a different key. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. The time when this model expires, in milliseconds since the - // epoch. If not present, the model will persist indefinitely. Expired models - // will be deleted and their storage reclaimed. The defaultTableExpirationMs - // property of the encapsulating dataset can be used to set a default - // expirationTime on newly created models. - ExpirationTime *int64 `json:"expirationTime,omitempty"` - - // Output only. The geographic location where the model resides. This value - // is inherited from the dataset. - Location *string `json:"location,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). This shows the - // encryption configuration of the model data while stored in BigQuery - // storage. 
This field can be used with PatchModel to update encryption key - // for an already encrypted model. - EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"` - - // Output only. Type of the model resource. - ModelType *string `json:"modelType,omitempty"` - - // Information for all training runs in increasing order of start_time. - TrainingRuns []Model_TrainingRun `json:"trainingRuns,omitempty"` - - // Output only. Input feature columns for the model inference. If the model is - // trained with TRANSFORM clause, these are the input of the TRANSFORM clause. - FeatureColumns []StandardSqlField `json:"featureColumns,omitempty"` - - // Output only. Label columns that were used to train this model. - // The output of the model will have a "predicted_" prefix to these columns. - LabelColumns []StandardSqlField `json:"labelColumns,omitempty"` - - // Output only. This field will be populated if a TRANSFORM clause was used to - // train a model. TRANSFORM clause (if used) takes feature_columns as input - // and outputs transform_columns. transform_columns then are used to train the - // model. - TransformColumns []TransformColumn `json:"transformColumns,omitempty"` - - // Output only. All hyperparameter search spaces in this model. - HparamSearchSpaces *Model_HparamSearchSpaces `json:"hparamSearchSpaces,omitempty"` - - // Output only. The default trial_id to use in TVFs when the trial_id is not - // passed in. For single-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, this is the best trial ID. For multi-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, this is the smallest trial ID among all Pareto optimal trials. - DefaultTrialID *int64 `json:"defaultTrialID,omitempty"` - - // Output only. Trials of a [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // model sorted by trial_id. - HparamTrials []Model_HparamTuningTrial `json:"hparamTrials,omitempty"` - - // Output only. For single-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, it only contains the best trial. For multi-objective - // [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, it contains all Pareto optimal trials sorted by trial_id. - OptimalTrialIds []int64 `json:"optimalTrialIds,omitempty"` - - // Output only. Remote model info - RemoteModelInfo *RemoteModelInfo `json:"remoteModelInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.AggregateClassificationMetrics -type Model_AggregateClassificationMetrics struct { - // Precision is the fraction of actual positive predictions that had - // positive actual labels. For multiclass this is a macro-averaged - // metric treating each class as a binary classifier. - Precision *float64 `json:"precision,omitempty"` - - // Recall is the fraction of actual positive labels that were given a - // positive prediction. For multiclass this is a macro-averaged metric. - Recall *float64 `json:"recall,omitempty"` - - // Accuracy is the fraction of predictions given the correct label. For - // multiclass this is a micro-averaged metric. 
- Accuracy *float64 `json:"accuracy,omitempty"` - - // Threshold at which the metrics are computed. For binary - // classification models this is the positive class threshold. - // For multi-class classfication models this is the confidence - // threshold. - Threshold *float64 `json:"threshold,omitempty"` - - // The F1 score is an average of recall and precision. For multiclass - // this is a macro-averaged metric. - F1Score *float64 `json:"f1Score,omitempty"` - - // Logarithmic Loss. For multiclass this is a macro-averaged metric. - LogLoss *float64 `json:"logLoss,omitempty"` - - // Area Under a ROC Curve. For multiclass this is a macro-averaged - // metric. - RocAuc *float64 `json:"rocAuc,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaFittingMetrics -type Model_ArimaFittingMetrics struct { - // Log-likelihood. - LogLikelihood *float64 `json:"logLikelihood,omitempty"` - - // AIC. - Aic *float64 `json:"aic,omitempty"` - - // Variance. - Variance *float64 `json:"variance,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaForecastingMetrics -type Model_ArimaForecastingMetrics struct { - // Repeated as there can be many metric sets (one for each model) in - // auto-arima and the large-scale case. - ArimaSingleModelForecastingMetrics []Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics `json:"arimaSingleModelForecastingMetrics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaForecastingMetrics.ArimaSingleModelForecastingMetrics -type Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics struct { - // Non-seasonal order. - NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // Arima fitting metrics. - ArimaFittingMetrics *Model_ArimaFittingMetrics `json:"arimaFittingMetrics,omitempty"` - - // Is arima model fitted with drift or not. It is always false when d - // is not 1. - HasDrift *bool `json:"hasDrift,omitempty"` - - // The time_series_id value for this time series. It will be one of - // the unique values from the time_series_id_column specified during - // ARIMA model training. Only present when time_series_id_column - // training option was used. - TimeSeriesID *string `json:"timeSeriesID,omitempty"` - - // The tuple of time_series_ids identifying this time series. It will - // be one of the unique tuples of values present in the - // time_series_id_columns specified during ARIMA model training. Only - // present when time_series_id_columns training option was used and - // the order of values here are same as the order of - // time_series_id_columns. - TimeSeriesIds []string `json:"timeSeriesIds,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported - // for one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` - - // If true, holiday_effect is a part of time series decomposition result. - HasHolidayEffect *bool `json:"hasHolidayEffect,omitempty"` - - // If true, spikes_and_dips is a part of time series decomposition result. - HasSpikesAndDips *bool `json:"hasSpikesAndDips,omitempty"` - - // If true, step_changes is a part of time series decomposition result. - HasStepChanges *bool `json:"hasStepChanges,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaOrder -type Model_ArimaOrder struct { - // Order of the autoregressive part. - P *int64 `json:"p,omitempty"` - - // Order of the differencing part. - D *int64 `json:"d,omitempty"` - - // Order of the moving-average part. 
- Q *int64 `json:"q,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BinaryClassificationMetrics -type Model_BinaryClassificationMetrics struct { - // Aggregate classification metrics. - AggregateClassificationMetrics *Model_AggregateClassificationMetrics `json:"aggregateClassificationMetrics,omitempty"` - - // Binary confusion matrix at multiple thresholds. - BinaryConfusionMatrixList []Model_BinaryClassificationMetrics_BinaryConfusionMatrix `json:"binaryConfusionMatrixList,omitempty"` - - // Label representing the positive class. - PositiveLabel *string `json:"positiveLabel,omitempty"` - - // Label representing the negative class. - NegativeLabel *string `json:"negativeLabel,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix -type Model_BinaryClassificationMetrics_BinaryConfusionMatrix struct { - // Threshold value used when computing each of the following metric. - PositiveClassThreshold *float64 `json:"positiveClassThreshold,omitempty"` - - // Number of true samples predicted as true. - TruePositives *int64 `json:"truePositives,omitempty"` - - // Number of false samples predicted as true. - FalsePositives *int64 `json:"falsePositives,omitempty"` - - // Number of true samples predicted as false. - TrueNegatives *int64 `json:"trueNegatives,omitempty"` - - // Number of false samples predicted as false. - FalseNegatives *int64 `json:"falseNegatives,omitempty"` - - // The fraction of actual positive predictions that had positive actual - // labels. - Precision *float64 `json:"precision,omitempty"` - - // The fraction of actual positive labels that were given a positive - // prediction. - Recall *float64 `json:"recall,omitempty"` - - // The equally weighted average of recall and precision. - F1Score *float64 `json:"f1Score,omitempty"` - - // The fraction of predictions given the correct label. - Accuracy *float64 `json:"accuracy,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BoostedTreeOptionEnums -type Model_BoostedTreeOptionEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.CategoryEncodingMethod -type Model_CategoryEncodingMethod struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics -type Model_ClusteringMetrics struct { - // Davies-Bouldin index. - DaviesBouldinIndex *float64 `json:"daviesBouldinIndex,omitempty"` - - // Mean of squared distances between each sample to its cluster centroid. - MeanSquaredDistance *float64 `json:"meanSquaredDistance,omitempty"` - - // Information for all clusters. - Clusters []Model_ClusteringMetrics_Cluster `json:"clusters,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster -type Model_ClusteringMetrics_Cluster struct { - // Centroid id. - CentroidID *int64 `json:"centroidID,omitempty"` - - // Values of highly variant features for this cluster. - FeatureValues []Model_ClusteringMetrics_Cluster_FeatureValue `json:"featureValues,omitempty"` - - // Count of training data rows that were assigned to this cluster. - Count *int64 `json:"count,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue -type Model_ClusteringMetrics_Cluster_FeatureValue struct { - // The feature column name. - FeatureColumn *string `json:"featureColumn,omitempty"` - - // The numerical feature value. This is the centroid value for this - // feature. - NumericalValue *float64 `json:"numericalValue,omitempty"` - - // The categorical feature value. 
- CategoricalValue *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue `json:"categoricalValue,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue -type Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue struct { - // Counts of all categories for the categorical feature. If there are - // more than ten categories, we return top ten (by count) and return - // one more CategoryCount with category "_OTHER_" and count as - // aggregate counts of remaining categories. - CategoryCounts []Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount `json:"categoryCounts,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount -type Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount struct { - // The name of category. - Category *string `json:"category,omitempty"` - - // The count of training samples matching the category within the - // cluster. - Count *int64 `json:"count,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.DataSplitResult -type Model_DataSplitResult struct { - // Table reference of the training data after split. - TrainingTable *TableReference `json:"trainingTable,omitempty"` - - // Table reference of the evaluation data after split. - EvaluationTable *TableReference `json:"evaluationTable,omitempty"` - - // Table reference of the test data after split. - TestTable *TableReference `json:"testTable,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.DimensionalityReductionMetrics -type Model_DimensionalityReductionMetrics struct { - // Total percentage of variance explained by the selected principal - // components. - TotalExplainedVarianceRatio *float64 `json:"totalExplainedVarianceRatio,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace -type Model_float64HparamSearchSpace struct { - // Range of the float64 hyperparameter. - Range *Model_float64HparamSearchSpace_float64Range `json:"range,omitempty"` - - // Candidates of the float64 hyperparameter. - Candidates *Model_float64HparamSearchSpace_float64Candidates `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace.float64Candidates -type Model_float64HparamSearchSpace_float64Candidates struct { - // Candidates for the float64 parameter in increasing order. - Candidates []float64 `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace.float64Range -type Model_float64HparamSearchSpace_float64Range struct { - // Min value of the float64 parameter. - Min *float64 `json:"min,omitempty"` - - // Max value of the float64 parameter. - Max *float64 `json:"max,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.EvaluationMetrics -type Model_EvaluationMetrics struct { - // Populated for regression models and explicit feedback type matrix - // factorization models. - RegressionMetrics *Model_RegressionMetrics `json:"regressionMetrics,omitempty"` - - // Populated for binary classification/classifier models. - BinaryClassificationMetrics *Model_BinaryClassificationMetrics `json:"binaryClassificationMetrics,omitempty"` - - // Populated for multi-class classification/classifier models. - MultiClassClassificationMetrics *Model_MultiClassClassificationMetrics `json:"multiClassClassificationMetrics,omitempty"` - - // Populated for clustering models. 
- ClusteringMetrics *Model_ClusteringMetrics `json:"clusteringMetrics,omitempty"` - - // Populated for implicit feedback type matrix factorization models. - RankingMetrics *Model_RankingMetrics `json:"rankingMetrics,omitempty"` - - // Populated for ARIMA models. - ArimaForecastingMetrics *Model_ArimaForecastingMetrics `json:"arimaForecastingMetrics,omitempty"` - - // Evaluation metrics when the model is a dimensionality reduction model, - // which currently includes PCA. - DimensionalityReductionMetrics *Model_DimensionalityReductionMetrics `json:"dimensionalityReductionMetrics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.GlobalExplanation -type Model_GlobalExplanation struct { - // A list of the top global explanations. Sorted by absolute value of - // attribution in descending order. - Explanations []Model_GlobalExplanation_Explanation `json:"explanations,omitempty"` - - // Class label for this set of global explanations. Will be empty/null for - // binary logistic and linear regression models. Sorted alphabetically in - // descending order. - ClassLabel *string `json:"classLabel,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.GlobalExplanation.Explanation -type Model_GlobalExplanation_Explanation struct { - // The full feature name. For non-numerical features, will be formatted - // like `.`. Overall size of feature - // name will always be truncated to first 120 characters. - FeatureName *string `json:"featureName,omitempty"` - - // Attribution of feature. - Attribution *float64 `json:"attribution,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.HparamSearchSpaces -type Model_HparamSearchSpaces struct { - // Learning rate of training jobs. - LearnRate *Model_float64HparamSearchSpace `json:"learnRate,omitempty"` - - // L1 regularization coefficient. - L1Reg *Model_float64HparamSearchSpace `json:"l1Reg,omitempty"` - - // L2 regularization coefficient. - L2Reg *Model_float64HparamSearchSpace `json:"l2Reg,omitempty"` - - // Number of clusters for k-means. - NumClusters *Model_IntHparamSearchSpace `json:"numClusters,omitempty"` - - // Number of latent factors to train on. - NumFactors *Model_IntHparamSearchSpace `json:"numFactors,omitempty"` - - // Hidden units for neural network models. - HiddenUnits *Model_IntArrayHparamSearchSpace `json:"hiddenUnits,omitempty"` - - // Mini batch sample size. - BatchSize *Model_IntHparamSearchSpace `json:"batchSize,omitempty"` - - // Dropout probability for dnn model training and boosted tree models - // using dart booster. - Dropout *Model_float64HparamSearchSpace `json:"dropout,omitempty"` - - // Maximum depth of a tree for boosted tree models. - MaxTreeDepth *Model_IntHparamSearchSpace `json:"maxTreeDepth,omitempty"` - - // Subsample the training data to grow tree to prevent overfitting for - // boosted tree models. - Subsample *Model_float64HparamSearchSpace `json:"subsample,omitempty"` - - // Minimum split loss for boosted tree models. - MinSplitLoss *Model_float64HparamSearchSpace `json:"minSplitLoss,omitempty"` - - // Hyperparameter for matrix factoration when implicit feedback type is - // specified. - WalsAlpha *Model_float64HparamSearchSpace `json:"walsAlpha,omitempty"` - - // Booster type for boosted tree models. - BoosterType *Model_StringHparamSearchSpace `json:"boosterType,omitempty"` - - // Number of parallel trees for boosted tree models. - NumParallelTree *Model_IntHparamSearchSpace `json:"numParallelTree,omitempty"` - - // Dart normalization type for boosted tree models. 
-	DartNormalizeType *Model_StringHparamSearchSpace `json:"dartNormalizeType,omitempty"`
-
-	// Tree construction algorithm for boosted tree models.
-	TreeMethod *Model_StringHparamSearchSpace `json:"treeMethod,omitempty"`
-
-	// Minimum sum of instance weight needed in a child for boosted tree models.
-	MinTreeChildWeight *Model_IntHparamSearchSpace `json:"minTreeChildWeight,omitempty"`
-
-	// Subsample ratio of columns when constructing each tree for boosted tree
-	// models.
-	ColsampleBytree *Model_float64HparamSearchSpace `json:"colsampleBytree,omitempty"`
-
-	// Subsample ratio of columns for each level for boosted tree models.
-	ColsampleBylevel *Model_float64HparamSearchSpace `json:"colsampleBylevel,omitempty"`
-
-	// Subsample ratio of columns for each node(split) for boosted tree models.
-	ColsampleBynode *Model_float64HparamSearchSpace `json:"colsampleBynode,omitempty"`
-
-	// Activation functions of neural network models.
-	ActivationFn *Model_StringHparamSearchSpace `json:"activationFn,omitempty"`
-
-	// Optimizer of TF models.
-	Optimizer *Model_StringHparamSearchSpace `json:"optimizer,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.HparamTuningEnums
-type Model_HparamTuningEnums struct {
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.HparamTuningTrial
-type Model_HparamTuningTrial struct {
-	// 1-based index of the trial.
-	TrialID *int64 `json:"trialID,omitempty"`
-
-	// Starting time of the trial.
-	StartTimeMs *int64 `json:"startTimeMs,omitempty"`
-
-	// Ending time of the trial.
-	EndTimeMs *int64 `json:"endTimeMs,omitempty"`
-
-	// The hyperparameters selected for this trial.
-	Hparams *Model_TrainingRun_TrainingOptions `json:"hparams,omitempty"`
-
-	// Evaluation metrics of this trial calculated on the test data.
-	// Empty in Job API.
-	EvaluationMetrics *Model_EvaluationMetrics `json:"evaluationMetrics,omitempty"`
-
-	// The status of the trial.
-	Status *string `json:"status,omitempty"`
-
-	// Error message for FAILED and INFEASIBLE trial.
-	ErrorMessage *string `json:"errorMessage,omitempty"`
-
-	// Loss computed on the training data at the end of trial.
-	TrainingLoss *float64 `json:"trainingLoss,omitempty"`
-
-	// Loss computed on the eval data at the end of trial.
-	EvalLoss *float64 `json:"evalLoss,omitempty"`
-
-	// Hyperparameter tuning evaluation metrics of this trial calculated on the
-	// eval data. Unlike evaluation_metrics, only the fields corresponding to
-	// the hparam_tuning_objectives are set.
-	HparamTuningEvaluationMetrics *Model_EvaluationMetrics `json:"hparamTuningEvaluationMetrics,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.IntArrayHparamSearchSpace
-type Model_IntArrayHparamSearchSpace struct {
-	// Candidates for the int array parameter.
-	Candidates []Model_IntArrayHparamSearchSpace_IntArray `json:"candidates,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.IntArrayHparamSearchSpace.IntArray
-type Model_IntArrayHparamSearchSpace_IntArray struct {
-	// Elements in the int array.
-	Elements []int64 `json:"elements,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace
-type Model_IntHparamSearchSpace struct {
-	// Range of the int hyperparameter.
-	Range *Model_IntHparamSearchSpace_IntRange `json:"range,omitempty"`
-
-	// Candidates of the int hyperparameter.
-	Candidates *Model_IntHparamSearchSpace_IntCandidates `json:"candidates,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace.IntCandidates
-type Model_IntHparamSearchSpace_IntCandidates struct {
-	// Candidates for the int parameter in increasing order.
-	Candidates []int64 `json:"candidates,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace.IntRange
-type Model_IntHparamSearchSpace_IntRange struct {
-	// Min value of the int parameter.
-	Min *int64 `json:"min,omitempty"`
-
-	// Max value of the int parameter.
-	Max *int64 `json:"max,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.KmeansEnums
-type Model_KmeansEnums struct {
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.ModelRegistryOptionEnums
-type Model_ModelRegistryOptionEnums struct {
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics
-type Model_MultiClassClassificationMetrics struct {
-	// Aggregate classification metrics.
-	AggregateClassificationMetrics *Model_AggregateClassificationMetrics `json:"aggregateClassificationMetrics,omitempty"`
-
-	// Confusion matrix at different thresholds.
-	ConfusionMatrixList []Model_MultiClassClassificationMetrics_ConfusionMatrix `json:"confusionMatrixList,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix
-type Model_MultiClassClassificationMetrics_ConfusionMatrix struct {
-	// Confidence threshold used when computing the entries of the
-	// confusion matrix.
-	ConfidenceThreshold *float64 `json:"confidenceThreshold,omitempty"`
-
-	// One row per actual label.
-	Rows []Model_MultiClassClassificationMetrics_ConfusionMatrix_Row `json:"rows,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry
-type Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry struct {
-	// The predicted label. For confidence_threshold > 0, we will
-	// also add an entry indicating the number of items under the
-	// confidence threshold.
-	PredictedLabel *string `json:"predictedLabel,omitempty"`
-
-	// Number of items being predicted as this label.
-	ItemCount *int64 `json:"itemCount,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row
-type Model_MultiClassClassificationMetrics_ConfusionMatrix_Row struct {
-	// The original label of this row.
-	ActualLabel *string `json:"actualLabel,omitempty"`
-
-	// Info describing predicted label distribution.
-	Entries []Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry `json:"entries,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.PcaSolverOptionEnums
-type Model_PcaSolverOptionEnums struct {
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.RankingMetrics
-type Model_RankingMetrics struct {
-	// Calculates a precision per user for all the items by ranking them and
-	// then averages all the precisions across all the users.
-	MeanAveragePrecision *float64 `json:"meanAveragePrecision,omitempty"`
-
-	// Similar to the mean squared error computed in regression and explicit
-	// recommendation models except instead of computing the rating directly,
-	// the output from evaluate is computed against a preference which is 1 or 0
-	// depending on if the rating exists or not.
-	MeanSquaredError *float64 `json:"meanSquaredError,omitempty"`
-
-	// A metric to determine the goodness of a ranking calculated from the
-	// predicted confidence by comparing it to an ideal rank measured by the
-	// original ratings.
-	NormalizedDiscountedCumulativeGain *float64 `json:"normalizedDiscountedCumulativeGain,omitempty"`
-
-	// Determines the goodness of a ranking by computing the percentile rank
-	// from the predicted confidence and dividing it by the original rank.
-	AverageRank *float64 `json:"averageRank,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.RegressionMetrics
-type Model_RegressionMetrics struct {
-	// Mean absolute error.
-	MeanAbsoluteError *float64 `json:"meanAbsoluteError,omitempty"`
-
-	// Mean squared error.
-	MeanSquaredError *float64 `json:"meanSquaredError,omitempty"`
-
-	// Mean squared log error.
-	MeanSquaredLogError *float64 `json:"meanSquaredLogError,omitempty"`
-
-	// Median absolute error.
-	MedianAbsoluteError *float64 `json:"medianAbsoluteError,omitempty"`
-
-	// R^2 score. This corresponds to r2_score in ML.EVALUATE.
-	RSquared *float64 `json:"rSquared,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.SeasonalPeriod
-type Model_SeasonalPeriod struct {
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.StringHparamSearchSpace
-type Model_StringHparamSearchSpace struct {
-	// Candidates for the string or enum parameter in lower case.
-	Candidates []string `json:"candidates,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun
-type Model_TrainingRun struct {
-	// Output only. Options that were used for this training run, includes
-	// user specified and default options that were used.
-	TrainingOptions *Model_TrainingRun_TrainingOptions `json:"trainingOptions,omitempty"`
-
-	// Output only. The start time of this training run.
-	StartTime *string `json:"startTime,omitempty"`
-
-	// Output only. Output of each iteration run, results.size() <=
-	// max_iterations.
-	Results []Model_TrainingRun_IterationResult `json:"results,omitempty"`
-
-	// Output only. The evaluation metrics over training/eval data that were
-	// computed at the end of training.
-	EvaluationMetrics *Model_EvaluationMetrics `json:"evaluationMetrics,omitempty"`
-
-	// Output only. Data split result of the training run. Only set when the
-	// input data is actually split.
-	DataSplitResult *Model_DataSplitResult `json:"dataSplitResult,omitempty"`
-
-	// Output only. Global explanation contains the explanation of top features
-	// on the model level. Applies to both regression and classification models.
-	ModelLevelGlobalExplanation *Model_GlobalExplanation `json:"modelLevelGlobalExplanation,omitempty"`
-
-	// Output only. Global explanation contains the explanation of top features
-	// on the class level. Applies to classification models only.
-	ClassLevelGlobalExplanations []Model_GlobalExplanation `json:"classLevelGlobalExplanations,omitempty"`
-
-	// The model id in the [Vertex AI Model
-	// Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction)
-	// for this training run.
-	VertexAiModelID *string `json:"vertexAiModelID,omitempty"`
-
-	// Output only. The model version in the [Vertex AI Model
-	// Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction)
-	// for this training run.
- VertexAiModelVersion *string `json:"vertexAiModelVersion,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult -type Model_TrainingRun_IterationResult struct { - // Index of the iteration, 0 based. - Index *int32 `json:"index,omitempty"` - - // Time taken to run the iteration in milliseconds. - DurationMs *int64 `json:"durationMs,omitempty"` - - // Loss computed on the training data at the end of iteration. - TrainingLoss *float64 `json:"trainingLoss,omitempty"` - - // Loss computed on the eval data at the end of iteration. - EvalLoss *float64 `json:"evalLoss,omitempty"` - - // Learn rate used for this iteration. - LearnRate *float64 `json:"learnRate,omitempty"` - - // Information about top clusters for clustering models. - ClusterInfos []Model_TrainingRun_IterationResult_ClusterInfo `json:"clusterInfos,omitempty"` - - // Arima result. - ArimaResult *Model_TrainingRun_IterationResult_ArimaResult `json:"arimaResult,omitempty"` - - // The information of the principal components. - PrincipalComponentInfos []Model_TrainingRun_IterationResult_PrincipalComponentInfo `json:"principalComponentInfos,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult -type Model_TrainingRun_IterationResult_ArimaResult struct { - // This message is repeated because there are multiple arima models - // fitted in auto-arima. For non-auto-arima model, its size is one. - ArimaModelInfo []Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo `json:"arimaModelInfo,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported for - // one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult.ArimaCoefficients -type Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients struct { - // Auto-regressive coefficients, an array of float64. - AutoRegressiveCoefficients []float64 `json:"autoRegressiveCoefficients,omitempty"` - - // Moving-average coefficients, an array of float64. - MovingAverageCoefficients []float64 `json:"movingAverageCoefficients,omitempty"` - - // Intercept coefficient, just a float64 not an array. - InterceptCoefficient *float64 `json:"interceptCoefficient,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult.ArimaModelInfo -type Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo struct { - // Non-seasonal order. - NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // Arima coefficients. - ArimaCoefficients *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients `json:"arimaCoefficients,omitempty"` - - // Arima fitting metrics. - ArimaFittingMetrics *Model_ArimaFittingMetrics `json:"arimaFittingMetrics,omitempty"` - - // Whether Arima model fitted with drift or not. It is always false - // when d is not 1. - HasDrift *bool `json:"hasDrift,omitempty"` - - // The time_series_id value for this time series. It will be one of - // the unique values from the time_series_id_column specified during - // ARIMA model training. Only present when time_series_id_column - // training option was used. - TimeSeriesID *string `json:"timeSeriesID,omitempty"` - - // The tuple of time_series_ids identifying this time series. It will - // be one of the unique tuples of values present in the - // time_series_id_columns specified during ARIMA model training. 
Only - // present when time_series_id_columns training option was used and - // the order of values here are same as the order of - // time_series_id_columns. - TimeSeriesIds []string `json:"timeSeriesIds,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported - // for one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` - - // If true, holiday_effect is a part of time series decomposition - // result. - HasHolidayEffect *bool `json:"hasHolidayEffect,omitempty"` - - // If true, spikes_and_dips is a part of time series decomposition - // result. - HasSpikesAndDips *bool `json:"hasSpikesAndDips,omitempty"` - - // If true, step_changes is a part of time series decomposition - // result. - HasStepChanges *bool `json:"hasStepChanges,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo -type Model_TrainingRun_IterationResult_ClusterInfo struct { - // Centroid id. - CentroidID *int64 `json:"centroidID,omitempty"` - - // Cluster radius, the average distance from centroid - // to each point assigned to the cluster. - ClusterRadius *float64 `json:"clusterRadius,omitempty"` - - // Cluster size, the total number of points assigned to the cluster. - ClusterSize *int64 `json:"clusterSize,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.PrincipalComponentInfo -type Model_TrainingRun_IterationResult_PrincipalComponentInfo struct { - // Id of the principal component. - PrincipalComponentID *int64 `json:"principalComponentID,omitempty"` - - // Explained variance by this principal component, which is simply the - // eigenvalue. - ExplainedVariance *float64 `json:"explainedVariance,omitempty"` - - // Explained_variance over the total explained variance. - ExplainedVarianceRatio *float64 `json:"explainedVarianceRatio,omitempty"` - - // The explained_variance is pre-ordered in the descending order to - // compute the cumulative explained variance ratio. - CumulativeExplainedVarianceRatio *float64 `json:"cumulativeExplainedVarianceRatio,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions -type Model_TrainingRun_TrainingOptions struct { - // The maximum number of iterations in training. Used only for iterative - // training algorithms. - MaxIterations *int64 `json:"maxIterations,omitempty"` - - // Type of loss function used during training run. - LossType *string `json:"lossType,omitempty"` - - // Learning rate in training. Used only for iterative training algorithms. - LearnRate *float64 `json:"learnRate,omitempty"` - - // L1 regularization coefficient. - L1Regularization *float64 `json:"l1Regularization,omitempty"` - - // L2 regularization coefficient. - L2Regularization *float64 `json:"l2Regularization,omitempty"` - - // When early_stop is true, stops training when accuracy improvement is - // less than 'min_relative_progress'. Used only for iterative training - // algorithms. - MinRelativeProgress *float64 `json:"minRelativeProgress,omitempty"` - - // Whether to train a model from the last checkpoint. - WarmStart *bool `json:"warmStart,omitempty"` - - // Whether to stop early when the loss doesn't improve significantly - // any more (compared to min_relative_progress). Used only for iterative - // training algorithms. - EarlyStop *bool `json:"earlyStop,omitempty"` - - // Name of input label columns in training data. 
- InputLabelColumns []string `json:"inputLabelColumns,omitempty"` - - // The data split type for training and evaluation, e.g. RANDOM. - DataSplitMethod *string `json:"dataSplitMethod,omitempty"` - - // The fraction of evaluation data over the whole input data. The rest - // of data will be used as training data. The format should be float64. - // Accurate to two decimal places. - // Default value is 0.2. - DataSplitEvalFraction *float64 `json:"dataSplitEvalFraction,omitempty"` - - // The column to split data with. This column won't be used as a - // feature. - // 1. When data_split_method is CUSTOM, the corresponding column should - // be boolean. The rows with true value tag are eval data, and the false - // are training data. - // 2. When data_split_method is SEQ, the first DATA_SPLIT_EVAL_FRACTION - // rows (from smallest to largest) in the corresponding column are used - // as training data, and the rest are eval data. It respects the order - // in Orderable data types: - // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type-properties - DataSplitColumn *string `json:"dataSplitColumn,omitempty"` - - // The strategy to determine learn rate for the current iteration. - LearnRateStrategy *string `json:"learnRateStrategy,omitempty"` - - // Specifies the initial learning rate for the line search learn rate - // strategy. - InitialLearnRate *float64 `json:"initialLearnRate,omitempty"` - - // TODO: map type string float64 for label_class_weights - - // User column specified for matrix factorization models. - UserColumn *string `json:"userColumn,omitempty"` - - // Item column specified for matrix factorization models. - ItemColumn *string `json:"itemColumn,omitempty"` - - // Distance type for clustering models. - DistanceType *string `json:"distanceType,omitempty"` - - // Number of clusters for clustering models. - NumClusters *int64 `json:"numClusters,omitempty"` - - // Google Cloud Storage URI from which the model was imported. Only - // applicable for imported models. - ModelUri *string `json:"modelUri,omitempty"` - - // Optimization strategy for training linear regression models. - OptimizationStrategy *string `json:"optimizationStrategy,omitempty"` - - // Hidden units for dnn models. - HiddenUnits []int64 `json:"hiddenUnits,omitempty"` - - // Batch size for dnn models. - BatchSize *int64 `json:"batchSize,omitempty"` - - // Dropout probability for dnn models. - Dropout *float64 `json:"dropout,omitempty"` - - // Maximum depth of a tree for boosted tree models. - MaxTreeDepth *int64 `json:"maxTreeDepth,omitempty"` - - // Subsample fraction of the training data to grow tree to prevent - // overfitting for boosted tree models. - Subsample *float64 `json:"subsample,omitempty"` - - // Minimum split loss for boosted tree models. - MinSplitLoss *float64 `json:"minSplitLoss,omitempty"` - - // Booster type for boosted tree models. - BoosterType *string `json:"boosterType,omitempty"` - - // Number of parallel trees constructed during each iteration for boosted - // tree models. - NumParallelTree *int64 `json:"numParallelTree,omitempty"` - - // Type of normalization algorithm for boosted tree models using - // dart booster. - DartNormalizeType *string `json:"dartNormalizeType,omitempty"` - - // Tree construction algorithm for boosted tree models. - TreeMethod *string `json:"treeMethod,omitempty"` - - // Minimum sum of instance weight needed in a child for boosted tree - // models. 
- MinTreeChildWeight *int64 `json:"minTreeChildWeight,omitempty"` - - // Subsample ratio of columns when constructing each tree for boosted tree - // models. - ColsampleBytree *float64 `json:"colsampleBytree,omitempty"` - - // Subsample ratio of columns for each level for boosted tree models. - ColsampleBylevel *float64 `json:"colsampleBylevel,omitempty"` - - // Subsample ratio of columns for each node(split) for boosted tree - // models. - ColsampleBynode *float64 `json:"colsampleBynode,omitempty"` - - // Num factors specified for matrix factorization models. - NumFactors *int64 `json:"numFactors,omitempty"` - - // Feedback type that specifies which algorithm to run for matrix - // factorization. - FeedbackType *string `json:"feedbackType,omitempty"` - - // Hyperparameter for matrix factoration when implicit feedback type is - // specified. - WalsAlpha *float64 `json:"walsAlpha,omitempty"` - - // The method used to initialize the centroids for kmeans algorithm. - KmeansInitializationMethod *string `json:"kmeansInitializationMethod,omitempty"` - - // The column used to provide the initial centroids for kmeans algorithm - // when kmeans_initialization_method is CUSTOM. - KmeansInitializationColumn *string `json:"kmeansInitializationColumn,omitempty"` - - // Column to be designated as time series timestamp for ARIMA model. - TimeSeriesTimestampColumn *string `json:"timeSeriesTimestampColumn,omitempty"` - - // Column to be designated as time series data for ARIMA model. - TimeSeriesDataColumn *string `json:"timeSeriesDataColumn,omitempty"` - - // Whether to enable auto ARIMA or not. - AutoArima *bool `json:"autoArima,omitempty"` - - // A specification of the non-seasonal part of the ARIMA model: the three - // components (p, d, q) are the AR order, the degree of differencing, and - // the MA order. - NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // The data frequency of a time series. - DataFrequency *string `json:"dataFrequency,omitempty"` - - // Whether or not p-value test should be computed for this model. Only - // available for linear and logistic regression models. - CalculatePValues *bool `json:"calculatePValues,omitempty"` - - // Include drift when fitting an ARIMA model. - IncludeDrift *bool `json:"includeDrift,omitempty"` - - // The geographical region based on which the holidays are considered in - // time series modeling. If a valid value is specified, then holiday - // effects modeling is enabled. - HolidayRegion *string `json:"holidayRegion,omitempty"` - - // A list of geographical regions that are used for time series modeling. - HolidayRegions []string `json:"holidayRegions,omitempty"` - - // The time series id column that was used during ARIMA model training. - TimeSeriesIDColumn *string `json:"timeSeriesIDColumn,omitempty"` - - // The time series id columns that were used during ARIMA model training. - TimeSeriesIDColumns []string `json:"timeSeriesIDColumns,omitempty"` - - // The number of periods ahead that need to be forecasted. - Horizon *int64 `json:"horizon,omitempty"` - - // The max value of the sum of non-seasonal p and q. - AutoArimaMaxOrder *int64 `json:"autoArimaMaxOrder,omitempty"` - - // The min value of the sum of non-seasonal p and q. - AutoArimaMinOrder *int64 `json:"autoArimaMinOrder,omitempty"` - - // Number of trials to run this hyperparameter tuning job. - NumTrials *int64 `json:"numTrials,omitempty"` - - // Maximum number of trials to run in parallel. 
- MaxParallelTrials *int64 `json:"maxParallelTrials,omitempty"` - - // The target evaluation metrics to optimize the hyperparameters for. - HparamTuningObjectives []string `json:"hparamTuningObjectives,omitempty"` - - // If true, perform decompose time series and save the results. - DecomposeTimeSeries *bool `json:"decomposeTimeSeries,omitempty"` - - // If true, clean spikes and dips in the input time series. - CleanSpikesAndDips *bool `json:"cleanSpikesAndDips,omitempty"` - - // If true, detect step changes and make data adjustment in the input time - // series. - AdjustStepChanges *bool `json:"adjustStepChanges,omitempty"` - - // If true, enable global explanation during training. - EnableGlobalExplain *bool `json:"enableGlobalExplain,omitempty"` - - // Number of paths for the sampled Shapley explain method. - SampledShapleyNumPaths *int64 `json:"sampledShapleyNumPaths,omitempty"` - - // Number of integral steps for the integrated gradients explain method. - IntegratedGradientsNumSteps *int64 `json:"integratedGradientsNumSteps,omitempty"` - - // Categorical feature encoding method. - CategoryEncodingMethod *string `json:"categoryEncodingMethod,omitempty"` - - // Based on the selected TF version, the corresponding docker image is - // used to train external models. - TfVersion *string `json:"tfVersion,omitempty"` - - // Enums for color space, used for processing images in Object Table. - // See more details at - // https://www.tensorflow.org/io/tutorials/colorspace. - ColorSpace *string `json:"colorSpace,omitempty"` - - // Name of the instance weight column for training data. - // This column isn't be used as a feature. - InstanceWeightColumn *string `json:"instanceWeightColumn,omitempty"` - - // Smoothing window size for the trend component. When a positive value is - // specified, a center moving average smoothing is applied on the history - // trend. When the smoothing window is out of the boundary at the - // beginning or the end of the trend, the first element or the last - // element is padded to fill the smoothing window before the average is - // applied. - TrendSmoothingWindowSize *int64 `json:"trendSmoothingWindowSize,omitempty"` - - // The fraction of the interpolated length of the time series that's used - // to model the time series trend component. All of the time points of the - // time series are used to model the non-trend component. This training - // option accelerates modeling training without sacrificing much - // forecasting accuracy. You can use this option with - // `minTimeSeriesLength` but not with `maxTimeSeriesLength`. - TimeSeriesLengthFraction *float64 `json:"timeSeriesLengthFraction,omitempty"` - - // The minimum number of time points in a time series that are used in - // modeling the trend component of the time series. If you use this option - // you must also set the `timeSeriesLengthFraction` option. This training - // option ensures that enough time points are available when you use - // `timeSeriesLengthFraction` in trend modeling. This is particularly - // important when forecasting multiple time series in a single query using - // `timeSeriesIdColumn`. If the total number of time points is less than - // the `minTimeSeriesLength` value, then the query uses all available time - // points. - MinTimeSeriesLength *int64 `json:"minTimeSeriesLength,omitempty"` - - // The maximum number of time points in a time series that can be used in - // modeling the trend component of the time series. 
Don't use this option - // with the `timeSeriesLengthFraction` or `minTimeSeriesLength` options. - MaxTimeSeriesLength *int64 `json:"maxTimeSeriesLength,omitempty"` - - // User-selected XGBoost versions for training of XGBoost models. - XgboostVersion *string `json:"xgboostVersion,omitempty"` - - // Whether to use approximate feature contribution method in XGBoost model - // explanation for global explain. - ApproxGlobalFeatureContrib *bool `json:"approxGlobalFeatureContrib,omitempty"` - - // Whether the model should include intercept during model training. - FitIntercept *bool `json:"fitIntercept,omitempty"` - - // Number of principal components to keep in the PCA model. Must be <= the - // number of features. - NumPrincipalComponents *int64 `json:"numPrincipalComponents,omitempty"` - - // The minimum ratio of cumulative explained variance that needs to be - // given by the PCA model. - PcaExplainedVarianceRatio *float64 `json:"pcaExplainedVarianceRatio,omitempty"` - - // If true, scale the feature values by dividing the feature standard - // deviation. Currently only apply to PCA. - ScaleFeatures *bool `json:"scaleFeatures,omitempty"` - - // The solver for PCA. - PcaSolver *string `json:"pcaSolver,omitempty"` - - // Whether to calculate class weights automatically based on the - // popularity of each label. - AutoClassWeights *bool `json:"autoClassWeights,omitempty"` - - // Activation function of the neural nets. - ActivationFn *string `json:"activationFn,omitempty"` - - // Optimizer used for training the neural nets. - Optimizer *string `json:"optimizer,omitempty"` - - // Budget in hours for AutoML training. - BudgetHours *float64 `json:"budgetHours,omitempty"` - - // Whether to standardize numerical features. Default to true. - StandardizeFeatures *bool `json:"standardizeFeatures,omitempty"` - - // L1 regularization coefficient to activations. - L1RegActivation *float64 `json:"l1RegActivation,omitempty"` - - // The model registry. - ModelRegistry *string `json:"modelRegistry,omitempty"` - - // The version aliases to apply in Vertex AI model registry. Always - // overwrite if the version aliases exists in a existing model. - VertexAiModelVersionAliases []string `json:"vertexAiModelVersionAliases,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ModelReference -type ModelReference struct { - // Required. The ID of the project containing this model. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the dataset containing this model. - DatasetID *string `json:"datasetID,omitempty"` - - // Required. The ID of the model. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 1,024 characters. - ModelID *string `json:"modelID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ParquetOptions -type ParquetOptions struct { - // Optional. Indicates whether to infer Parquet ENUM logical type as STRING - // instead of BYTES by default. - EnumAsString *bool `json:"enumAsString,omitempty"` - - // Optional. Indicates whether to use schema inference specifically for - // Parquet LIST logical type. - EnableListInference *bool `json:"enableListInference,omitempty"` - - // Optional. Indicates how to represent a Parquet map if present. - MapTargetType *string `json:"mapTargetType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionSkew -type PartitionSkew struct { - // Output only. Source stages which produce skewed data. 
- SkewSources []PartitionSkew_SkewSource `json:"skewSources,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionSkew.SkewSource -type PartitionSkew_SkewSource struct { - // Output only. Stage id of the skew source stage. - StageID *int64 `json:"stageID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionedColumn -type PartitionedColumn struct { - // Required. The name of the partition column. - Field *string `json:"field,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitioningDefinition -type PartitioningDefinition struct { - // Optional. Details about each partitioning column. This field is output only - // for all partitioning types other than metastore partitioned tables. - // BigQuery native tables only support 1 partitioning column. Other table - // types may support 0, 1 or more partitioning columns. - // For metastore partitioned tables, the order must match the definition order - // in the Hive Metastore, where it must match the physical layout of the - // table. For example, - // - // CREATE TABLE a_table(id BIGINT, name STRING) - // PARTITIONED BY (city STRING, state STRING). - // - // In this case the values must be ['city', 'state'] in that order. - PartitionedColumn []PartitionedColumn `json:"partitionedColumn,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PerformanceInsights -type PerformanceInsights struct { - // Output only. Average execution ms of previous runs. Indicates the job ran - // slow compared to previous executions. To find previous executions, use - // INFORMATION_SCHEMA tables and filter jobs with same query hash. - AvgPreviousExecutionMs *int64 `json:"avgPreviousExecutionMs,omitempty"` - - // Output only. Standalone query stage performance insights, for exploring - // potential improvements. - StagePerformanceStandaloneInsights []StagePerformanceStandaloneInsight `json:"stagePerformanceStandaloneInsights,omitempty"` - - // Output only. Query stage performance insights compared to previous runs, - // for diagnosing performance regression. - StagePerformanceChangeInsights []StagePerformanceChangeInsight `json:"stagePerformanceChangeInsights,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PrimaryKey -type PrimaryKey struct { - // Required. The columns that are composed of the primary key constraint. - Columns []string `json:"columns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PrivacyPolicy -type PrivacyPolicy struct { - // Optional. Policy used for aggregation thresholds. - AggregationThresholdPolicy *AggregationThresholdPolicy `json:"aggregationThresholdPolicy,omitempty"` - - // Optional. Policy used for differential privacy. - DifferentialPrivacyPolicy *DifferentialPrivacyPolicy `json:"differentialPrivacyPolicy,omitempty"` - - // Optional. Join restriction policy is outside of the one of policies, since - // this policy can be set along with other policies. This policy gives data - // providers the ability to enforce joins on the 'join_allowed_columns' when - // data is queried from a privacy protected view. - JoinRestrictionPolicy *JoinRestrictionPolicy `json:"joinRestrictionPolicy,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryInfo -type QueryInfo struct { - // Output only. Information about query optimizations. - OptimizationDetails *google_protobuf_Struct `json:"optimizationDetails,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameter -type QueryParameter struct { - // Optional. If unset, this is a positional parameter. 
Otherwise, should be - // unique within a query. - Name *string `json:"name,omitempty"` - - // Required. The type of this parameter. - ParameterType *QueryParameterType `json:"parameterType,omitempty"` - - // Required. The value of this parameter. - ParameterValue *QueryParameterValue `json:"parameterValue,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterStructType -type QueryParameterStructType struct { - // Optional. The name of this field. - Name *string `json:"name,omitempty"` - - // Required. The type of this field. - Type *QueryParameterType `json:"type,omitempty"` - - // Optional. Human-oriented description of the field. - Description *string `json:"description,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterType -type QueryParameterType struct { - // Required. The top level type of this field. - Type *string `json:"type,omitempty"` - - // Optional. The type of the array's elements, if this is an array. - ArrayType *QueryParameterType `json:"arrayType,omitempty"` - - // Optional. The types of the fields of this struct, in order, if this is a - // struct. - StructTypes []QueryParameterStructType `json:"structTypes,omitempty"` - - // Optional. The element type of the range, if this is a range. - RangeElementType *QueryParameterType `json:"rangeElementType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterValue -type QueryParameterValue struct { - // Optional. The value of this value, if a simple scalar type. - Value *string `json:"value,omitempty"` - - // Optional. The array values, if this is an array type. - ArrayValues []QueryParameterValue `json:"arrayValues,omitempty"` - - // TODO: map type string message for struct_values - - // Optional. The range value, if this is a range type. - RangeValue *RangeValue `json:"rangeValue,omitempty"` - - // This field should not be used. - AltStructValues []google_protobuf_Value `json:"altStructValues,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryTimelineSample -type QueryTimelineSample struct { - // Milliseconds elapsed since the start of query execution. - ElapsedMs *int64 `json:"elapsedMs,omitempty"` - - // Cumulative slot-ms consumed by the query. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Total units of work remaining for the query. This number can be revised - // (increased or decreased) while the query is running. - PendingUnits *int64 `json:"pendingUnits,omitempty"` - - // Total parallel units of work completed by this query. - CompletedUnits *int64 `json:"completedUnits,omitempty"` - - // Total number of active workers. This does not correspond directly to - // slot usage. This is the largest value observed since the last sample. - ActiveUnits *int64 `json:"activeUnits,omitempty"` - - // Units of work that can be scheduled immediately. Providing additional slots - // for these units of work will accelerate the query, if no other query in - // the reservation needs additional slots. - EstimatedRunnableUnits *int64 `json:"estimatedRunnableUnits,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangePartitioning -type RangePartitioning struct { - // Required. The name of the column to partition the table on. It must be a - // top-level, INT64 column whose mode is NULLABLE or REQUIRED. - Field *string `json:"field,omitempty"` - - // Defines the ranges for range partitioning. 
- Range *RangePartitioning_Range `json:"range,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangePartitioning.Range -type RangePartitioning_Range struct { - // Required. The start of range partitioning, inclusive. This field is an - // INT64 value represented as a string. - Start *string `json:"start,omitempty"` - - // Required. The end of range partitioning, exclusive. This field is an - // INT64 value represented as a string. - End *string `json:"end,omitempty"` - - // Required. The width of each interval. This field is an INT64 value - // represented as a string. - Interval *string `json:"interval,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangeValue -type RangeValue struct { - // Optional. The start value of the range. A missing value represents an - // unbounded start. - Start *QueryParameterValue `json:"start,omitempty"` - - // Optional. The end value of the range. A missing value represents an - // unbounded end. - End *QueryParameterValue `json:"end,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RemoteModelInfo -type RemoteModelInfo struct { - // Output only. The endpoint for remote model. - Endpoint *string `json:"endpoint,omitempty"` - - // Output only. The remote service type for remote model. - RemoteServiceType *string `json:"remoteServiceType,omitempty"` - - // Output only. Fully qualified name of the user-provided connection object of - // the remote model. Format: - // ```"projects/{project_id}/locations/{location_id}/connections/{connection_id}"``` - Connection *string `json:"connection,omitempty"` - - // Output only. Max number of rows in each batch sent to the remote service. - // If unset, the number of rows in each batch is set dynamically. - MaxBatchingRows *int64 `json:"maxBatchingRows,omitempty"` - - // Output only. The model version for LLM. - RemoteModelVersion *string `json:"remoteModelVersion,omitempty"` - - // Output only. The name of the speech recognizer to use for speech - // recognition. The expected format is - // `projects/{project}/locations/{location}/recognizers/{recognizer}`. - // Customers can specify this field at model creation. If not specified, a - // default recognizer `projects/{model - // project}/locations/global/recognizers/_` will be used. See more details at - // [recognizers](https://cloud.google.com/speech-to-text/v2/docs/reference/rest/v2/projects.locations.recognizers) - SpeechRecognizer *string `json:"speechRecognizer,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RestrictionConfig -type RestrictionConfig struct { - // Output only. Specifies the type of dataset/table restriction. - Type *string `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine -type Routine struct { - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Required. Reference describing the ID of this routine. - RoutineReference *RoutineReference `json:"routineReference,omitempty"` - - // Required. The type of routine. - RoutineType *string `json:"routineType,omitempty"` - - // Output only. The time when this routine was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The time when this routine was last modified, in milliseconds - // since the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // Optional. Defaults to "SQL" if remote_function_options field is absent, not - // set otherwise. - Language *string `json:"language,omitempty"` - - // Optional. 
- Arguments []Routine_Argument `json:"arguments,omitempty"` - - // Optional if language = "SQL"; required otherwise. - // Cannot be set if routine_type = "TABLE_VALUED_FUNCTION". - // - // If absent, the return type is inferred from definition_body at query time - // in each query that references this routine. If present, then the evaluated - // result will be cast to the specified returned type at query time. - // - // For example, for the functions created with the following statements: - // - // * `CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);` - // - // * `CREATE FUNCTION Increment(x FLOAT64) AS (Add(x, 1));` - // - // * `CREATE FUNCTION Decrement(x FLOAT64) RETURNS FLOAT64 AS (Add(x, -1));` - // - // The return_type is `{type_kind: "FLOAT64"}` for `Add` and `Decrement`, and - // is absent for `Increment` (inferred as FLOAT64 at query time). - // - // Suppose the function `Add` is replaced by - // `CREATE OR REPLACE FUNCTION Add(x INT64, y INT64) AS (x + y);` - // - // Then the inferred return type of `Increment` is automatically changed to - // INT64 at query time, while the return type of `Decrement` remains FLOAT64. - ReturnType *StandardSqlDataType `json:"returnType,omitempty"` - - // Optional. Can be set only if routine_type = "TABLE_VALUED_FUNCTION". - // - // If absent, the return table type is inferred from definition_body at query - // time in each query that references this routine. If present, then the - // columns in the evaluated table result will be cast to match the column - // types specified in return table type, at query time. - ReturnTableType *StandardSqlTableType `json:"returnTableType,omitempty"` - - // Optional. If language = "JAVASCRIPT", this field stores the path of the - // imported JAVASCRIPT libraries. - ImportedLibraries []string `json:"importedLibraries,omitempty"` - - // Required. The body of the routine. - // - // For functions, this is the expression in the AS clause. - // - // If language=SQL, it is the substring inside (but excluding) the - // parentheses. For example, for the function created with the following - // statement: - // - // `CREATE FUNCTION JoinLines(x string, y string) as (concat(x, "\n", y))` - // - // The definition_body is `concat(x, "\n", y)` (\n is not replaced with - // linebreak). - // - // If language=JAVASCRIPT, it is the evaluated string in the AS clause. - // For example, for the function created with the following statement: - // - // `CREATE FUNCTION f() RETURNS STRING LANGUAGE js AS 'return "\n";\n'` - // - // The definition_body is - // - // `return "\n";\n` - // - // Note that both \n are replaced with linebreaks. - DefinitionBody *string `json:"definitionBody,omitempty"` - - // Optional. The description of the routine, if defined. - Description *string `json:"description,omitempty"` - - // Optional. The determinism level of the JavaScript UDF, if defined. - DeterminismLevel *string `json:"determinismLevel,omitempty"` - - // Optional. The security mode of the routine, if defined. If not defined, the - // security mode is automatically determined from the routine's configuration. - SecurityMode *string `json:"securityMode,omitempty"` - - // Optional. Use this option to catch many common errors. Error checking is - // not exhaustive, and successfully creating a procedure doesn't guarantee - // that the procedure will successfully execute at runtime. If `strictMode` is - // set to `TRUE`, the procedure body is further checked for errors such as - // non-existent tables or columns. 
The `CREATE PROCEDURE` statement fails if - // the body fails any of these checks. - // - // If `strictMode` is set to `FALSE`, the procedure body is checked only for - // syntax. For procedures that invoke themselves recursively, specify - // `strictMode=FALSE` to avoid non-existent procedure errors during - // validation. - // - // Default value is `TRUE`. - StrictMode *bool `json:"strictMode,omitempty"` - - // Optional. Remote function specific options. - RemoteFunctionOptions *Routine_RemoteFunctionOptions `json:"remoteFunctionOptions,omitempty"` - - // Optional. Spark specific options. - SparkOptions *SparkOptions `json:"sparkOptions,omitempty"` - - // Optional. If set to `DATA_MASKING`, the function is validated and made - // available as a masking function. For more information, see [Create custom - // masking - // routines](https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask). - DataGovernanceType *string `json:"dataGovernanceType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine.Argument -type Routine_Argument struct { - // Optional. The name of this argument. Can be absent for function return - // argument. - Name *string `json:"name,omitempty"` - - // Optional. Defaults to FIXED_TYPE. - ArgumentKind *string `json:"argumentKind,omitempty"` - - // Optional. Specifies whether the argument is input or output. - // Can be set for procedures only. - Mode *string `json:"mode,omitempty"` - - // Required unless argument_kind = ANY_TYPE. - DataType *StandardSqlDataType `json:"dataType,omitempty"` - - // Optional. Whether the argument is an aggregate function parameter. - // Must be Unset for routine types other than AGGREGATE_FUNCTION. - // For AGGREGATE_FUNCTION, if set to false, it is equivalent to adding "NOT - // AGGREGATE" clause in DDL; Otherwise, it is equivalent to omitting "NOT - // AGGREGATE" clause in DDL. - IsAggregate *bool `json:"isAggregate,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine.RemoteFunctionOptions -type Routine_RemoteFunctionOptions struct { - // Endpoint of the user-provided remote service, e.g. - // ```https://us-east1-my_gcf_project.cloudfunctions.net/remote_add``` - Endpoint *string `json:"endpoint,omitempty"` - - // Fully qualified name of the user-provided connection object which holds - // the authentication information to send requests to the remote service. - // Format: - // ```"projects/{projectId}/locations/{locationId}/connections/{connectionId}"``` - Connection *string `json:"connection,omitempty"` - - // User-defined context as a set of key/value pairs, which will be sent as - // function invocation context together with batched arguments in the - // requests to the remote service. The total number of bytes of keys and - // values must be less than 8KB. - UserDefinedContext map[string]string `json:"userDefinedContext,omitempty"` - - // Max number of rows in each batch sent to the remote service. - // If absent or if 0, BigQuery dynamically decides the number of rows in a - // batch. - MaxBatchingRows *int64 `json:"maxBatchingRows,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RoutineReference -type RoutineReference struct { - // Required. The ID of the project containing this routine. - ProjectId *string `json:"projectId"` - - // Required. The ID of the dataset containing this routine. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the routine. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). 
The maximum - // length is 256 characters. - RoutineId *string `json:"routineId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowAccessPolicy -type RowAccessPolicy struct { - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Required. Reference describing the ID of this row access policy. - RowAccessPolicyReference *RowAccessPolicyReference `json:"rowAccessPolicyReference,omitempty"` - - // Required. A SQL boolean expression that represents the rows defined by this - // row access policy, similar to the boolean expression in a WHERE clause of a - // SELECT query on a table. - // References to other tables, routines, and temporary functions are not - // supported. - // - // Examples: region="EU" - // date_field = CAST('2019-9-27' as DATE) - // nullable_field is not NULL - // numeric_field BETWEEN 1.0 AND 5.0 - FilterPredicate *string `json:"filterPredicate,omitempty"` - - // Output only. The time when this row access policy was created, in - // milliseconds since the epoch. - CreationTime *string `json:"creationTime,omitempty"` - - // Output only. The time when this row access policy was last modified, in - // milliseconds since the epoch. - LastModifiedTime *string `json:"lastModifiedTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowAccessPolicyReference -type RowAccessPolicyReference struct { - // Required. The ID of the project containing this row access policy. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the dataset containing this row access policy. - DatasetID *string `json:"datasetID,omitempty"` - - // Required. The ID of the table containing this row access policy. - TableID *string `json:"tableID,omitempty"` - - // Required. The ID of the row access policy. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 256 characters. - PolicyID *string `json:"policyID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowLevelSecurityStatistics -type RowLevelSecurityStatistics struct { - // Whether any accessed data was protected by row access policies. - RowLevelSecurityApplied *bool `json:"rowLevelSecurityApplied,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptOptions -type ScriptOptions struct { - // Timeout period for each statement in a script. - StatementTimeoutMs *int64 `json:"statementTimeoutMs,omitempty"` - - // Limit on the number of bytes billed per statement. Exceeding this budget - // results in an error. - StatementByteBudget *int64 `json:"statementByteBudget,omitempty"` - - // Determines which statement in the script represents the "key result", - // used to populate the schema and query results of the script job. - // Default is LAST. - KeyResultStatement *string `json:"keyResultStatement,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptStatistics -type ScriptStatistics struct { - // Whether this child job was a statement or expression. - EvaluationKind *string `json:"evaluationKind,omitempty"` - - // Stack trace showing the line/column/procedure name of each frame on the - // stack at the point where the current evaluation happened. The leaf frame - // is first, the primary script is last. Never empty. - StackFrames []ScriptStatistics_ScriptStackFrame `json:"stackFrames,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptStatistics.ScriptStackFrame -type ScriptStatistics_ScriptStackFrame struct { - // Output only. One-based start line. 
- StartLine *int32 `json:"startLine,omitempty"` - - // Output only. One-based start column. - StartColumn *int32 `json:"startColumn,omitempty"` - - // Output only. One-based end line. - EndLine *int32 `json:"endLine,omitempty"` - - // Output only. One-based end column. - EndColumn *int32 `json:"endColumn,omitempty"` - - // Output only. Name of the active procedure, empty if in a top-level - // script. - ProcedureID *string `json:"procedureID,omitempty"` - - // Output only. Text of the current statement/expression. - Text *string `json:"text,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SearchStatistics -type SearchStatistics struct { - // Specifies the index usage mode for the query. - IndexUsageMode *string `json:"indexUsageMode,omitempty"` - - // When `indexUsageMode` is `UNUSED` or `PARTIALLY_USED`, this field explains - // why indexes were not used in all or part of the search query. If - // `indexUsageMode` is `FULLY_USED`, this field is not populated. - IndexUnusedReasons []IndexUnusedReason `json:"indexUnusedReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SerDeInfo -type SerDeInfo struct { - // Optional. Name of the SerDe. - // The maximum length is 256 characters. - Name *string `json:"name,omitempty"` - - // Required. Specifies a fully-qualified class name of the serialization - // library that is responsible for the translation of data between table - // representation and the underlying low-level input and output format - // structures. The maximum length is 256 characters. - SerializationLibrary *string `json:"serializationLibrary,omitempty"` - - // Optional. Key-value pairs that define the initialization parameters for the - // serialization library. - // Maximum size 10 Kib. - Parameters map[string]string `json:"parameters,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SessionInfo -type SessionInfo struct { - // Output only. The id of the session. - SessionID *string `json:"sessionID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SnapshotDefinition -type SnapshotDefinition struct { - // Required. Reference describing the ID of the table that was snapshot. - BaseTableReference *TableReference `json:"baseTableReference,omitempty"` - - // Required. The time at which the base table was snapshot. This value is - // reported in the JSON response using RFC3339 format. - SnapshotTime *string `json:"snapshotTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SparkOptions -type SparkOptions struct { - // Fully qualified name of the user-provided Spark connection object. Format: - // ```"projects/{project_id}/locations/{location_id}/connections/{connection_id}"``` - Connection *string `json:"connection,omitempty"` - - // Runtime version. If not specified, the default runtime version is used. - RuntimeVersion *string `json:"runtimeVersion,omitempty"` - - // Custom container image for the runtime environment. - ContainerImage *string `json:"containerImage,omitempty"` - - // Configuration properties as a set of key/value pairs, which will be passed - // on to the Spark application. For more information, see - // [Apache Spark](https://spark.apache.org/docs/latest/index.html) and the - // [procedure option - // list](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#procedure_option_list). - Properties map[string]string `json:"properties,omitempty"` - - // The main file/jar URI of the Spark application. 
Exactly one of the - // definition_body field and the main_file_uri field must be set for Python. - // Exactly one of main_class and main_file_uri field - // should be set for Java/Scala language type. - MainFileUri *string `json:"mainFileUri,omitempty"` - - // Python files to be placed on the PYTHONPATH for PySpark application. - // Supported file types: `.py`, `.egg`, and `.zip`. For more information - // about Apache Spark, see - // [Apache Spark](https://spark.apache.org/docs/latest/index.html). - PyFileUris []string `json:"pyFileUris,omitempty"` - - // JARs to include on the driver and executor CLASSPATH. - // For more information about Apache Spark, see - // [Apache Spark](https://spark.apache.org/docs/latest/index.html). - JarUris []string `json:"jarUris,omitempty"` - - // Files to be placed in the working directory of each executor. - // For more information about Apache Spark, see - // [Apache Spark](https://spark.apache.org/docs/latest/index.html). - FileUris []string `json:"fileUris,omitempty"` - - // Archive files to be extracted into the working directory of each executor. - // For more information about Apache Spark, see - // [Apache Spark](https://spark.apache.org/docs/latest/index.html). - ArchiveUris []string `json:"archiveUris,omitempty"` - - // The fully qualified name of a class in jar_uris, for example, - // com.example.wordcount. Exactly one of main_class and main_jar_uri field - // should be set for Java/Scala language type. - MainClass *string `json:"mainClass,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SparkStatistics -type SparkStatistics struct { - // Output only. Spark job ID if a Spark job is created successfully. - SparkJobID *string `json:"sparkJobID,omitempty"` - - // Output only. Location where the Spark job is executed. - // A location is selected by BigQueury for jobs configured to run in a - // multi-region. - SparkJobLocation *string `json:"sparkJobLocation,omitempty"` - - // Output only. Endpoints returned from Dataproc. - // Key list: - // - history_server_endpoint: A link to Spark job UI. - Endpoints map[string]string `json:"endpoints,omitempty"` - - // Output only. Logging info is used to generate a link to Cloud Logging. - LoggingInfo *SparkStatistics_LoggingInfo `json:"loggingInfo,omitempty"` - - // Output only. The Cloud KMS encryption key that is used to protect the - // resources created by the Spark job. If the Spark procedure uses the invoker - // security mode, the Cloud KMS encryption key is either inferred from the - // provided system variable, - // `@@spark_proc_properties.kms_key_name`, or the default key of the BigQuery - // job's project (if the CMEK organization policy is enforced). Otherwise, the - // Cloud KMS key is either inferred from the Spark connection associated with - // the procedure (if it is provided), or from the default key of the Spark - // connection's project if the CMEK organization policy is enforced. - // - // Example: - // - // * `projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]` - KmsKeyName *string `json:"kmsKeyName,omitempty"` - - // Output only. The Google Cloud Storage bucket that is used as the default - // file system by the Spark application. This field is only filled when the - // Spark procedure uses the invoker security mode. The `gcsStagingBucket` - // bucket is inferred from the `@@spark_proc_properties.staging_bucket` system - // variable (if it is provided). 
Otherwise, BigQuery creates a default staging - // bucket for the job and returns the bucket name in this field. - // - // Example: - // - // * `gs://[bucket_name]` - GcsStagingBucket *string `json:"gcsStagingBucket,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SparkStatistics.LoggingInfo -type SparkStatistics_LoggingInfo struct { - // Output only. Resource type used for logging. - ResourceType *string `json:"resourceType,omitempty"` - - // Output only. Project ID where the Spark logs were written. - ProjectID *string `json:"projectID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StagePerformanceChangeInsight -type StagePerformanceChangeInsight struct { - // Output only. The stage id that the insight mapped to. - StageID *int64 `json:"stageID,omitempty"` - - // Output only. Input data change insight of the query stage. - InputDataChange *InputDataChange `json:"inputDataChange,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StagePerformanceStandaloneInsight -type StagePerformanceStandaloneInsight struct { - // Output only. The stage id that the insight mapped to. - StageID *int64 `json:"stageID,omitempty"` - - // Output only. True if the stage has a slot contention issue. - SlotContention *bool `json:"slotContention,omitempty"` - - // Output only. True if the stage has insufficient shuffle quota. - InsufficientShuffleQuota *bool `json:"insufficientShuffleQuota,omitempty"` - - // Output only. If present, the stage had the following reasons for being - // disqualified from BI Engine execution. - BiEngineReasons []BiEngineReason `json:"biEngineReasons,omitempty"` - - // Output only. High cardinality joins in the stage. - HighCardinalityJoins []HighCardinalityJoin `json:"highCardinalityJoins,omitempty"` - - // Output only. Partition skew in the stage. - PartitionSkew *PartitionSkew `json:"partitionSkew,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlDataType -type StandardSqlDataType struct { - // Required. The top level type of this field. - // Can be any GoogleSQL data type (e.g., "INT64", "DATE", "ARRAY"). - TypeKind *string `json:"typeKind,omitempty"` - - // The type of the array's elements, if type_kind = "ARRAY". - ArrayElementType *StandardSqlDataType `json:"arrayElementType,omitempty"` - - // The fields of this struct, in order, if type_kind = "STRUCT". - StructType *StandardSqlStructType `json:"structType,omitempty"` - - // The type of the range's elements, if type_kind = "RANGE". - RangeElementType *StandardSqlDataType `json:"rangeElementType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlField -type StandardSqlField struct { - // Optional. The name of this field. Can be absent for struct fields. - Name *string `json:"name,omitempty"` - - // Optional. The type of this parameter. Absent if not explicitly - // specified (e.g., CREATE FUNCTION statement can omit the return type; - // in this case the output parameter does not have this "type" field). - Type *StandardSqlDataType `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlStructType -type StandardSqlStructType struct { - // Fields within the struct. - Fields []StandardSqlField `json:"fields,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlTableType -type StandardSqlTableType struct { - // The columns in this table type - Columns []StandardSqlField `json:"columns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StorageDescriptor -type StorageDescriptor struct { - // Optional. 
The physical location of the table - // (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or - // 'gs://spark-dataproc-data/pangea-data/*'). - // The maximum length is 2056 bytes. - LocationUri *string `json:"locationUri,omitempty"` - - // Optional. Specifies the fully qualified class name of the InputFormat - // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). - // The maximum length is 128 characters. - InputFormat *string `json:"inputFormat,omitempty"` - - // Optional. Specifies the fully qualified class name of the OutputFormat - // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). - // The maximum length is 128 characters. - OutputFormat *string `json:"outputFormat,omitempty"` - - // Optional. Serializer and deserializer information. - SerdeInfo *SerDeInfo `json:"serdeInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Streamingbuffer -type Streamingbuffer struct { - // Output only. A lower-bound estimate of the number of bytes currently in - // the streaming buffer. - EstimatedBytes *uint64 `json:"estimatedBytes,omitempty"` - - // Output only. A lower-bound estimate of the number of rows currently in the - // streaming buffer. - EstimatedRows *uint64 `json:"estimatedRows,omitempty"` - - // Output only. Contains the timestamp of the oldest entry in the streaming - // buffer, in milliseconds since the epoch, if the streaming buffer is - // available. - OldestEntryTime *uint64 `json:"oldestEntryTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SystemVariables -type SystemVariables struct { - - // TODO: map type string message for types - - // Output only. Value for each system variable. - Values *google_protobuf_Struct `json:"values,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Table -type Table struct { - // The type of resource ID. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Output only. An opaque ID uniquely identifying the table. - ID *string `json:"id,omitempty"` - - // Output only. A URL that can be used to access this resource again. - SelfLink *string `json:"selfLink,omitempty"` - - // Required. Reference describing the ID of this table. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Optional. A descriptive name for this table. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. A user-friendly description of this table. - Description *string `json:"description,omitempty"` - - // The labels associated with this table. You can use these to organize and - // group your tables. Label keys and values can be no longer than 63 - // characters, can only contain lowercase letters, numeric characters, - // underscores and dashes. International characters are allowed. Label values - // are optional. Label keys must start with a letter and each label in the - // list must have a different key. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. Describes the schema of this table. - Schema *TableSchema `json:"schema,omitempty"` - - // If specified, configures time-based partitioning for this table. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // If specified, configures range partitioning for this table. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the table. 
Must be specified with time-based - // partitioning, data in the table will be first partitioned and subsequently - // clustered. - Clustering *Clustering `json:"clustering,omitempty"` - - // Optional. If set to true, queries over this table require - // a partition filter that can be used for partition elimination to be - // specified. - RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"` - - // Optional. The partition information for all table formats, including - // managed partitioned tables, hive partitioned tables, iceberg partitioned, - // and metastore partitioned tables. This field is only populated for - // metastore partitioned tables. For other table formats, this is an output - // only field. - PartitionDefinition *PartitioningDefinition `json:"partitionDefinition,omitempty"` - - // Output only. The size of this table in logical bytes, excluding any data in - // the streaming buffer. - NumBytes *int64 `json:"numBytes,omitempty"` - - // Output only. The physical size of this table in bytes. This includes - // storage used for time travel. - NumPhysicalBytes *int64 `json:"numPhysicalBytes,omitempty"` - - // Output only. The number of logical bytes in the table that are considered - // "long-term storage". - NumLongTermBytes *int64 `json:"numLongTermBytes,omitempty"` - - // Output only. The number of rows of data in this table, excluding any data - // in the streaming buffer. - NumRows *uint64 `json:"numRows,omitempty"` - - // Output only. The time when this table was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Optional. The time when this table expires, in milliseconds since the - // epoch. If not present, the table will persist indefinitely. Expired tables - // will be deleted and their storage reclaimed. The defaultTableExpirationMs - // property of the encapsulating dataset can be used to set a default - // expirationTime on newly created tables. - ExpirationTime *int64 `json:"expirationTime,omitempty"` - - // Output only. The time when this table was last modified, in milliseconds - // since the epoch. - LastModifiedTime *uint64 `json:"lastModifiedTime,omitempty"` - - // Output only. Describes the table type. The following values are supported: - // - // * `TABLE`: A normal BigQuery table. - // * `VIEW`: A virtual table defined by a SQL query. - // * `EXTERNAL`: A table that references data stored in an external storage - // system, such as Google Cloud Storage. - // * `MATERIALIZED_VIEW`: A precomputed view defined by a SQL query. - // * `SNAPSHOT`: An immutable BigQuery table that preserves the contents of a - // base table at a particular time. See additional information on - // [table - // snapshots](https://cloud.google.com/bigquery/docs/table-snapshots-intro). - // - // The default value is `TABLE`. - Type *string `json:"type,omitempty"` - - // Optional. The view definition. - View *ViewDefinition `json:"view,omitempty"` - - // Optional. The materialized view definition. - MaterializedView *MaterializedViewDefinition `json:"materializedView,omitempty"` - - // Output only. The materialized view status. - MaterializedViewStatus *MaterializedViewStatus `json:"materializedViewStatus,omitempty"` - - // Optional. Describes the data format, location, and other properties of - // a table stored outside of BigQuery. By defining these properties, the data - // source can then be queried as if it were a standard BigQuery table. 
- ExternalDataConfiguration *ExternalDataConfiguration `json:"externalDataConfiguration,omitempty"` - - // Optional. Specifies the configuration of a BigLake managed table. - BiglakeConfiguration *BigLakeConfiguration `json:"biglakeConfiguration,omitempty"` - - // Output only. The geographic location where the table resides. This value - // is inherited from the dataset. - Location *string `json:"location,omitempty"` - - // Output only. Contains information regarding this table's streaming buffer, - // if one is present. This field will be absent if the table is not being - // streamed to or if there is no data in the streaming buffer. - StreamingBuffer *Streamingbuffer `json:"streamingBuffer,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). - EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"` - - // Output only. Contains information about the snapshot. This value is set via - // snapshot creation. - SnapshotDefinition *SnapshotDefinition `json:"snapshotDefinition,omitempty"` - - // Optional. Defines the default collation specification of new STRING fields - // in the table. During table creation or update, if a STRING field is added - // to this table without explicit collation specified, then the table inherits - // the table default collation. A change to this field affects only fields - // added afterwards, and does not alter the existing fields. - // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - DefaultCollation *string `json:"defaultCollation,omitempty"` - - // Optional. Defines the default rounding mode specification of new decimal - // fields (NUMERIC OR BIGNUMERIC) in the table. During table creation or - // update, if a decimal field is added to this table without an explicit - // rounding mode specified, then the field inherits the table default - // rounding mode. Changing this field doesn't affect existing fields. - DefaultRoundingMode *string `json:"defaultRoundingMode,omitempty"` - - // Output only. Contains information about the clone. This value is set via - // the clone operation. - CloneDefinition *CloneDefinition `json:"cloneDefinition,omitempty"` - - // Output only. Number of physical bytes used by time travel storage (deleted - // or changed data). This data is not kept in real time, and might be delayed - // by a few seconds to a few minutes. - NumTimeTravelPhysicalBytes *int64 `json:"numTimeTravelPhysicalBytes,omitempty"` - - // Output only. Total number of logical bytes in the table or materialized - // view. - NumTotalLogicalBytes *int64 `json:"numTotalLogicalBytes,omitempty"` - - // Output only. Number of logical bytes that are less than 90 days old. - NumActiveLogicalBytes *int64 `json:"numActiveLogicalBytes,omitempty"` - - // Output only. Number of logical bytes that are more than 90 days old. - NumLongTermLogicalBytes *int64 `json:"numLongTermLogicalBytes,omitempty"` - - // Output only. Number of physical bytes used by current live data storage. - // This data is not kept in real time, and might be delayed by a few seconds - // to a few minutes. - NumCurrentPhysicalBytes *int64 `json:"numCurrentPhysicalBytes,omitempty"` - - // Output only. The physical size of this table in bytes. This also includes - // storage used for time travel. This data is not kept in real time, and might - // be delayed by a few seconds to a few minutes. 
- NumTotalPhysicalBytes *int64 `json:"numTotalPhysicalBytes,omitempty"` - - // Output only. Number of physical bytes less than 90 days old. This data is - // not kept in real time, and might be delayed by a few seconds to a few - // minutes. - NumActivePhysicalBytes *int64 `json:"numActivePhysicalBytes,omitempty"` - - // Output only. Number of physical bytes more than 90 days old. - // This data is not kept in real time, and might be delayed by a few seconds - // to a few minutes. - NumLongTermPhysicalBytes *int64 `json:"numLongTermPhysicalBytes,omitempty"` - - // Output only. The number of partitions present in the table or materialized - // view. This data is not kept in real time, and might be delayed by a few - // seconds to a few minutes. - NumPartitions *int64 `json:"numPartitions,omitempty"` - - // Optional. The maximum staleness of data that could be returned when the - // table (or stale MV) is queried. Staleness encoded as a string encoding - // of sql IntervalValue type. - MaxStaleness *string `json:"maxStaleness,omitempty"` - - // Optional. Output only. Restriction config for table. If set, restrict - // certain accesses on the table based on the config. See [Data - // egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) - // for more details. - Restrictions *RestrictionConfig `json:"restrictions,omitempty"` - - // Optional. Tables Primary Key and Foreign Key information - TableConstraints *TableConstraints `json:"tableConstraints,omitempty"` - - // Optional. The [tags](https://cloud.google.com/bigquery/docs/tags) attached - // to this table. Tag keys are globally unique. Tag key is expected to be in - // the namespaced format, for example "123456789012/environment" where - // 123456789012 is the ID of the parent organization or project resource for - // this tag key. Tag value is expected to be the short name, for example - // "Production". See [Tag - // definitions](https://cloud.google.com/iam/docs/tags-access-control#definitions) - // for more details. - ResourceTags map[string]string `json:"resourceTags,omitempty"` - - // Optional. Table replication info for table created `AS REPLICA` DDL like: - // `CREATE MATERIALIZED VIEW mv1 AS REPLICA OF src_mv` - TableReplicationInfo *TableReplicationInfo `json:"tableReplicationInfo,omitempty"` - - // Optional. Output only. Table references of all replicas currently active on - // the table. - Replicas []TableReference `json:"replicas,omitempty"` - - // Optional. Options defining open source compatible table. - ExternalCatalogTableOptions *ExternalCatalogTableOptions `json:"externalCatalogTableOptions,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableConstraints -type TableConstraints struct { - // Optional. Represents a primary key constraint on a table's columns. - // Present only if the table has a primary key. - // The primary key is not enforced. - PrimaryKey *PrimaryKey `json:"primaryKey,omitempty"` - - // Optional. Present only if the table has a foreign key. - // The foreign key is not enforced. - ForeignKeys []ForeignKey `json:"foreignKeys,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema -type TableFieldSchema struct { - // Required. The field name. The name must contain only letters (a-z, A-Z), - // numbers (0-9), or underscores (_), and must start with a letter or - // underscore. The maximum length is 300 characters. - Name *string `json:"name,omitempty"` - - // Required. The field data type. 
Possible values include: - // - // * STRING - // * BYTES - // * INTEGER (or INT64) - // * FLOAT (or FLOAT64) - // * BOOLEAN (or BOOL) - // * TIMESTAMP - // * DATE - // * TIME - // * DATETIME - // * GEOGRAPHY - // * NUMERIC - // * BIGNUMERIC - // * JSON - // * RECORD (or STRUCT) - // * RANGE - // - // Use of RECORD/STRUCT indicates that the field contains a nested schema. - Type *string `json:"type,omitempty"` - - // Optional. The field mode. Possible values include NULLABLE, REQUIRED and - // REPEATED. The default value is NULLABLE. - Mode *string `json:"mode,omitempty"` - - // Optional. Describes the nested schema fields if the type property is set - // to RECORD. - Fields []TableFieldSchema `json:"fields,omitempty"` - - // Optional. The field description. The maximum length is 1,024 characters. - Description *string `json:"description,omitempty"` - - // Optional. The policy tags attached to this field, used for field-level - // access control. If not set, defaults to empty policy_tags. - PolicyTags *TableFieldSchema_PolicyTagList `json:"policyTags,omitempty"` - - // Optional. Data policy options, will replace the data_policies. - DataPolicies []DataPolicyOption `json:"dataPolicies,omitempty"` - - // Optional. Maximum length of values of this field for STRINGS or BYTES. - // - // If max_length is not specified, no maximum length constraint is imposed - // on this field. - // - // If type = "STRING", then max_length represents the maximum UTF-8 - // length of strings in this field. - // - // If type = "BYTES", then max_length represents the maximum number of - // bytes in this field. - // - // It is invalid to set this field if type ≠ "STRING" and ≠ "BYTES". - MaxLength *int64 `json:"maxLength,omitempty"` - - // Optional. Precision (maximum number of total digits in base 10) and scale - // (maximum number of digits in the fractional part in base 10) constraints - // for values of this field for NUMERIC or BIGNUMERIC. - // - // It is invalid to set precision or scale if type ≠ "NUMERIC" and ≠ - // "BIGNUMERIC". - // - // If precision and scale are not specified, no value range constraint is - // imposed on this field insofar as values are permitted by the type. - // - // Values of this NUMERIC or BIGNUMERIC field must be in this range when: - // - // * Precision (P) and scale (S) are specified: - // [-10P-S + 10-S, - // 10P-S - 10-S] - // * Precision (P) is specified but not scale (and thus scale is - // interpreted to be equal to zero): - // [-10P + 1, 10P - 1]. - // - // Acceptable values for precision and scale if both are specified: - // - // * If type = "NUMERIC": - // 1 ≤ precision - scale ≤ 29 and 0 ≤ scale ≤ 9. - // * If type = "BIGNUMERIC": - // 1 ≤ precision - scale ≤ 38 and 0 ≤ scale ≤ 38. - // - // Acceptable values for precision if only precision is specified but not - // scale (and thus scale is interpreted to be equal to zero): - // - // * If type = "NUMERIC": 1 ≤ precision ≤ 29. - // * If type = "BIGNUMERIC": 1 ≤ precision ≤ 38. - // - // If scale is specified but not precision, then it is invalid. - Precision *int64 `json:"precision,omitempty"` - - // Optional. See documentation for precision. - Scale *int64 `json:"scale,omitempty"` - - // Optional. Specifies the rounding mode to be used when storing values of - // NUMERIC and BIGNUMERIC type. - RoundingMode *string `json:"roundingMode,omitempty"` - - // Optional. Field collation can be set only when the type of field is STRING. 
- // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - Collation *string `json:"collation,omitempty"` - - // Optional. A SQL expression to specify the [default value] - // (https://cloud.google.com/bigquery/docs/default-values) for this field. - DefaultValueExpression *string `json:"defaultValueExpression,omitempty"` - - // Optional. The subtype of the RANGE, if the type of this field is RANGE. If - // the type is RANGE, this field is required. Values for the field element - // type can be the following: - // - // * DATE - // * DATETIME - // * TIMESTAMP - RangeElementType *TableFieldSchema_FieldElementType `json:"rangeElementType,omitempty"` - - // Optional. Definition of the foreign data type. - // Only valid for top-level schema fields (not nested fields). - // If the type is FOREIGN, this field is required. - ForeignTypeDefinition *string `json:"foreignTypeDefinition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema.FieldElementType -type TableFieldSchema_FieldElementType struct { - // Required. The type of a field element. For more information, see - // [TableFieldSchema.type][google.cloud.bigquery.v2.TableFieldSchema.type]. - Type *string `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema.PolicyTagList -type TableFieldSchema_PolicyTagList struct { - // A list of policy tag resource names. For example, - // "projects/1/locations/eu/taxonomies/2/policyTags/3". At most 1 policy tag - // is currently allowed. - Names []string `json:"names,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableList -type TableList struct { - // The type of list. - Kind *string `json:"kind,omitempty"` - - // A hash of this page of results. - Etag *string `json:"etag,omitempty"` - - // A token to request the next page of results. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // Tables in the requested dataset. - Tables []ListFormatTable `json:"tables,omitempty"` - - // The total number of tables in the dataset. - TotalItems *int32 `json:"totalItems,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableMetadataCacheUsage -type TableMetadataCacheUsage struct { - // Metadata caching eligible table referenced in the query. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Reason for not using metadata caching for the table. - UnusedReason *string `json:"unusedReason,omitempty"` - - // Free form human-readable reason metadata caching was unused for - // the job. - Explanation *string `json:"explanation,omitempty"` - - // Duration since last refresh as of this job for managed tables (indicates - // metadata cache staleness as seen by this job). - Staleness *string `json:"staleness,omitempty"` - - // [Table - // type](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type). - TableType *string `json:"tableType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableReference -type TableReference struct { - // Required. The ID of the project containing this table. - ProjectId *string `json:"projectId"` - - // Required. The ID of the dataset containing this table. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the table. The ID can contain Unicode characters in + // The Id of the table. The Id can contain Unicode characters in // category L (letter), M (mark), N (number), Pc (connector, including // underscore), Pd (dash), and Zs (space). 
For more information, see [General // Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). // The maximum length is 1,024 characters. Certain operations allow suffixing - // of the table ID with a partition decorator, such as + // of the table Id with a partition decorator, such as // `sample_table$20190123`. - TableId *string `json:"tableId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableReplicationInfo -type TableReplicationInfo struct { - // Required. Source table reference that is replicated. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // Optional. Specifies the interval at which the source table is polled for - // updates. - // It's Optional. If not specified, default replication interval would be - // applied. - ReplicationIntervalMs *int64 `json:"replicationIntervalMs,omitempty"` - - // Optional. Output only. If source is a materialized view, this field - // signifies the last refresh time of the source. - ReplicatedSourceLastRefreshTime *int64 `json:"replicatedSourceLastRefreshTime,omitempty"` - - // Optional. Output only. Replication status of configured replication. - ReplicationStatus *string `json:"replicationStatus,omitempty"` - - // Optional. Output only. Replication error that will permanently stopped - // table replication. - ReplicationError *ErrorProto `json:"replicationError,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableSchema -type TableSchema struct { - // Describes the fields in a table. - Fields []TableFieldSchema `json:"fields,omitempty"` - - // Optional. Specifies metadata of the foreign data type definition in field - // schema - // ([TableFieldSchema.foreign_type_definition][google.cloud.bigquery.v2.TableFieldSchema.foreign_type_definition]). - ForeignTypeInfo *ForeignTypeInfo `json:"foreignTypeInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TimePartitioning -type TimePartitioning struct { - // Required. The supported types are DAY, HOUR, MONTH, and YEAR, which will - // generate one partition per day, hour, month, and year, respectively. - Type *string `json:"type,omitempty"` - - // Optional. Number of milliseconds for which to keep the storage for a - // partition. - // A wrapper is used here because 0 is an invalid value. - ExpirationMs *int64 `json:"expirationMs,omitempty"` - - // Optional. If not set, the table is partitioned by pseudo - // column '_PARTITIONTIME'; if set, the table is partitioned by this field. - // The field must be a top-level TIMESTAMP or DATE field. Its mode must be - // NULLABLE or REQUIRED. - // A wrapper is used here because an empty string is an invalid value. - Field *string `json:"field,omitempty"` + // +required + TableId *string `json:"tableId,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.TransformColumn -type TransformColumn struct { - // Output only. Name of the column. - Name *string `json:"name,omitempty"` - - // Output only. Data type of the column after the transform. - Type *StandardSqlDataType `json:"type,omitempty"` - - // Output only. The SQL expression used in the column transform. - TransformSql *string `json:"transformSql,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.UserDefinedFunctionResource -type UserDefinedFunctionResource struct { - // [Pick one] A code resource to load from a Google Cloud Storage URI - // (gs://bucket/path). - ResourceUri *string `json:"resourceUri,omitempty"` - - // [Pick one] An inline resource that contains code for a user-defined - // function (UDF). 
Providing a inline code resource is equivalent to providing - // a URI for a file containing the same code. - InlineCode *string `json:"inlineCode,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.VectorSearchStatistics -type VectorSearchStatistics struct { - // Specifies the index usage mode for the query. - IndexUsageMode *string `json:"indexUsageMode,omitempty"` - - // When `indexUsageMode` is `UNUSED` or `PARTIALLY_USED`, this field explains - // why indexes were not used in all or part of the vector search query. If - // `indexUsageMode` is `FULLY_USED`, this field is not populated. - IndexUnusedReasons []IndexUnusedReason `json:"indexUnusedReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ViewDefinition -type ViewDefinition struct { - // Required. A query that BigQuery executes when the view is referenced. - Query *string `json:"query,omitempty"` - - // Describes user-defined function resources used in the query. - UserDefinedFunctionResources []UserDefinedFunctionResource `json:"userDefinedFunctionResources,omitempty"` - - // Specifies whether to use BigQuery's legacy SQL for this view. - // The default value is true. If set to false, the view will use - // BigQuery's GoogleSQL: - // https://cloud.google.com/bigquery/sql-reference/ - // - // Queries and views that reference this view must use the same flag value. - // A wrapper is used here because the default value is True. - UseLegacySql *bool `json:"useLegacySql,omitempty"` - - // True if the column names are explicitly specified. For example by using the - // 'CREATE VIEW v(c1, c2) AS ...' syntax. - // Can only be set for GoogleSQL views. - UseExplicitColumnNames *bool `json:"useExplicitColumnNames,omitempty"` - - // Optional. Specifics the privacy policy for the view. - PrivacyPolicy *PrivacyPolicy `json:"privacyPolicy,omitempty"` - - // Optional. Foreign view representations. - ForeignDefinitions []ForeignViewDefinition `json:"foreignDefinitions,omitempty"` +// +kcc:proto=google.protobuf.BoolValue +type BoolValue struct { + // The bool value. + Value *bool `json:"value,omitempty"` } diff --git a/apis/bigquery/v1beta1/zz_generated.deepcopy.go b/apis/bigquery/v1beta1/zz_generated.deepcopy.go index b9cb208e43..bf12f8e403 100644 --- a/apis/bigquery/v1beta1/zz_generated.deepcopy.go +++ b/apis/bigquery/v1beta1/zz_generated.deepcopy.go @@ -84,143 +84,6 @@ func (in *Access) DeepCopy() *Access { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AggregationThresholdPolicy) DeepCopyInto(out *AggregationThresholdPolicy) { - *out = *in - if in.Threshold != nil { - in, out := &in.Threshold, &out.Threshold - *out = new(int64) - **out = **in - } - if in.PrivacyUnitColumns != nil { - in, out := &in.PrivacyUnitColumns, &out.PrivacyUnitColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AggregationThresholdPolicy. -func (in *AggregationThresholdPolicy) DeepCopy() *AggregationThresholdPolicy { - if in == nil { - return nil - } - out := new(AggregationThresholdPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *AvroOptions) DeepCopyInto(out *AvroOptions) { - *out = *in - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AvroOptions. -func (in *AvroOptions) DeepCopy() *AvroOptions { - if in == nil { - return nil - } - out := new(AvroOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BiEngineReason) DeepCopyInto(out *BiEngineReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BiEngineReason. -func (in *BiEngineReason) DeepCopy() *BiEngineReason { - if in == nil { - return nil - } - out := new(BiEngineReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BiEngineStatistics) DeepCopyInto(out *BiEngineStatistics) { - *out = *in - if in.BiEngineMode != nil { - in, out := &in.BiEngineMode, &out.BiEngineMode - *out = new(string) - **out = **in - } - if in.AccelerationMode != nil { - in, out := &in.AccelerationMode, &out.AccelerationMode - *out = new(string) - **out = **in - } - if in.BiEngineReasons != nil { - in, out := &in.BiEngineReasons, &out.BiEngineReasons - *out = make([]BiEngineReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BiEngineStatistics. -func (in *BiEngineStatistics) DeepCopy() *BiEngineStatistics { - if in == nil { - return nil - } - out := new(BiEngineStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigLakeConfiguration) DeepCopyInto(out *BigLakeConfiguration) { - *out = *in - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } - if in.StorageUri != nil { - in, out := &in.StorageUri, &out.StorageUri - *out = new(string) - **out = **in - } - if in.FileFormat != nil { - in, out := &in.FileFormat, &out.FileFormat - *out = new(string) - **out = **in - } - if in.TableFormat != nil { - in, out := &in.TableFormat, &out.TableFormat - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigLakeConfiguration. -func (in *BigLakeConfiguration) DeepCopy() *BigLakeConfiguration { - if in == nil { - return nil - } - out := new(BigLakeConfiguration) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryDataset) DeepCopyInto(out *BigQueryDataset) { *out = *in @@ -295,6 +158,41 @@ func (in *BigQueryDatasetObservedState) DeepCopy() *BigQueryDatasetObservedState return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BigQueryDatasetParent) DeepCopyInto(out *BigQueryDatasetParent) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetParent. +func (in *BigQueryDatasetParent) DeepCopy() *BigQueryDatasetParent { + if in == nil { + return nil + } + out := new(BigQueryDatasetParent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDatasetRef) DeepCopyInto(out *BigQueryDatasetRef) { + *out = *in + if in.parent != nil { + in, out := &in.parent, &out.parent + *out = new(BigQueryDatasetParent) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetRef. +func (in *BigQueryDatasetRef) DeepCopy() *BigQueryDatasetRef { + if in == nil { + return nil + } + out := new(BigQueryDatasetRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec) { *out = *in @@ -395,6 +293,11 @@ func (in *BigQueryDatasetStatus) DeepCopyInto(out *BigQueryDatasetStatus) { *out = new(string) **out = **in } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } if in.LastModifiedTime != nil { in, out := &in.LastModifiedTime, &out.LastModifiedTime *out = new(int64) @@ -423,7238 +326,209 @@ func (in *BigQueryDatasetStatus) DeepCopy() *BigQueryDatasetStatus { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigtableColumn) DeepCopyInto(out *BigtableColumn) { +func (in *BoolValue) DeepCopyInto(out *BoolValue) { *out = *in - if in.QualifierEncoded != nil { - in, out := &in.QualifierEncoded, &out.QualifierEncoded - *out = new(byte) - **out = **in - } - if in.QualifierString != nil { - in, out := &in.QualifierString, &out.QualifierString - *out = new(string) - **out = **in - } - if in.FieldName != nil { - in, out := &in.FieldName, &out.FieldName - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.OnlyReadLatest != nil { - in, out := &in.OnlyReadLatest, &out.OnlyReadLatest + if in.Value != nil { + in, out := &in.Value, &out.Value *out = new(bool) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableColumn. -func (in *BigtableColumn) DeepCopy() *BigtableColumn { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BoolValue. +func (in *BoolValue) DeepCopy() *BoolValue { if in == nil { return nil } - out := new(BigtableColumn) + out := new(BoolValue) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BigtableColumnFamily) DeepCopyInto(out *BigtableColumnFamily) { +func (in *DatasetAccessEntry) DeepCopyInto(out *DatasetAccessEntry) { *out = *in - if in.FamilyID != nil { - in, out := &in.FamilyID, &out.FamilyID - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]BigtableColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } + if in.Dataset != nil { + in, out := &in.Dataset, &out.Dataset + *out = new(DatasetReference) + (*in).DeepCopyInto(*out) } - if in.OnlyReadLatest != nil { - in, out := &in.OnlyReadLatest, &out.OnlyReadLatest - *out = new(bool) - **out = **in + if in.TargetTypes != nil { + in, out := &in.TargetTypes, &out.TargetTypes + *out = make([]string, len(*in)) + copy(*out, *in) } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableColumnFamily. -func (in *BigtableColumnFamily) DeepCopy() *BigtableColumnFamily { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessEntry. +func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry { if in == nil { return nil } - out := new(BigtableColumnFamily) + out := new(DatasetAccessEntry) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigtableOptions) DeepCopyInto(out *BigtableOptions) { +func (in *DatasetReference) DeepCopyInto(out *DatasetReference) { *out = *in - if in.ColumnFamilies != nil { - in, out := &in.ColumnFamilies, &out.ColumnFamilies - *out = make([]BigtableColumnFamily, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.IgnoreUnspecifiedColumnFamilies != nil { - in, out := &in.IgnoreUnspecifiedColumnFamilies, &out.IgnoreUnspecifiedColumnFamilies - *out = new(bool) - **out = **in - } - if in.ReadRowkeyAsString != nil { - in, out := &in.ReadRowkeyAsString, &out.ReadRowkeyAsString - *out = new(bool) + if in.DatasetId != nil { + in, out := &in.DatasetId, &out.DatasetId + *out = new(string) **out = **in } - if in.OutputColumnFamiliesAsJson != nil { - in, out := &in.OutputColumnFamiliesAsJson, &out.OutputColumnFamiliesAsJson - *out = new(bool) + if in.ProjectId != nil { + in, out := &in.ProjectId, &out.ProjectId + *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableOptions. -func (in *BigtableOptions) DeepCopy() *BigtableOptions { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetReference. +func (in *DatasetReference) DeepCopy() *DatasetReference { if in == nil { return nil } - out := new(BigtableOptions) + out := new(DatasetReference) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *CloneDefinition) DeepCopyInto(out *CloneDefinition) { +func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { *out = *in - if in.BaseTableReference != nil { - in, out := &in.BaseTableReference, &out.BaseTableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.CloneTime != nil { - in, out := &in.CloneTime, &out.CloneTime - *out = new(string) + if in.KmsKeyRef != nil { + in, out := &in.KmsKeyRef, &out.KmsKeyRef + *out = new(refsv1beta1.KMSCryptoKeyRef) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloneDefinition. -func (in *CloneDefinition) DeepCopy() *CloneDefinition { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration. +func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { if in == nil { return nil } - out := new(CloneDefinition) + out := new(EncryptionConfiguration) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Clustering) DeepCopyInto(out *Clustering) { +func (in *ExternalCatalogDatasetOptions) DeepCopyInto(out *ExternalCatalogDatasetOptions) { *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]string, len(*in)) - copy(*out, *in) + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.DefaultStorageLocationUri != nil { + in, out := &in.DefaultStorageLocationUri, &out.DefaultStorageLocationUri + *out = new(string) + **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Clustering. -func (in *Clustering) DeepCopy() *Clustering { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogDatasetOptions. +func (in *ExternalCatalogDatasetOptions) DeepCopy() *ExternalCatalogDatasetOptions { if in == nil { return nil } - out := new(Clustering) + out := new(ExternalCatalogDatasetOptions) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ColumnReference) DeepCopyInto(out *ColumnReference) { +func (in *ExternalDatasetReference) DeepCopyInto(out *ExternalDatasetReference) { *out = *in - if in.ReferencingColumn != nil { - in, out := &in.ReferencingColumn, &out.ReferencingColumn + if in.ExternalSource != nil { + in, out := &in.ExternalSource, &out.ExternalSource *out = new(string) **out = **in } - if in.ReferencedColumn != nil { - in, out := &in.ReferencedColumn, &out.ReferencedColumn + if in.Connection != nil { + in, out := &in.Connection, &out.Connection *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnReference. -func (in *ColumnReference) DeepCopy() *ColumnReference { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReference. +func (in *ExternalDatasetReference) DeepCopy() *ExternalDatasetReference { if in == nil { return nil } - out := new(ColumnReference) + out := new(ExternalDatasetReference) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ConnectionProperty) DeepCopyInto(out *ConnectionProperty) {
+func (in *GcpTag) DeepCopyInto(out *GcpTag) {
 	*out = *in
-	if in.Key != nil {
-		in, out := &in.Key, &out.Key
+	if in.TagKey != nil {
+		in, out := &in.TagKey, &out.TagKey
 		*out = new(string)
 		**out = **in
 	}
-	if in.Value != nil {
-		in, out := &in.Value, &out.Value
+	if in.TagValue != nil {
+		in, out := &in.TagValue, &out.TagValue
 		*out = new(string)
 		**out = **in
 	}
 }
 
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionProperty.
-func (in *ConnectionProperty) DeepCopy() *ConnectionProperty {
+// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GcpTag.
+func (in *GcpTag) DeepCopy() *GcpTag {
 	if in == nil {
 		return nil
 	}
-	out := new(ConnectionProperty)
+	out := new(GcpTag)
 	in.DeepCopyInto(out)
 	return out
 }
 
 // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *CopyJobStatistics) DeepCopyInto(out *CopyJobStatistics) {
+func (in *LinkedDatasetMetadata) DeepCopyInto(out *LinkedDatasetMetadata) {
 	*out = *in
-	if in.CopiedRows != nil {
-		in, out := &in.CopiedRows, &out.CopiedRows
-		*out = new(int64)
-		**out = **in
-	}
-	if in.CopiedLogicalBytes != nil {
-		in, out := &in.CopiedLogicalBytes, &out.CopiedLogicalBytes
-		*out = new(int64)
-		**out = **in
-	}
 }
 
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CopyJobStatistics.
-func (in *CopyJobStatistics) DeepCopy() *CopyJobStatistics {
+// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetMetadata.
+func (in *LinkedDatasetMetadata) DeepCopy() *LinkedDatasetMetadata {
 	if in == nil {
 		return nil
 	}
-	out := new(CopyJobStatistics)
+	out := new(LinkedDatasetMetadata)
 	in.DeepCopyInto(out)
 	return out
 }
 
 // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *CsvOptions) DeepCopyInto(out *CsvOptions) {
+func (in *LinkedDatasetSource) DeepCopyInto(out *LinkedDatasetSource) {
 	*out = *in
-	if in.FieldDelimiter != nil {
-		in, out := &in.FieldDelimiter, &out.FieldDelimiter
-		*out = new(string)
-		**out = **in
+	if in.SourceDataset != nil {
+		in, out := &in.SourceDataset, &out.SourceDataset
+		*out = new(DatasetReference)
+		(*in).DeepCopyInto(*out)
 	}
-	if in.SkipLeadingRows != nil {
-		in, out := &in.SkipLeadingRows, &out.SkipLeadingRows
-		*out = new(int64)
-		**out = **in
+}
+
+// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetSource.
+func (in *LinkedDatasetSource) DeepCopy() *LinkedDatasetSource {
+	if in == nil {
+		return nil
 	}
-	if in.Quote != nil {
-		in, out := &in.Quote, &out.Quote
-		*out = new(string)
-		**out = **in
-	}
-	if in.AllowQuotedNewlines != nil {
-		in, out := &in.AllowQuotedNewlines, &out.AllowQuotedNewlines
-		*out = new(bool)
-		**out = **in
-	}
-	if in.AllowJaggedRows != nil {
-		in, out := &in.AllowJaggedRows, &out.AllowJaggedRows
-		*out = new(bool)
-		**out = **in
-	}
-	if in.Encoding != nil {
-		in, out := &in.Encoding, &out.Encoding
-		*out = new(string)
-		**out = **in
-	}
-	if in.PreserveAsciiControlCharacters != nil {
-		in, out := &in.PreserveAsciiControlCharacters, &out.PreserveAsciiControlCharacters
-		*out = new(bool)
-		**out = **in
-	}
-	if in.NullMarker != nil {
-		in, out := &in.NullMarker, &out.NullMarker
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvOptions.
-func (in *CsvOptions) DeepCopy() *CsvOptions {
-	if in == nil {
-		return nil
-	}
-	out := new(CsvOptions)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *DataFormatOptions) DeepCopyInto(out *DataFormatOptions) {
-	*out = *in
-	if in.UseInt64Timestamp != nil {
-		in, out := &in.UseInt64Timestamp, &out.UseInt64Timestamp
-		*out = new(bool)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataFormatOptions.
-func (in *DataFormatOptions) DeepCopy() *DataFormatOptions {
-	if in == nil {
-		return nil
-	}
-	out := new(DataFormatOptions)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *DataMaskingStatistics) DeepCopyInto(out *DataMaskingStatistics) {
-	*out = *in
-	if in.DataMaskingApplied != nil {
-		in, out := &in.DataMaskingApplied, &out.DataMaskingApplied
-		*out = new(bool)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataMaskingStatistics.
-func (in *DataMaskingStatistics) DeepCopy() *DataMaskingStatistics {
-	if in == nil {
-		return nil
-	}
-	out := new(DataMaskingStatistics)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *DataPolicyOption) DeepCopyInto(out *DataPolicyOption) {
-	*out = *in
-	if in.Name != nil {
-		in, out := &in.Name, &out.Name
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPolicyOption.
-func (in *DataPolicyOption) DeepCopy() *DataPolicyOption {
-	if in == nil {
-		return nil
-	}
-	out := new(DataPolicyOption)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *Dataset) DeepCopyInto(out *Dataset) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.DatasetReference != nil { - in, out := &in.DatasetReference, &out.DatasetReference - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.DefaultTableExpirationMs != nil { - in, out := &in.DefaultTableExpirationMs, &out.DefaultTableExpirationMs - *out = new(int64) - **out = **in - } - if in.DefaultPartitionExpirationMs != nil { - in, out := &in.DefaultPartitionExpirationMs, &out.DefaultPartitionExpirationMs - *out = new(int64) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.Access != nil { - in, out := &in.Access, &out.Access - *out = make([]Access, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(int64) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.DefaultEncryptionConfiguration != nil { - in, out := &in.DefaultEncryptionConfiguration, &out.DefaultEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.SatisfiesPzs != nil { - in, out := &in.SatisfiesPzs, &out.SatisfiesPzs - *out = new(bool) - **out = **in - } - if in.SatisfiesPzi != nil { - in, out := &in.SatisfiesPzi, &out.SatisfiesPzi - *out = new(bool) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.LinkedDatasetSource != nil { - in, out := &in.LinkedDatasetSource, &out.LinkedDatasetSource - *out = new(LinkedDatasetSource) - (*in).DeepCopyInto(*out) - } - if in.LinkedDatasetMetadata != nil { - in, out := &in.LinkedDatasetMetadata, &out.LinkedDatasetMetadata - *out = new(LinkedDatasetMetadata) - **out = **in - } - if in.ExternalDatasetReference != nil { - in, out := &in.ExternalDatasetReference, &out.ExternalDatasetReference - *out = new(ExternalDatasetReference) - (*in).DeepCopyInto(*out) - } - if in.ExternalCatalogDatasetOptions != nil { - in, out := &in.ExternalCatalogDatasetOptions, &out.ExternalCatalogDatasetOptions - *out = new(ExternalCatalogDatasetOptions) - (*in).DeepCopyInto(*out) - } - if in.IsCaseInsensitive != nil { - in, out := &in.IsCaseInsensitive, &out.IsCaseInsensitive - *out = new(bool) - **out = **in - } - if in.DefaultCollation != nil { - in, out := &in.DefaultCollation, &out.DefaultCollation - *out = new(string) - **out = **in - } - if in.DefaultRoundingMode != nil { - in, out := &in.DefaultRoundingMode, &out.DefaultRoundingMode - *out = new(string) - **out = **in - } - if in.MaxTimeTravelHours != nil { - in, out := 
&in.MaxTimeTravelHours, &out.MaxTimeTravelHours - *out = new(int64) - **out = **in - } - if in.Tags != nil { - in, out := &in.Tags, &out.Tags - *out = make([]GcpTag, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StorageBillingModel != nil { - in, out := &in.StorageBillingModel, &out.StorageBillingModel - *out = new(string) - **out = **in - } - if in.Restrictions != nil { - in, out := &in.Restrictions, &out.Restrictions - *out = new(RestrictionConfig) - (*in).DeepCopyInto(*out) - } - if in.ResourceTags != nil { - in, out := &in.ResourceTags, &out.ResourceTags - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset. -func (in *Dataset) DeepCopy() *Dataset { - if in == nil { - return nil - } - out := new(Dataset) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DatasetAccessEntry) DeepCopyInto(out *DatasetAccessEntry) { - *out = *in - if in.Dataset != nil { - in, out := &in.Dataset, &out.Dataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.TargetTypes != nil { - in, out := &in.TargetTypes, &out.TargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessEntry. -func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry { - if in == nil { - return nil - } - out := new(DatasetAccessEntry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DatasetList) DeepCopyInto(out *DatasetList) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Datasets != nil { - in, out := &in.Datasets, &out.Datasets - *out = make([]ListFormatDataset, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Unreachable != nil { - in, out := &in.Unreachable, &out.Unreachable - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList. -func (in *DatasetList) DeepCopy() *DatasetList { - if in == nil { - return nil - } - out := new(DatasetList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DatasetReference) DeepCopyInto(out *DatasetReference) { - *out = *in - if in.DatasetId != nil { - in, out := &in.DatasetId, &out.DatasetId - *out = new(string) - **out = **in - } - if in.ProjectId != nil { - in, out := &in.ProjectId, &out.ProjectId - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetReference. -func (in *DatasetReference) DeepCopy() *DatasetReference { - if in == nil { - return nil - } - out := new(DatasetReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. -func (in *DestinationTableProperties) DeepCopyInto(out *DestinationTableProperties) { - *out = *in - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationTableProperties. -func (in *DestinationTableProperties) DeepCopy() *DestinationTableProperties { - if in == nil { - return nil - } - out := new(DestinationTableProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DifferentialPrivacyPolicy) DeepCopyInto(out *DifferentialPrivacyPolicy) { - *out = *in - if in.MaxEpsilonPerQuery != nil { - in, out := &in.MaxEpsilonPerQuery, &out.MaxEpsilonPerQuery - *out = new(float64) - **out = **in - } - if in.DeltaPerQuery != nil { - in, out := &in.DeltaPerQuery, &out.DeltaPerQuery - *out = new(float64) - **out = **in - } - if in.MaxGroupsContributed != nil { - in, out := &in.MaxGroupsContributed, &out.MaxGroupsContributed - *out = new(int64) - **out = **in - } - if in.PrivacyUnitColumn != nil { - in, out := &in.PrivacyUnitColumn, &out.PrivacyUnitColumn - *out = new(string) - **out = **in - } - if in.EpsilonBudget != nil { - in, out := &in.EpsilonBudget, &out.EpsilonBudget - *out = new(float64) - **out = **in - } - if in.DeltaBudget != nil { - in, out := &in.DeltaBudget, &out.DeltaBudget - *out = new(float64) - **out = **in - } - if in.EpsilonBudgetRemaining != nil { - in, out := &in.EpsilonBudgetRemaining, &out.EpsilonBudgetRemaining - *out = new(float64) - **out = **in - } - if in.DeltaBudgetRemaining != nil { - in, out := &in.DeltaBudgetRemaining, &out.DeltaBudgetRemaining - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DifferentialPrivacyPolicy. -func (in *DifferentialPrivacyPolicy) DeepCopy() *DifferentialPrivacyPolicy { - if in == nil { - return nil - } - out := new(DifferentialPrivacyPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DmlStats) DeepCopyInto(out *DmlStats) { - *out = *in - if in.InsertedRowCount != nil { - in, out := &in.InsertedRowCount, &out.InsertedRowCount - *out = new(int64) - **out = **in - } - if in.DeletedRowCount != nil { - in, out := &in.DeletedRowCount, &out.DeletedRowCount - *out = new(int64) - **out = **in - } - if in.UpdatedRowCount != nil { - in, out := &in.UpdatedRowCount, &out.UpdatedRowCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DmlStats. -func (in *DmlStats) DeepCopy() *DmlStats { - if in == nil { - return nil - } - out := new(DmlStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { - *out = *in - if in.KmsKeyRef != nil { - in, out := &in.KmsKeyRef, &out.KmsKeyRef - *out = new(refsv1beta1.KMSCryptoKeyRef) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration. -func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { - if in == nil { - return nil - } - out := new(EncryptionConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ErrorProto) DeepCopyInto(out *ErrorProto) { - *out = *in - if in.Reason != nil { - in, out := &in.Reason, &out.Reason - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.DebugInfo != nil { - in, out := &in.DebugInfo, &out.DebugInfo - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorProto. -func (in *ErrorProto) DeepCopy() *ErrorProto { - if in == nil { - return nil - } - out := new(ErrorProto) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExplainQueryStage) DeepCopyInto(out *ExplainQueryStage) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(int64) - **out = **in - } - if in.StartMs != nil { - in, out := &in.StartMs, &out.StartMs - *out = new(int64) - **out = **in - } - if in.EndMs != nil { - in, out := &in.EndMs, &out.EndMs - *out = new(int64) - **out = **in - } - if in.InputStages != nil { - in, out := &in.InputStages, &out.InputStages - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.WaitRatioAvg != nil { - in, out := &in.WaitRatioAvg, &out.WaitRatioAvg - *out = new(float64) - **out = **in - } - if in.WaitMsAvg != nil { - in, out := &in.WaitMsAvg, &out.WaitMsAvg - *out = new(int64) - **out = **in - } - if in.WaitRatioMax != nil { - in, out := &in.WaitRatioMax, &out.WaitRatioMax - *out = new(float64) - **out = **in - } - if in.WaitMsMax != nil { - in, out := &in.WaitMsMax, &out.WaitMsMax - *out = new(int64) - **out = **in - } - if in.ReadRatioAvg != nil { - in, out := &in.ReadRatioAvg, &out.ReadRatioAvg - *out = new(float64) - **out = **in - } - if in.ReadMsAvg != nil { - in, out := &in.ReadMsAvg, &out.ReadMsAvg - *out = new(int64) - **out = **in - } - if in.ReadRatioMax != nil { - in, out := &in.ReadRatioMax, &out.ReadRatioMax - *out = new(float64) - **out = **in - } - if in.ReadMsMax != nil { - in, out := &in.ReadMsMax, &out.ReadMsMax - *out = new(int64) - **out = **in - } - if in.ComputeRatioAvg != nil { - in, out := &in.ComputeRatioAvg, &out.ComputeRatioAvg - *out = new(float64) - **out = **in - } - if in.ComputeMsAvg != nil { - in, out := &in.ComputeMsAvg, &out.ComputeMsAvg - *out = new(int64) - **out = **in - } - if in.ComputeRatioMax != nil { - in, out := &in.ComputeRatioMax, &out.ComputeRatioMax - *out = new(float64) - **out = **in - } - if in.ComputeMsMax != nil { - in, out := &in.ComputeMsMax, &out.ComputeMsMax - *out = new(int64) - **out = **in - } - if in.WriteRatioAvg != nil { - in, out 
:= &in.WriteRatioAvg, &out.WriteRatioAvg - *out = new(float64) - **out = **in - } - if in.WriteMsAvg != nil { - in, out := &in.WriteMsAvg, &out.WriteMsAvg - *out = new(int64) - **out = **in - } - if in.WriteRatioMax != nil { - in, out := &in.WriteRatioMax, &out.WriteRatioMax - *out = new(float64) - **out = **in - } - if in.WriteMsMax != nil { - in, out := &in.WriteMsMax, &out.WriteMsMax - *out = new(int64) - **out = **in - } - if in.ShuffleOutputBytes != nil { - in, out := &in.ShuffleOutputBytes, &out.ShuffleOutputBytes - *out = new(int64) - **out = **in - } - if in.ShuffleOutputBytesSpilled != nil { - in, out := &in.ShuffleOutputBytesSpilled, &out.ShuffleOutputBytesSpilled - *out = new(int64) - **out = **in - } - if in.RecordsRead != nil { - in, out := &in.RecordsRead, &out.RecordsRead - *out = new(int64) - **out = **in - } - if in.RecordsWritten != nil { - in, out := &in.RecordsWritten, &out.RecordsWritten - *out = new(int64) - **out = **in - } - if in.ParallelInputs != nil { - in, out := &in.ParallelInputs, &out.ParallelInputs - *out = new(int64) - **out = **in - } - if in.CompletedParallelInputs != nil { - in, out := &in.CompletedParallelInputs, &out.CompletedParallelInputs - *out = new(int64) - **out = **in - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(string) - **out = **in - } - if in.Steps != nil { - in, out := &in.Steps, &out.Steps - *out = make([]ExplainQueryStep, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SlotMs != nil { - in, out := &in.SlotMs, &out.SlotMs - *out = new(int64) - **out = **in - } - if in.ComputeMode != nil { - in, out := &in.ComputeMode, &out.ComputeMode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExplainQueryStage. -func (in *ExplainQueryStage) DeepCopy() *ExplainQueryStage { - if in == nil { - return nil - } - out := new(ExplainQueryStage) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExplainQueryStep) DeepCopyInto(out *ExplainQueryStep) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Substeps != nil { - in, out := &in.Substeps, &out.Substeps - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExplainQueryStep. -func (in *ExplainQueryStep) DeepCopy() *ExplainQueryStep { - if in == nil { - return nil - } - out := new(ExplainQueryStep) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExportDataStatistics) DeepCopyInto(out *ExportDataStatistics) { - *out = *in - if in.FileCount != nil { - in, out := &in.FileCount, &out.FileCount - *out = new(int64) - **out = **in - } - if in.RowCount != nil { - in, out := &in.RowCount, &out.RowCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExportDataStatistics. -func (in *ExportDataStatistics) DeepCopy() *ExportDataStatistics { - if in == nil { - return nil - } - out := new(ExportDataStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ExternalCatalogDatasetOptions) DeepCopyInto(out *ExternalCatalogDatasetOptions) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.DefaultStorageLocationUri != nil { - in, out := &in.DefaultStorageLocationUri, &out.DefaultStorageLocationUri - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogDatasetOptions. -func (in *ExternalCatalogDatasetOptions) DeepCopy() *ExternalCatalogDatasetOptions { - if in == nil { - return nil - } - out := new(ExternalCatalogDatasetOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalCatalogTableOptions) DeepCopyInto(out *ExternalCatalogTableOptions) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.StorageDescriptor != nil { - in, out := &in.StorageDescriptor, &out.StorageDescriptor - *out = new(StorageDescriptor) - (*in).DeepCopyInto(*out) - } - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogTableOptions. -func (in *ExternalCatalogTableOptions) DeepCopy() *ExternalCatalogTableOptions { - if in == nil { - return nil - } - out := new(ExternalCatalogTableOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ExternalDataConfiguration) DeepCopyInto(out *ExternalDataConfiguration) { - *out = *in - if in.SourceUris != nil { - in, out := &in.SourceUris, &out.SourceUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileSetSpecType != nil { - in, out := &in.FileSetSpecType, &out.FileSetSpecType - *out = new(string) - **out = **in - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.SourceFormat != nil { - in, out := &in.SourceFormat, &out.SourceFormat - *out = new(string) - **out = **in - } - if in.MaxBadRecords != nil { - in, out := &in.MaxBadRecords, &out.MaxBadRecords - *out = new(int32) - **out = **in - } - if in.Autodetect != nil { - in, out := &in.Autodetect, &out.Autodetect - *out = new(bool) - **out = **in - } - if in.IgnoreUnknownValues != nil { - in, out := &in.IgnoreUnknownValues, &out.IgnoreUnknownValues - *out = new(bool) - **out = **in - } - if in.Compression != nil { - in, out := &in.Compression, &out.Compression - *out = new(string) - **out = **in - } - if in.CsvOptions != nil { - in, out := &in.CsvOptions, &out.CsvOptions - *out = new(CsvOptions) - (*in).DeepCopyInto(*out) - } - if in.JsonOptions != nil { - in, out := &in.JsonOptions, &out.JsonOptions - *out = new(JsonOptions) - (*in).DeepCopyInto(*out) - } - if in.BigtableOptions != nil { - in, out := &in.BigtableOptions, &out.BigtableOptions - *out = new(BigtableOptions) - (*in).DeepCopyInto(*out) - } - if in.GoogleSheetsOptions != nil { - in, out := &in.GoogleSheetsOptions, &out.GoogleSheetsOptions - *out = new(GoogleSheetsOptions) - (*in).DeepCopyInto(*out) - } - if in.HivePartitioningOptions != nil { - in, out := &in.HivePartitioningOptions, &out.HivePartitioningOptions - *out = new(HivePartitioningOptions) - (*in).DeepCopyInto(*out) - } - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } - if in.DecimalTargetTypes != nil { - in, out := &in.DecimalTargetTypes, &out.DecimalTargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.AvroOptions != nil { - in, out := &in.AvroOptions, &out.AvroOptions - *out = new(AvroOptions) - (*in).DeepCopyInto(*out) - } - if in.JsonExtension != nil { - in, out := &in.JsonExtension, &out.JsonExtension - *out = new(string) - **out = **in - } - if in.ParquetOptions != nil { - in, out := &in.ParquetOptions, &out.ParquetOptions - *out = new(ParquetOptions) - (*in).DeepCopyInto(*out) - } - if in.ObjectMetadata != nil { - in, out := &in.ObjectMetadata, &out.ObjectMetadata - *out = new(string) - **out = **in - } - if in.ReferenceFileSchemaUri != nil { - in, out := &in.ReferenceFileSchemaUri, &out.ReferenceFileSchemaUri - *out = new(string) - **out = **in - } - if in.MetadataCacheMode != nil { - in, out := &in.MetadataCacheMode, &out.MetadataCacheMode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfiguration. -func (in *ExternalDataConfiguration) DeepCopy() *ExternalDataConfiguration { - if in == nil { - return nil - } - out := new(ExternalDataConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ExternalDatasetReference) DeepCopyInto(out *ExternalDatasetReference) { - *out = *in - if in.ExternalSource != nil { - in, out := &in.ExternalSource, &out.ExternalSource - *out = new(string) - **out = **in - } - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReference. -func (in *ExternalDatasetReference) DeepCopy() *ExternalDatasetReference { - if in == nil { - return nil - } - out := new(ExternalDatasetReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalServiceCost) DeepCopyInto(out *ExternalServiceCost) { - *out = *in - if in.ExternalService != nil { - in, out := &in.ExternalService, &out.ExternalService - *out = new(string) - **out = **in - } - if in.BytesProcessed != nil { - in, out := &in.BytesProcessed, &out.BytesProcessed - *out = new(int64) - **out = **in - } - if in.BytesBilled != nil { - in, out := &in.BytesBilled, &out.BytesBilled - *out = new(int64) - **out = **in - } - if in.SlotMs != nil { - in, out := &in.SlotMs, &out.SlotMs - *out = new(int64) - **out = **in - } - if in.ReservedSlotCount != nil { - in, out := &in.ReservedSlotCount, &out.ReservedSlotCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalServiceCost. -func (in *ExternalServiceCost) DeepCopy() *ExternalServiceCost { - if in == nil { - return nil - } - out := new(ExternalServiceCost) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForeignKey) DeepCopyInto(out *ForeignKey) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ReferencedTable != nil { - in, out := &in.ReferencedTable, &out.ReferencedTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.ColumnReferences != nil { - in, out := &in.ColumnReferences, &out.ColumnReferences - *out = make([]ColumnReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignKey. -func (in *ForeignKey) DeepCopy() *ForeignKey { - if in == nil { - return nil - } - out := new(ForeignKey) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForeignTypeInfo) DeepCopyInto(out *ForeignTypeInfo) { - *out = *in - if in.TypeSystem != nil { - in, out := &in.TypeSystem, &out.TypeSystem - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignTypeInfo. -func (in *ForeignTypeInfo) DeepCopy() *ForeignTypeInfo { - if in == nil { - return nil - } - out := new(ForeignTypeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ForeignViewDefinition) DeepCopyInto(out *ForeignViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.Dialect != nil { - in, out := &in.Dialect, &out.Dialect - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignViewDefinition. -func (in *ForeignViewDefinition) DeepCopy() *ForeignViewDefinition { - if in == nil { - return nil - } - out := new(ForeignViewDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GcpTag) DeepCopyInto(out *GcpTag) { - *out = *in - if in.TagKey != nil { - in, out := &in.TagKey, &out.TagKey - *out = new(string) - **out = **in - } - if in.TagValue != nil { - in, out := &in.TagValue, &out.TagValue - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GcpTag. -func (in *GcpTag) DeepCopy() *GcpTag { - if in == nil { - return nil - } - out := new(GcpTag) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GoogleSheetsOptions) DeepCopyInto(out *GoogleSheetsOptions) { - *out = *in - if in.SkipLeadingRows != nil { - in, out := &in.SkipLeadingRows, &out.SkipLeadingRows - *out = new(int64) - **out = **in - } - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GoogleSheetsOptions. -func (in *GoogleSheetsOptions) DeepCopy() *GoogleSheetsOptions { - if in == nil { - return nil - } - out := new(GoogleSheetsOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *HighCardinalityJoin) DeepCopyInto(out *HighCardinalityJoin) { - *out = *in - if in.LeftRows != nil { - in, out := &in.LeftRows, &out.LeftRows - *out = new(int64) - **out = **in - } - if in.RightRows != nil { - in, out := &in.RightRows, &out.RightRows - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.StepIndex != nil { - in, out := &in.StepIndex, &out.StepIndex - *out = new(int32) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HighCardinalityJoin. -func (in *HighCardinalityJoin) DeepCopy() *HighCardinalityJoin { - if in == nil { - return nil - } - out := new(HighCardinalityJoin) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *HivePartitioningOptions) DeepCopyInto(out *HivePartitioningOptions) { - *out = *in - if in.Mode != nil { - in, out := &in.Mode, &out.Mode - *out = new(string) - **out = **in - } - if in.SourceUriPrefix != nil { - in, out := &in.SourceUriPrefix, &out.SourceUriPrefix - *out = new(string) - **out = **in - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HivePartitioningOptions. -func (in *HivePartitioningOptions) DeepCopy() *HivePartitioningOptions { - if in == nil { - return nil - } - out := new(HivePartitioningOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *IndexUnusedReason) DeepCopyInto(out *IndexUnusedReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } - if in.BaseTable != nil { - in, out := &in.BaseTable, &out.BaseTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.IndexName != nil { - in, out := &in.IndexName, &out.IndexName - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new IndexUnusedReason. -func (in *IndexUnusedReason) DeepCopy() *IndexUnusedReason { - if in == nil { - return nil - } - out := new(IndexUnusedReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InputDataChange) DeepCopyInto(out *InputDataChange) { - *out = *in - if in.RecordsReadDiffPercentage != nil { - in, out := &in.RecordsReadDiffPercentage, &out.RecordsReadDiffPercentage - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InputDataChange. -func (in *InputDataChange) DeepCopy() *InputDataChange { - if in == nil { - return nil - } - out := new(InputDataChange) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Job) DeepCopyInto(out *Job) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.UserEmail != nil { - in, out := &in.UserEmail, &out.UserEmail - *out = new(string) - **out = **in - } - if in.Configuration != nil { - in, out := &in.Configuration, &out.Configuration - *out = new(JobConfiguration) - (*in).DeepCopyInto(*out) - } - if in.JobReference != nil { - in, out := &in.JobReference, &out.JobReference - *out = new(JobReference) - (*in).DeepCopyInto(*out) - } - if in.Statistics != nil { - in, out := &in.Statistics, &out.Statistics - *out = new(JobStatistics) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(JobStatus) - (*in).DeepCopyInto(*out) - } - if in.PrincipalSubject != nil { - in, out := &in.PrincipalSubject, &out.PrincipalSubject - *out = new(string) - **out = **in - } - if in.JobCreationReason != nil { - in, out := &in.JobCreationReason, &out.JobCreationReason - *out = new(JobCreationReason) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job. -func (in *Job) DeepCopy() *Job { - if in == nil { - return nil - } - out := new(Job) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfiguration) DeepCopyInto(out *JobConfiguration) { - *out = *in - if in.JobType != nil { - in, out := &in.JobType, &out.JobType - *out = new(string) - **out = **in - } - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(JobConfigurationQuery) - (*in).DeepCopyInto(*out) - } - if in.Load != nil { - in, out := &in.Load, &out.Load - *out = new(JobConfigurationLoad) - (*in).DeepCopyInto(*out) - } - if in.Copy != nil { - in, out := &in.Copy, &out.Copy - *out = new(JobConfigurationTableCopy) - (*in).DeepCopyInto(*out) - } - if in.Extract != nil { - in, out := &in.Extract, &out.Extract - *out = new(JobConfigurationExtract) - (*in).DeepCopyInto(*out) - } - if in.DryRun != nil { - in, out := &in.DryRun, &out.DryRun - *out = new(bool) - **out = **in - } - if in.JobTimeoutMs != nil { - in, out := &in.JobTimeoutMs, &out.JobTimeoutMs - *out = new(int64) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfiguration. -func (in *JobConfiguration) DeepCopy() *JobConfiguration { - if in == nil { - return nil - } - out := new(JobConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobConfigurationExtract) DeepCopyInto(out *JobConfigurationExtract) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SourceModel != nil { - in, out := &in.SourceModel, &out.SourceModel - *out = new(ModelReference) - (*in).DeepCopyInto(*out) - } - if in.DestinationUris != nil { - in, out := &in.DestinationUris, &out.DestinationUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.PrintHeader != nil { - in, out := &in.PrintHeader, &out.PrintHeader - *out = new(bool) - **out = **in - } - if in.FieldDelimiter != nil { - in, out := &in.FieldDelimiter, &out.FieldDelimiter - *out = new(string) - **out = **in - } - if in.DestinationFormat != nil { - in, out := &in.DestinationFormat, &out.DestinationFormat - *out = new(string) - **out = **in - } - if in.Compression != nil { - in, out := &in.Compression, &out.Compression - *out = new(string) - **out = **in - } - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } - if in.ModelExtractOptions != nil { - in, out := &in.ModelExtractOptions, &out.ModelExtractOptions - *out = new(JobConfigurationExtract_ModelExtractOptions) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationExtract. -func (in *JobConfigurationExtract) DeepCopy() *JobConfigurationExtract { - if in == nil { - return nil - } - out := new(JobConfigurationExtract) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfigurationExtract_ModelExtractOptions) DeepCopyInto(out *JobConfigurationExtract_ModelExtractOptions) { - *out = *in - if in.TrialID != nil { - in, out := &in.TrialID, &out.TrialID - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationExtract_ModelExtractOptions. -func (in *JobConfigurationExtract_ModelExtractOptions) DeepCopy() *JobConfigurationExtract_ModelExtractOptions { - if in == nil { - return nil - } - out := new(JobConfigurationExtract_ModelExtractOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobConfigurationLoad) DeepCopyInto(out *JobConfigurationLoad) { - *out = *in - if in.SourceUris != nil { - in, out := &in.SourceUris, &out.SourceUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileSetSpecType != nil { - in, out := &in.FileSetSpecType, &out.FileSetSpecType - *out = new(string) - **out = **in - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DestinationTableProperties != nil { - in, out := &in.DestinationTableProperties, &out.DestinationTableProperties - *out = new(DestinationTableProperties) - (*in).DeepCopyInto(*out) - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.NullMarker != nil { - in, out := &in.NullMarker, &out.NullMarker - *out = new(string) - **out = **in - } - if in.FieldDelimiter != nil { - in, out := &in.FieldDelimiter, &out.FieldDelimiter - *out = new(string) - **out = **in - } - if in.SkipLeadingRows != nil { - in, out := &in.SkipLeadingRows, &out.SkipLeadingRows - *out = new(int32) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.Quote != nil { - in, out := &in.Quote, &out.Quote - *out = new(string) - **out = **in - } - if in.MaxBadRecords != nil { - in, out := &in.MaxBadRecords, &out.MaxBadRecords - *out = new(int32) - **out = **in - } - if in.AllowQuotedNewlines != nil { - in, out := &in.AllowQuotedNewlines, &out.AllowQuotedNewlines - *out = new(bool) - **out = **in - } - if in.SourceFormat != nil { - in, out := &in.SourceFormat, &out.SourceFormat - *out = new(string) - **out = **in - } - if in.AllowJaggedRows != nil { - in, out := &in.AllowJaggedRows, &out.AllowJaggedRows - *out = new(bool) - **out = **in - } - if in.IgnoreUnknownValues != nil { - in, out := &in.IgnoreUnknownValues, &out.IgnoreUnknownValues - *out = new(bool) - **out = **in - } - if in.ProjectionFields != nil { - in, out := &in.ProjectionFields, &out.ProjectionFields - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Autodetect != nil { - in, out := &in.Autodetect, &out.Autodetect - *out = new(bool) - **out = **in - } - if in.SchemaUpdateOptions != nil { - in, out := &in.SchemaUpdateOptions, &out.SchemaUpdateOptions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } - if in.ReferenceFileSchemaUri != nil { - in, out := 
&in.ReferenceFileSchemaUri, &out.ReferenceFileSchemaUri - *out = new(string) - **out = **in - } - if in.HivePartitioningOptions != nil { - in, out := &in.HivePartitioningOptions, &out.HivePartitioningOptions - *out = new(HivePartitioningOptions) - (*in).DeepCopyInto(*out) - } - if in.DecimalTargetTypes != nil { - in, out := &in.DecimalTargetTypes, &out.DecimalTargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.JsonExtension != nil { - in, out := &in.JsonExtension, &out.JsonExtension - *out = new(string) - **out = **in - } - if in.ParquetOptions != nil { - in, out := &in.ParquetOptions, &out.ParquetOptions - *out = new(ParquetOptions) - (*in).DeepCopyInto(*out) - } - if in.PreserveAsciiControlCharacters != nil { - in, out := &in.PreserveAsciiControlCharacters, &out.PreserveAsciiControlCharacters - *out = new(bool) - **out = **in - } - if in.ConnectionProperties != nil { - in, out := &in.ConnectionProperties, &out.ConnectionProperties - *out = make([]ConnectionProperty, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateSession != nil { - in, out := &in.CreateSession, &out.CreateSession - *out = new(bool) - **out = **in - } - if in.ColumnNameCharacterMap != nil { - in, out := &in.ColumnNameCharacterMap, &out.ColumnNameCharacterMap - *out = new(string) - **out = **in - } - if in.CopyFilesOnly != nil { - in, out := &in.CopyFilesOnly, &out.CopyFilesOnly - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationLoad. -func (in *JobConfigurationLoad) DeepCopy() *JobConfigurationLoad { - if in == nil { - return nil - } - out := new(JobConfigurationLoad) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobConfigurationQuery) DeepCopyInto(out *JobConfigurationQuery) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.UserDefinedFunctionResources != nil { - in, out := &in.UserDefinedFunctionResources, &out.UserDefinedFunctionResources - *out = make([]UserDefinedFunctionResource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.DefaultDataset != nil { - in, out := &in.DefaultDataset, &out.DefaultDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.Priority != nil { - in, out := &in.Priority, &out.Priority - *out = new(string) - **out = **in - } - if in.AllowLargeResults != nil { - in, out := &in.AllowLargeResults, &out.AllowLargeResults - *out = new(bool) - **out = **in - } - if in.UseQueryCache != nil { - in, out := &in.UseQueryCache, &out.UseQueryCache - *out = new(bool) - **out = **in - } - if in.FlattenResults != nil { - in, out := &in.FlattenResults, &out.FlattenResults - *out = new(bool) - **out = **in - } - if in.MaximumBytesBilled != nil { - in, out := &in.MaximumBytesBilled, &out.MaximumBytesBilled - *out = new(int64) - **out = **in - } - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.ParameterMode != nil { - in, out := &in.ParameterMode, &out.ParameterMode - *out = new(string) - **out = **in - } - if in.QueryParameters != nil { - in, out := &in.QueryParameters, &out.QueryParameters - *out = make([]QueryParameter, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SystemVariables != nil { - in, out := &in.SystemVariables, &out.SystemVariables - *out = new(SystemVariables) - (*in).DeepCopyInto(*out) - } - if in.SchemaUpdateOptions != nil { - in, out := &in.SchemaUpdateOptions, &out.SchemaUpdateOptions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.ScriptOptions != nil { - in, out := &in.ScriptOptions, &out.ScriptOptions - *out = new(ScriptOptions) - (*in).DeepCopyInto(*out) - } - if in.ConnectionProperties != nil { - in, out := &in.ConnectionProperties, &out.ConnectionProperties - *out = make([]ConnectionProperty, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateSession != nil { - in, out := &in.CreateSession, &out.CreateSession - *out = new(bool) - **out = **in - } - if in.Continuous != nil { - in, out := &in.Continuous, 
&out.Continuous - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationQuery. -func (in *JobConfigurationQuery) DeepCopy() *JobConfigurationQuery { - if in == nil { - return nil - } - out := new(JobConfigurationQuery) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfigurationTableCopy) DeepCopyInto(out *JobConfigurationTableCopy) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SourceTables != nil { - in, out := &in.SourceTables, &out.SourceTables - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.OperationType != nil { - in, out := &in.OperationType, &out.OperationType - *out = new(string) - **out = **in - } - if in.DestinationExpirationTime != nil { - in, out := &in.DestinationExpirationTime, &out.DestinationExpirationTime - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationTableCopy. -func (in *JobConfigurationTableCopy) DeepCopy() *JobConfigurationTableCopy { - if in == nil { - return nil - } - out := new(JobConfigurationTableCopy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobCreationReason) DeepCopyInto(out *JobCreationReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobCreationReason. -func (in *JobCreationReason) DeepCopy() *JobCreationReason { - if in == nil { - return nil - } - out := new(JobCreationReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobList) DeepCopyInto(out *JobList) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Jobs != nil { - in, out := &in.Jobs, &out.Jobs - *out = make([]ListFormatJob, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Unreachable != nil { - in, out := &in.Unreachable, &out.Unreachable - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList. -func (in *JobList) DeepCopy() *JobList { - if in == nil { - return nil - } - out := new(JobList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobReference) DeepCopyInto(out *JobReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.JobID != nil { - in, out := &in.JobID, &out.JobID - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.LocationAlternative != nil { - in, out := &in.LocationAlternative, &out.LocationAlternative - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobReference. -func (in *JobReference) DeepCopy() *JobReference { - if in == nil { - return nil - } - out := new(JobReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics) DeepCopyInto(out *JobStatistics) { - *out = *in - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.StartTime != nil { - in, out := &in.StartTime, &out.StartTime - *out = new(int64) - **out = **in - } - if in.EndTime != nil { - in, out := &in.EndTime, &out.EndTime - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessed != nil { - in, out := &in.TotalBytesProcessed, &out.TotalBytesProcessed - *out = new(int64) - **out = **in - } - if in.CompletionRatio != nil { - in, out := &in.CompletionRatio, &out.CompletionRatio - *out = new(float64) - **out = **in - } - if in.QuotaDeferments != nil { - in, out := &in.QuotaDeferments, &out.QuotaDeferments - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(JobStatistics2) - (*in).DeepCopyInto(*out) - } - if in.Load != nil { - in, out := &in.Load, &out.Load - *out = new(JobStatistics3) - (*in).DeepCopyInto(*out) - } - if in.Extract != nil { - in, out := &in.Extract, &out.Extract - *out = new(JobStatistics4) - (*in).DeepCopyInto(*out) - } - if in.Copy != nil { - in, out := &in.Copy, &out.Copy - *out = new(CopyJobStatistics) - (*in).DeepCopyInto(*out) - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.ReservationID != nil { - in, out := &in.ReservationID, &out.ReservationID - *out = new(string) - **out = **in - } - if in.NumChildJobs != nil { - in, out := &in.NumChildJobs, &out.NumChildJobs - *out = new(int64) - **out = **in - } - if in.ParentJobID != nil { - in, out := &in.ParentJobID, &out.ParentJobID - *out = new(string) - **out = **in - } - if in.ScriptStatistics != nil { - in, out := &in.ScriptStatistics, &out.ScriptStatistics - *out = new(ScriptStatistics) - (*in).DeepCopyInto(*out) - } - if in.RowLevelSecurityStatistics != nil { - in, out := &in.RowLevelSecurityStatistics, &out.RowLevelSecurityStatistics - *out = new(RowLevelSecurityStatistics) - (*in).DeepCopyInto(*out) - } - if in.DataMaskingStatistics != nil { - in, out := &in.DataMaskingStatistics, &out.DataMaskingStatistics - *out = new(DataMaskingStatistics) - (*in).DeepCopyInto(*out) - } - if in.TransactionInfo != nil { - in, out := &in.TransactionInfo, &out.TransactionInfo - *out = new(JobStatistics_TransactionInfo) - (*in).DeepCopyInto(*out) - } - if in.SessionInfo != nil { - in, out := &in.SessionInfo, &out.SessionInfo - *out = new(SessionInfo) - (*in).DeepCopyInto(*out) - } - if in.FinalExecutionDurationMs != nil { - in, out := &in.FinalExecutionDurationMs, &out.FinalExecutionDurationMs - *out = new(int64) - **out = **in - } - if in.Edition != nil { - in, out := &in.Edition, &out.Edition - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics. -func (in *JobStatistics) DeepCopy() *JobStatistics { - if in == nil { - return nil - } - out := new(JobStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics2) DeepCopyInto(out *JobStatistics2) { - *out = *in - if in.QueryPlan != nil { - in, out := &in.QueryPlan, &out.QueryPlan - *out = make([]ExplainQueryStage, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EstimatedBytesProcessed != nil { - in, out := &in.EstimatedBytesProcessed, &out.EstimatedBytesProcessed - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TotalPartitionsProcessed != nil { - in, out := &in.TotalPartitionsProcessed, &out.TotalPartitionsProcessed - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessed != nil { - in, out := &in.TotalBytesProcessed, &out.TotalBytesProcessed - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessedAccuracy != nil { - in, out := &in.TotalBytesProcessedAccuracy, &out.TotalBytesProcessedAccuracy - *out = new(string) - **out = **in - } - if in.TotalBytesBilled != nil { - in, out := &in.TotalBytesBilled, &out.TotalBytesBilled - *out = new(int64) - **out = **in - } - if in.BillingTier != nil { - in, out := &in.BillingTier, &out.BillingTier - *out = new(int32) - **out = **in - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.CacheHit != nil { - in, out := &in.CacheHit, &out.CacheHit - *out = new(bool) - **out = **in - } - if in.ReferencedTables != nil { - in, out := &in.ReferencedTables, &out.ReferencedTables - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ReferencedRoutines != nil { - in, out := &in.ReferencedRoutines, &out.ReferencedRoutines - *out = make([]RoutineReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.NumDmlAffectedRows != nil { - in, out := &in.NumDmlAffectedRows, &out.NumDmlAffectedRows - *out = new(int64) - **out = **in - } - if in.DmlStats != nil { - in, out := &in.DmlStats, &out.DmlStats - *out = new(DmlStats) - (*in).DeepCopyInto(*out) - } - if in.UndeclaredQueryParameters != nil { - in, out := &in.UndeclaredQueryParameters, &out.UndeclaredQueryParameters - *out = make([]QueryParameter, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StatementType != nil { - in, out := &in.StatementType, &out.StatementType - *out = new(string) - **out = **in - } - if in.DdlOperationPerformed != nil { - in, out := &in.DdlOperationPerformed, &out.DdlOperationPerformed - *out = new(string) - **out = **in - } - if in.DdlTargetTable != nil { - in, out := &in.DdlTargetTable, &out.DdlTargetTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DdlDestinationTable != nil { - in, out := &in.DdlDestinationTable, &out.DdlDestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DdlTargetRowAccessPolicy != nil { - in, out := &in.DdlTargetRowAccessPolicy, &out.DdlTargetRowAccessPolicy - *out = new(RowAccessPolicyReference) - (*in).DeepCopyInto(*out) - } - if in.DdlAffectedRowAccessPolicyCount != nil { - in, out := &in.DdlAffectedRowAccessPolicyCount, &out.DdlAffectedRowAccessPolicyCount - *out = new(int64) - **out = **in - } - if in.DdlTargetRoutine != nil { - in, out := &in.DdlTargetRoutine, &out.DdlTargetRoutine - *out = 
new(RoutineReference) - (*in).DeepCopyInto(*out) - } - if in.DdlTargetDataset != nil { - in, out := &in.DdlTargetDataset, &out.DdlTargetDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.MlStatistics != nil { - in, out := &in.MlStatistics, &out.MlStatistics - *out = new(MlStatistics) - (*in).DeepCopyInto(*out) - } - if in.ExportDataStatistics != nil { - in, out := &in.ExportDataStatistics, &out.ExportDataStatistics - *out = new(ExportDataStatistics) - (*in).DeepCopyInto(*out) - } - if in.ExternalServiceCosts != nil { - in, out := &in.ExternalServiceCosts, &out.ExternalServiceCosts - *out = make([]ExternalServiceCost, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.BiEngineStatistics != nil { - in, out := &in.BiEngineStatistics, &out.BiEngineStatistics - *out = new(BiEngineStatistics) - (*in).DeepCopyInto(*out) - } - if in.LoadQueryStatistics != nil { - in, out := &in.LoadQueryStatistics, &out.LoadQueryStatistics - *out = new(LoadQueryStatistics) - (*in).DeepCopyInto(*out) - } - if in.DclTargetTable != nil { - in, out := &in.DclTargetTable, &out.DclTargetTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DclTargetView != nil { - in, out := &in.DclTargetView, &out.DclTargetView - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DclTargetDataset != nil { - in, out := &in.DclTargetDataset, &out.DclTargetDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.SearchStatistics != nil { - in, out := &in.SearchStatistics, &out.SearchStatistics - *out = new(SearchStatistics) - (*in).DeepCopyInto(*out) - } - if in.VectorSearchStatistics != nil { - in, out := &in.VectorSearchStatistics, &out.VectorSearchStatistics - *out = new(VectorSearchStatistics) - (*in).DeepCopyInto(*out) - } - if in.PerformanceInsights != nil { - in, out := &in.PerformanceInsights, &out.PerformanceInsights - *out = new(PerformanceInsights) - (*in).DeepCopyInto(*out) - } - if in.QueryInfo != nil { - in, out := &in.QueryInfo, &out.QueryInfo - *out = new(QueryInfo) - (*in).DeepCopyInto(*out) - } - if in.SparkStatistics != nil { - in, out := &in.SparkStatistics, &out.SparkStatistics - *out = new(SparkStatistics) - (*in).DeepCopyInto(*out) - } - if in.TransferredBytes != nil { - in, out := &in.TransferredBytes, &out.TransferredBytes - *out = new(int64) - **out = **in - } - if in.MaterializedViewStatistics != nil { - in, out := &in.MaterializedViewStatistics, &out.MaterializedViewStatistics - *out = new(MaterializedViewStatistics) - (*in).DeepCopyInto(*out) - } - if in.MetadataCacheStatistics != nil { - in, out := &in.MetadataCacheStatistics, &out.MetadataCacheStatistics - *out = new(MetadataCacheStatistics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics2. -func (in *JobStatistics2) DeepCopy() *JobStatistics2 { - if in == nil { - return nil - } - out := new(JobStatistics2) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics3) DeepCopyInto(out *JobStatistics3) { - *out = *in - if in.InputFiles != nil { - in, out := &in.InputFiles, &out.InputFiles - *out = new(int64) - **out = **in - } - if in.InputFileBytes != nil { - in, out := &in.InputFileBytes, &out.InputFileBytes - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.OutputBytes != nil { - in, out := &in.OutputBytes, &out.OutputBytes - *out = new(int64) - **out = **in - } - if in.BadRecords != nil { - in, out := &in.BadRecords, &out.BadRecords - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics3. -func (in *JobStatistics3) DeepCopy() *JobStatistics3 { - if in == nil { - return nil - } - out := new(JobStatistics3) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatistics4) DeepCopyInto(out *JobStatistics4) { - *out = *in - if in.DestinationUriFileCounts != nil { - in, out := &in.DestinationUriFileCounts, &out.DestinationUriFileCounts - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.InputBytes != nil { - in, out := &in.InputBytes, &out.InputBytes - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics4. -func (in *JobStatistics4) DeepCopy() *JobStatistics4 { - if in == nil { - return nil - } - out := new(JobStatistics4) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatistics_TransactionInfo) DeepCopyInto(out *JobStatistics_TransactionInfo) { - *out = *in - if in.TransactionID != nil { - in, out := &in.TransactionID, &out.TransactionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics_TransactionInfo. -func (in *JobStatistics_TransactionInfo) DeepCopy() *JobStatistics_TransactionInfo { - if in == nil { - return nil - } - out := new(JobStatistics_TransactionInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatus) DeepCopyInto(out *JobStatus) { - *out = *in - if in.ErrorResult != nil { - in, out := &in.ErrorResult, &out.ErrorResult - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make([]ErrorProto, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus. 
-func (in *JobStatus) DeepCopy() *JobStatus { - if in == nil { - return nil - } - out := new(JobStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JoinRestrictionPolicy) DeepCopyInto(out *JoinRestrictionPolicy) { - *out = *in - if in.JoinCondition != nil { - in, out := &in.JoinCondition, &out.JoinCondition - *out = new(string) - **out = **in - } - if in.JoinAllowedColumns != nil { - in, out := &in.JoinAllowedColumns, &out.JoinAllowedColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JoinRestrictionPolicy. -func (in *JoinRestrictionPolicy) DeepCopy() *JoinRestrictionPolicy { - if in == nil { - return nil - } - out := new(JoinRestrictionPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JsonOptions) DeepCopyInto(out *JsonOptions) { - *out = *in - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JsonOptions. -func (in *JsonOptions) DeepCopy() *JsonOptions { - if in == nil { - return nil - } - out := new(JsonOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LinkedDatasetMetadata) DeepCopyInto(out *LinkedDatasetMetadata) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetMetadata. -func (in *LinkedDatasetMetadata) DeepCopy() *LinkedDatasetMetadata { - if in == nil { - return nil - } - out := new(LinkedDatasetMetadata) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LinkedDatasetSource) DeepCopyInto(out *LinkedDatasetSource) { - *out = *in - if in.SourceDataset != nil { - in, out := &in.SourceDataset, &out.SourceDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetSource. -func (in *LinkedDatasetSource) DeepCopy() *LinkedDatasetSource { - if in == nil { - return nil - } - out := new(LinkedDatasetSource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ListFormatDataset) DeepCopyInto(out *ListFormatDataset) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.DatasetReference != nil { - in, out := &in.DatasetReference, &out.DatasetReference - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatDataset. -func (in *ListFormatDataset) DeepCopy() *ListFormatDataset { - if in == nil { - return nil - } - out := new(ListFormatDataset) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ListFormatJob) DeepCopyInto(out *ListFormatJob) { - *out = *in - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.JobReference != nil { - in, out := &in.JobReference, &out.JobReference - *out = new(JobReference) - (*in).DeepCopyInto(*out) - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(string) - **out = **in - } - if in.ErrorResult != nil { - in, out := &in.ErrorResult, &out.ErrorResult - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } - if in.Statistics != nil { - in, out := &in.Statistics, &out.Statistics - *out = new(JobStatistics) - (*in).DeepCopyInto(*out) - } - if in.Configuration != nil { - in, out := &in.Configuration, &out.Configuration - *out = new(JobConfiguration) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(JobStatus) - (*in).DeepCopyInto(*out) - } - if in.UserEmail != nil { - in, out := &in.UserEmail, &out.UserEmail - *out = new(string) - **out = **in - } - if in.PrincipalSubject != nil { - in, out := &in.PrincipalSubject, &out.PrincipalSubject - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatJob. -func (in *ListFormatJob) DeepCopy() *ListFormatJob { - if in == nil { - return nil - } - out := new(ListFormatJob) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ListFormatTable) DeepCopyInto(out *ListFormatTable) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.View != nil { - in, out := &in.View, &out.View - *out = new(ListFormatView) - (*in).DeepCopyInto(*out) - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatTable. -func (in *ListFormatTable) DeepCopy() *ListFormatTable { - if in == nil { - return nil - } - out := new(ListFormatTable) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ListFormatView) DeepCopyInto(out *ListFormatView) { - *out = *in - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.PrivacyPolicy != nil { - in, out := &in.PrivacyPolicy, &out.PrivacyPolicy - *out = new(PrivacyPolicy) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatView. -func (in *ListFormatView) DeepCopy() *ListFormatView { - if in == nil { - return nil - } - out := new(ListFormatView) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *LoadQueryStatistics) DeepCopyInto(out *LoadQueryStatistics) { - *out = *in - if in.InputFiles != nil { - in, out := &in.InputFiles, &out.InputFiles - *out = new(int64) - **out = **in - } - if in.InputFileBytes != nil { - in, out := &in.InputFileBytes, &out.InputFileBytes - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.OutputBytes != nil { - in, out := &in.OutputBytes, &out.OutputBytes - *out = new(int64) - **out = **in - } - if in.BadRecords != nil { - in, out := &in.BadRecords, &out.BadRecords - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadQueryStatistics. -func (in *LoadQueryStatistics) DeepCopy() *LoadQueryStatistics { - if in == nil { - return nil - } - out := new(LoadQueryStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedView) DeepCopyInto(out *MaterializedView) { - *out = *in - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.Chosen != nil { - in, out := &in.Chosen, &out.Chosen - *out = new(bool) - **out = **in - } - if in.EstimatedBytesSaved != nil { - in, out := &in.EstimatedBytesSaved, &out.EstimatedBytesSaved - *out = new(int64) - **out = **in - } - if in.RejectedReason != nil { - in, out := &in.RejectedReason, &out.RejectedReason - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedView. -func (in *MaterializedView) DeepCopy() *MaterializedView { - if in == nil { - return nil - } - out := new(MaterializedView) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedViewDefinition) DeepCopyInto(out *MaterializedViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.LastRefreshTime != nil { - in, out := &in.LastRefreshTime, &out.LastRefreshTime - *out = new(int64) - **out = **in - } - if in.EnableRefresh != nil { - in, out := &in.EnableRefresh, &out.EnableRefresh - *out = new(bool) - **out = **in - } - if in.RefreshIntervalMs != nil { - in, out := &in.RefreshIntervalMs, &out.RefreshIntervalMs - *out = new(uint64) - **out = **in - } - if in.AllowNonIncrementalDefinition != nil { - in, out := &in.AllowNonIncrementalDefinition, &out.AllowNonIncrementalDefinition - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewDefinition. -func (in *MaterializedViewDefinition) DeepCopy() *MaterializedViewDefinition { - if in == nil { - return nil - } - out := new(MaterializedViewDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *MaterializedViewStatistics) DeepCopyInto(out *MaterializedViewStatistics) { - *out = *in - if in.MaterializedView != nil { - in, out := &in.MaterializedView, &out.MaterializedView - *out = make([]MaterializedView, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewStatistics. -func (in *MaterializedViewStatistics) DeepCopy() *MaterializedViewStatistics { - if in == nil { - return nil - } - out := new(MaterializedViewStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedViewStatus) DeepCopyInto(out *MaterializedViewStatus) { - *out = *in - if in.RefreshWatermark != nil { - in, out := &in.RefreshWatermark, &out.RefreshWatermark - *out = new(string) - **out = **in - } - if in.LastRefreshStatus != nil { - in, out := &in.LastRefreshStatus, &out.LastRefreshStatus - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewStatus. -func (in *MaterializedViewStatus) DeepCopy() *MaterializedViewStatus { - if in == nil { - return nil - } - out := new(MaterializedViewStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MetadataCacheStatistics) DeepCopyInto(out *MetadataCacheStatistics) { - *out = *in - if in.TableMetadataCacheUsage != nil { - in, out := &in.TableMetadataCacheUsage, &out.TableMetadataCacheUsage - *out = make([]TableMetadataCacheUsage, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataCacheStatistics. -func (in *MetadataCacheStatistics) DeepCopy() *MetadataCacheStatistics { - if in == nil { - return nil - } - out := new(MetadataCacheStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MlStatistics) DeepCopyInto(out *MlStatistics) { - *out = *in - if in.MaxIterations != nil { - in, out := &in.MaxIterations, &out.MaxIterations - *out = new(int64) - **out = **in - } - if in.IterationResults != nil { - in, out := &in.IterationResults, &out.IterationResults - *out = make([]Model_TrainingRun_IterationResult, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ModelType != nil { - in, out := &in.ModelType, &out.ModelType - *out = new(string) - **out = **in - } - if in.TrainingType != nil { - in, out := &in.TrainingType, &out.TrainingType - *out = new(string) - **out = **in - } - if in.HparamTrials != nil { - in, out := &in.HparamTrials, &out.HparamTrials - *out = make([]Model_HparamTuningTrial, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MlStatistics. -func (in *MlStatistics) DeepCopy() *MlStatistics { - if in == nil { - return nil - } - out := new(MlStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model) DeepCopyInto(out *Model) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ModelReference != nil { - in, out := &in.ModelReference, &out.ModelReference - *out = new(ModelReference) - (*in).DeepCopyInto(*out) - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(int64) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.EncryptionConfiguration != nil { - in, out := &in.EncryptionConfiguration, &out.EncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.ModelType != nil { - in, out := &in.ModelType, &out.ModelType - *out = new(string) - **out = **in - } - if in.TrainingRuns != nil { - in, out := &in.TrainingRuns, &out.TrainingRuns - *out = make([]Model_TrainingRun, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.FeatureColumns != nil { - in, out := &in.FeatureColumns, &out.FeatureColumns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.LabelColumns != nil { - in, out := &in.LabelColumns, &out.LabelColumns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TransformColumns != nil { - in, out := &in.TransformColumns, &out.TransformColumns - *out = make([]TransformColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.HparamSearchSpaces != nil { - in, out := &in.HparamSearchSpaces, &out.HparamSearchSpaces - *out = new(Model_HparamSearchSpaces) - (*in).DeepCopyInto(*out) - } - if in.DefaultTrialID != nil { - in, out := &in.DefaultTrialID, &out.DefaultTrialID - *out = new(int64) - **out = **in - } - if in.HparamTrials != nil { - in, out := &in.HparamTrials, &out.HparamTrials - *out = make([]Model_HparamTuningTrial, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.OptimalTrialIds != nil { - in, out := &in.OptimalTrialIds, &out.OptimalTrialIds - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.RemoteModelInfo != nil { - in, out := &in.RemoteModelInfo, &out.RemoteModelInfo - *out = new(RemoteModelInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model. -func (in *Model) DeepCopy() *Model { - if in == nil { - return nil - } - out := new(Model) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ModelReference) DeepCopyInto(out *ModelReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.DatasetID != nil { - in, out := &in.DatasetID, &out.DatasetID - *out = new(string) - **out = **in - } - if in.ModelID != nil { - in, out := &in.ModelID, &out.ModelID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ModelReference. -func (in *ModelReference) DeepCopy() *ModelReference { - if in == nil { - return nil - } - out := new(ModelReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_AggregateClassificationMetrics) DeepCopyInto(out *Model_AggregateClassificationMetrics) { - *out = *in - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(float64) - **out = **in - } - if in.Recall != nil { - in, out := &in.Recall, &out.Recall - *out = new(float64) - **out = **in - } - if in.Accuracy != nil { - in, out := &in.Accuracy, &out.Accuracy - *out = new(float64) - **out = **in - } - if in.Threshold != nil { - in, out := &in.Threshold, &out.Threshold - *out = new(float64) - **out = **in - } - if in.F1Score != nil { - in, out := &in.F1Score, &out.F1Score - *out = new(float64) - **out = **in - } - if in.LogLoss != nil { - in, out := &in.LogLoss, &out.LogLoss - *out = new(float64) - **out = **in - } - if in.RocAuc != nil { - in, out := &in.RocAuc, &out.RocAuc - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_AggregateClassificationMetrics. -func (in *Model_AggregateClassificationMetrics) DeepCopy() *Model_AggregateClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_AggregateClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaFittingMetrics) DeepCopyInto(out *Model_ArimaFittingMetrics) { - *out = *in - if in.LogLikelihood != nil { - in, out := &in.LogLikelihood, &out.LogLikelihood - *out = new(float64) - **out = **in - } - if in.Aic != nil { - in, out := &in.Aic, &out.Aic - *out = new(float64) - **out = **in - } - if in.Variance != nil { - in, out := &in.Variance, &out.Variance - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaFittingMetrics. -func (in *Model_ArimaFittingMetrics) DeepCopy() *Model_ArimaFittingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaFittingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaForecastingMetrics) DeepCopyInto(out *Model_ArimaForecastingMetrics) { - *out = *in - if in.ArimaSingleModelForecastingMetrics != nil { - in, out := &in.ArimaSingleModelForecastingMetrics, &out.ArimaSingleModelForecastingMetrics - *out = make([]Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaForecastingMetrics. 
-func (in *Model_ArimaForecastingMetrics) DeepCopy() *Model_ArimaForecastingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaForecastingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) DeepCopyInto(out *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) { - *out = *in - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.ArimaFittingMetrics != nil { - in, out := &in.ArimaFittingMetrics, &out.ArimaFittingMetrics - *out = new(Model_ArimaFittingMetrics) - (*in).DeepCopyInto(*out) - } - if in.HasDrift != nil { - in, out := &in.HasDrift, &out.HasDrift - *out = new(bool) - **out = **in - } - if in.TimeSeriesID != nil { - in, out := &in.TimeSeriesID, &out.TimeSeriesID - *out = new(string) - **out = **in - } - if in.TimeSeriesIds != nil { - in, out := &in.TimeSeriesIds, &out.TimeSeriesIds - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.HasHolidayEffect != nil { - in, out := &in.HasHolidayEffect, &out.HasHolidayEffect - *out = new(bool) - **out = **in - } - if in.HasSpikesAndDips != nil { - in, out := &in.HasSpikesAndDips, &out.HasSpikesAndDips - *out = new(bool) - **out = **in - } - if in.HasStepChanges != nil { - in, out := &in.HasStepChanges, &out.HasStepChanges - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics. -func (in *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) DeepCopy() *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaOrder) DeepCopyInto(out *Model_ArimaOrder) { - *out = *in - if in.P != nil { - in, out := &in.P, &out.P - *out = new(int64) - **out = **in - } - if in.D != nil { - in, out := &in.D, &out.D - *out = new(int64) - **out = **in - } - if in.Q != nil { - in, out := &in.Q, &out.Q - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaOrder. -func (in *Model_ArimaOrder) DeepCopy() *Model_ArimaOrder { - if in == nil { - return nil - } - out := new(Model_ArimaOrder) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_BinaryClassificationMetrics) DeepCopyInto(out *Model_BinaryClassificationMetrics) { - *out = *in - if in.AggregateClassificationMetrics != nil { - in, out := &in.AggregateClassificationMetrics, &out.AggregateClassificationMetrics - *out = new(Model_AggregateClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.BinaryConfusionMatrixList != nil { - in, out := &in.BinaryConfusionMatrixList, &out.BinaryConfusionMatrixList - *out = make([]Model_BinaryClassificationMetrics_BinaryConfusionMatrix, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.PositiveLabel != nil { - in, out := &in.PositiveLabel, &out.PositiveLabel - *out = new(string) - **out = **in - } - if in.NegativeLabel != nil { - in, out := &in.NegativeLabel, &out.NegativeLabel - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BinaryClassificationMetrics. -func (in *Model_BinaryClassificationMetrics) DeepCopy() *Model_BinaryClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_BinaryClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) DeepCopyInto(out *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) { - *out = *in - if in.PositiveClassThreshold != nil { - in, out := &in.PositiveClassThreshold, &out.PositiveClassThreshold - *out = new(float64) - **out = **in - } - if in.TruePositives != nil { - in, out := &in.TruePositives, &out.TruePositives - *out = new(int64) - **out = **in - } - if in.FalsePositives != nil { - in, out := &in.FalsePositives, &out.FalsePositives - *out = new(int64) - **out = **in - } - if in.TrueNegatives != nil { - in, out := &in.TrueNegatives, &out.TrueNegatives - *out = new(int64) - **out = **in - } - if in.FalseNegatives != nil { - in, out := &in.FalseNegatives, &out.FalseNegatives - *out = new(int64) - **out = **in - } - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(float64) - **out = **in - } - if in.Recall != nil { - in, out := &in.Recall, &out.Recall - *out = new(float64) - **out = **in - } - if in.F1Score != nil { - in, out := &in.F1Score, &out.F1Score - *out = new(float64) - **out = **in - } - if in.Accuracy != nil { - in, out := &in.Accuracy, &out.Accuracy - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BinaryClassificationMetrics_BinaryConfusionMatrix. -func (in *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) DeepCopy() *Model_BinaryClassificationMetrics_BinaryConfusionMatrix { - if in == nil { - return nil - } - out := new(Model_BinaryClassificationMetrics_BinaryConfusionMatrix) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_BoostedTreeOptionEnums) DeepCopyInto(out *Model_BoostedTreeOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BoostedTreeOptionEnums. 
-func (in *Model_BoostedTreeOptionEnums) DeepCopy() *Model_BoostedTreeOptionEnums { - if in == nil { - return nil - } - out := new(Model_BoostedTreeOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_CategoryEncodingMethod) DeepCopyInto(out *Model_CategoryEncodingMethod) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_CategoryEncodingMethod. -func (in *Model_CategoryEncodingMethod) DeepCopy() *Model_CategoryEncodingMethod { - if in == nil { - return nil - } - out := new(Model_CategoryEncodingMethod) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics) DeepCopyInto(out *Model_ClusteringMetrics) { - *out = *in - if in.DaviesBouldinIndex != nil { - in, out := &in.DaviesBouldinIndex, &out.DaviesBouldinIndex - *out = new(float64) - **out = **in - } - if in.MeanSquaredDistance != nil { - in, out := &in.MeanSquaredDistance, &out.MeanSquaredDistance - *out = new(float64) - **out = **in - } - if in.Clusters != nil { - in, out := &in.Clusters, &out.Clusters - *out = make([]Model_ClusteringMetrics_Cluster, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics. -func (in *Model_ClusteringMetrics) DeepCopy() *Model_ClusteringMetrics { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster) DeepCopyInto(out *Model_ClusteringMetrics_Cluster) { - *out = *in - if in.CentroidID != nil { - in, out := &in.CentroidID, &out.CentroidID - *out = new(int64) - **out = **in - } - if in.FeatureValues != nil { - in, out := &in.FeatureValues, &out.FeatureValues - *out = make([]Model_ClusteringMetrics_Cluster_FeatureValue, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Count != nil { - in, out := &in.Count, &out.Count - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster. -func (in *Model_ClusteringMetrics_Cluster) DeepCopy() *Model_ClusteringMetrics_Cluster { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_ClusteringMetrics_Cluster_FeatureValue) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue) { - *out = *in - if in.FeatureColumn != nil { - in, out := &in.FeatureColumn, &out.FeatureColumn - *out = new(string) - **out = **in - } - if in.NumericalValue != nil { - in, out := &in.NumericalValue, &out.NumericalValue - *out = new(float64) - **out = **in - } - if in.CategoricalValue != nil { - in, out := &in.CategoricalValue, &out.CategoricalValue - *out = new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) { - *out = *in - if in.CategoryCounts != nil { - in, out := &in.CategoryCounts, &out.CategoryCounts - *out = make([]Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) { - *out = *in - if in.Category != nil { - in, out := &in.Category, &out.Category - *out = new(string) - **out = **in - } - if in.Count != nil { - in, out := &in.Count, &out.Count - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_DataSplitResult) DeepCopyInto(out *Model_DataSplitResult) { - *out = *in - if in.TrainingTable != nil { - in, out := &in.TrainingTable, &out.TrainingTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.EvaluationTable != nil { - in, out := &in.EvaluationTable, &out.EvaluationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.TestTable != nil { - in, out := &in.TestTable, &out.TestTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_DataSplitResult. -func (in *Model_DataSplitResult) DeepCopy() *Model_DataSplitResult { - if in == nil { - return nil - } - out := new(Model_DataSplitResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_DimensionalityReductionMetrics) DeepCopyInto(out *Model_DimensionalityReductionMetrics) { - *out = *in - if in.TotalExplainedVarianceRatio != nil { - in, out := &in.TotalExplainedVarianceRatio, &out.TotalExplainedVarianceRatio - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_DimensionalityReductionMetrics. -func (in *Model_DimensionalityReductionMetrics) DeepCopy() *Model_DimensionalityReductionMetrics { - if in == nil { - return nil - } - out := new(Model_DimensionalityReductionMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_EvaluationMetrics) DeepCopyInto(out *Model_EvaluationMetrics) { - *out = *in - if in.RegressionMetrics != nil { - in, out := &in.RegressionMetrics, &out.RegressionMetrics - *out = new(Model_RegressionMetrics) - (*in).DeepCopyInto(*out) - } - if in.BinaryClassificationMetrics != nil { - in, out := &in.BinaryClassificationMetrics, &out.BinaryClassificationMetrics - *out = new(Model_BinaryClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.MultiClassClassificationMetrics != nil { - in, out := &in.MultiClassClassificationMetrics, &out.MultiClassClassificationMetrics - *out = new(Model_MultiClassClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.ClusteringMetrics != nil { - in, out := &in.ClusteringMetrics, &out.ClusteringMetrics - *out = new(Model_ClusteringMetrics) - (*in).DeepCopyInto(*out) - } - if in.RankingMetrics != nil { - in, out := &in.RankingMetrics, &out.RankingMetrics - *out = new(Model_RankingMetrics) - (*in).DeepCopyInto(*out) - } - if in.ArimaForecastingMetrics != nil { - in, out := &in.ArimaForecastingMetrics, &out.ArimaForecastingMetrics - *out = new(Model_ArimaForecastingMetrics) - (*in).DeepCopyInto(*out) - } - if in.DimensionalityReductionMetrics != nil { - in, out := &in.DimensionalityReductionMetrics, &out.DimensionalityReductionMetrics - *out = new(Model_DimensionalityReductionMetrics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_EvaluationMetrics. -func (in *Model_EvaluationMetrics) DeepCopy() *Model_EvaluationMetrics { - if in == nil { - return nil - } - out := new(Model_EvaluationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_GlobalExplanation) DeepCopyInto(out *Model_GlobalExplanation) { - *out = *in - if in.Explanations != nil { - in, out := &in.Explanations, &out.Explanations - *out = make([]Model_GlobalExplanation_Explanation, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ClassLabel != nil { - in, out := &in.ClassLabel, &out.ClassLabel - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_GlobalExplanation. -func (in *Model_GlobalExplanation) DeepCopy() *Model_GlobalExplanation { - if in == nil { - return nil - } - out := new(Model_GlobalExplanation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_GlobalExplanation_Explanation) DeepCopyInto(out *Model_GlobalExplanation_Explanation) { - *out = *in - if in.FeatureName != nil { - in, out := &in.FeatureName, &out.FeatureName - *out = new(string) - **out = **in - } - if in.Attribution != nil { - in, out := &in.Attribution, &out.Attribution - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_GlobalExplanation_Explanation. -func (in *Model_GlobalExplanation_Explanation) DeepCopy() *Model_GlobalExplanation_Explanation { - if in == nil { - return nil - } - out := new(Model_GlobalExplanation_Explanation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_HparamSearchSpaces) DeepCopyInto(out *Model_HparamSearchSpaces) { - *out = *in - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.L1Reg != nil { - in, out := &in.L1Reg, &out.L1Reg - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.L2Reg != nil { - in, out := &in.L2Reg, &out.L2Reg - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumClusters != nil { - in, out := &in.NumClusters, &out.NumClusters - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumFactors != nil { - in, out := &in.NumFactors, &out.NumFactors - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.HiddenUnits != nil { - in, out := &in.HiddenUnits, &out.HiddenUnits - *out = new(Model_IntArrayHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Dropout != nil { - in, out := &in.Dropout, &out.Dropout - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MaxTreeDepth != nil { - in, out := &in.MaxTreeDepth, &out.MaxTreeDepth - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Subsample != nil { - in, out := &in.Subsample, &out.Subsample - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MinSplitLoss != nil { - in, out := &in.MinSplitLoss, &out.MinSplitLoss - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.WalsAlpha != nil { - in, out := &in.WalsAlpha, &out.WalsAlpha - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.BoosterType != nil { - in, out := &in.BoosterType, 
&out.BoosterType - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumParallelTree != nil { - in, out := &in.NumParallelTree, &out.NumParallelTree - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.DartNormalizeType != nil { - in, out := &in.DartNormalizeType, &out.DartNormalizeType - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.TreeMethod != nil { - in, out := &in.TreeMethod, &out.TreeMethod - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MinTreeChildWeight != nil { - in, out := &in.MinTreeChildWeight, &out.MinTreeChildWeight - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBytree != nil { - in, out := &in.ColsampleBytree, &out.ColsampleBytree - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBylevel != nil { - in, out := &in.ColsampleBylevel, &out.ColsampleBylevel - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBynode != nil { - in, out := &in.ColsampleBynode, &out.ColsampleBynode - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ActivationFn != nil { - in, out := &in.ActivationFn, &out.ActivationFn - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Optimizer != nil { - in, out := &in.Optimizer, &out.Optimizer - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamSearchSpaces. -func (in *Model_HparamSearchSpaces) DeepCopy() *Model_HparamSearchSpaces { - if in == nil { - return nil - } - out := new(Model_HparamSearchSpaces) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_HparamTuningEnums) DeepCopyInto(out *Model_HparamTuningEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamTuningEnums. -func (in *Model_HparamTuningEnums) DeepCopy() *Model_HparamTuningEnums { - if in == nil { - return nil - } - out := new(Model_HparamTuningEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_HparamTuningTrial) DeepCopyInto(out *Model_HparamTuningTrial) { - *out = *in - if in.TrialID != nil { - in, out := &in.TrialID, &out.TrialID - *out = new(int64) - **out = **in - } - if in.StartTimeMs != nil { - in, out := &in.StartTimeMs, &out.StartTimeMs - *out = new(int64) - **out = **in - } - if in.EndTimeMs != nil { - in, out := &in.EndTimeMs, &out.EndTimeMs - *out = new(int64) - **out = **in - } - if in.Hparams != nil { - in, out := &in.Hparams, &out.Hparams - *out = new(Model_TrainingRun_TrainingOptions) - (*in).DeepCopyInto(*out) - } - if in.EvaluationMetrics != nil { - in, out := &in.EvaluationMetrics, &out.EvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(string) - **out = **in - } - if in.ErrorMessage != nil { - in, out := &in.ErrorMessage, &out.ErrorMessage - *out = new(string) - **out = **in - } - if in.TrainingLoss != nil { - in, out := &in.TrainingLoss, &out.TrainingLoss - *out = new(float64) - **out = **in - } - if in.EvalLoss != nil { - in, out := &in.EvalLoss, &out.EvalLoss - *out = new(float64) - **out = **in - } - if in.HparamTuningEvaluationMetrics != nil { - in, out := &in.HparamTuningEvaluationMetrics, &out.HparamTuningEvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamTuningTrial. -func (in *Model_HparamTuningTrial) DeepCopy() *Model_HparamTuningTrial { - if in == nil { - return nil - } - out := new(Model_HparamTuningTrial) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntArrayHparamSearchSpace) DeepCopyInto(out *Model_IntArrayHparamSearchSpace) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]Model_IntArrayHparamSearchSpace_IntArray, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntArrayHparamSearchSpace. -func (in *Model_IntArrayHparamSearchSpace) DeepCopy() *Model_IntArrayHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_IntArrayHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntArrayHparamSearchSpace_IntArray) DeepCopyInto(out *Model_IntArrayHparamSearchSpace_IntArray) { - *out = *in - if in.Elements != nil { - in, out := &in.Elements, &out.Elements - *out = make([]int64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntArrayHparamSearchSpace_IntArray. -func (in *Model_IntArrayHparamSearchSpace_IntArray) DeepCopy() *Model_IntArrayHparamSearchSpace_IntArray { - if in == nil { - return nil - } - out := new(Model_IntArrayHparamSearchSpace_IntArray) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_IntHparamSearchSpace) DeepCopyInto(out *Model_IntHparamSearchSpace) { - *out = *in - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(Model_IntHparamSearchSpace_IntRange) - (*in).DeepCopyInto(*out) - } - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = new(Model_IntHparamSearchSpace_IntCandidates) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace. -func (in *Model_IntHparamSearchSpace) DeepCopy() *Model_IntHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntHparamSearchSpace_IntCandidates) DeepCopyInto(out *Model_IntHparamSearchSpace_IntCandidates) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]int64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace_IntCandidates. -func (in *Model_IntHparamSearchSpace_IntCandidates) DeepCopy() *Model_IntHparamSearchSpace_IntCandidates { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace_IntCandidates) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntHparamSearchSpace_IntRange) DeepCopyInto(out *Model_IntHparamSearchSpace_IntRange) { - *out = *in - if in.Min != nil { - in, out := &in.Min, &out.Min - *out = new(int64) - **out = **in - } - if in.Max != nil { - in, out := &in.Max, &out.Max - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace_IntRange. -func (in *Model_IntHparamSearchSpace_IntRange) DeepCopy() *Model_IntHparamSearchSpace_IntRange { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace_IntRange) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_KmeansEnums) DeepCopyInto(out *Model_KmeansEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_KmeansEnums. -func (in *Model_KmeansEnums) DeepCopy() *Model_KmeansEnums { - if in == nil { - return nil - } - out := new(Model_KmeansEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ModelRegistryOptionEnums) DeepCopyInto(out *Model_ModelRegistryOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ModelRegistryOptionEnums. -func (in *Model_ModelRegistryOptionEnums) DeepCopy() *Model_ModelRegistryOptionEnums { - if in == nil { - return nil - } - out := new(Model_ModelRegistryOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_MultiClassClassificationMetrics) DeepCopyInto(out *Model_MultiClassClassificationMetrics) { - *out = *in - if in.AggregateClassificationMetrics != nil { - in, out := &in.AggregateClassificationMetrics, &out.AggregateClassificationMetrics - *out = new(Model_AggregateClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.ConfusionMatrixList != nil { - in, out := &in.ConfusionMatrixList, &out.ConfusionMatrixList - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics. -func (in *Model_MultiClassClassificationMetrics) DeepCopy() *Model_MultiClassClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix) { - *out = *in - if in.ConfidenceThreshold != nil { - in, out := &in.ConfidenceThreshold, &out.ConfidenceThreshold - *out = new(float64) - **out = **in - } - if in.Rows != nil { - in, out := &in.Rows, &out.Rows - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix_Row, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) { - *out = *in - if in.PredictedLabel != nil { - in, out := &in.PredictedLabel, &out.PredictedLabel - *out = new(string) - **out = **in - } - if in.ItemCount != nil { - in, out := &in.ItemCount, &out.ItemCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) { - *out = *in - if in.ActualLabel != nil { - in, out := &in.ActualLabel, &out.ActualLabel - *out = new(string) - **out = **in - } - if in.Entries != nil { - in, out := &in.Entries, &out.Entries - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix_Row. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_PcaSolverOptionEnums) DeepCopyInto(out *Model_PcaSolverOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_PcaSolverOptionEnums. -func (in *Model_PcaSolverOptionEnums) DeepCopy() *Model_PcaSolverOptionEnums { - if in == nil { - return nil - } - out := new(Model_PcaSolverOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_RankingMetrics) DeepCopyInto(out *Model_RankingMetrics) { - *out = *in - if in.MeanAveragePrecision != nil { - in, out := &in.MeanAveragePrecision, &out.MeanAveragePrecision - *out = new(float64) - **out = **in - } - if in.MeanSquaredError != nil { - in, out := &in.MeanSquaredError, &out.MeanSquaredError - *out = new(float64) - **out = **in - } - if in.NormalizedDiscountedCumulativeGain != nil { - in, out := &in.NormalizedDiscountedCumulativeGain, &out.NormalizedDiscountedCumulativeGain - *out = new(float64) - **out = **in - } - if in.AverageRank != nil { - in, out := &in.AverageRank, &out.AverageRank - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_RankingMetrics. -func (in *Model_RankingMetrics) DeepCopy() *Model_RankingMetrics { - if in == nil { - return nil - } - out := new(Model_RankingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_RegressionMetrics) DeepCopyInto(out *Model_RegressionMetrics) { - *out = *in - if in.MeanAbsoluteError != nil { - in, out := &in.MeanAbsoluteError, &out.MeanAbsoluteError - *out = new(float64) - **out = **in - } - if in.MeanSquaredError != nil { - in, out := &in.MeanSquaredError, &out.MeanSquaredError - *out = new(float64) - **out = **in - } - if in.MeanSquaredLogError != nil { - in, out := &in.MeanSquaredLogError, &out.MeanSquaredLogError - *out = new(float64) - **out = **in - } - if in.MedianAbsoluteError != nil { - in, out := &in.MedianAbsoluteError, &out.MedianAbsoluteError - *out = new(float64) - **out = **in - } - if in.RSquared != nil { - in, out := &in.RSquared, &out.RSquared - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_RegressionMetrics. 
-func (in *Model_RegressionMetrics) DeepCopy() *Model_RegressionMetrics { - if in == nil { - return nil - } - out := new(Model_RegressionMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_SeasonalPeriod) DeepCopyInto(out *Model_SeasonalPeriod) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_SeasonalPeriod. -func (in *Model_SeasonalPeriod) DeepCopy() *Model_SeasonalPeriod { - if in == nil { - return nil - } - out := new(Model_SeasonalPeriod) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_StringHparamSearchSpace) DeepCopyInto(out *Model_StringHparamSearchSpace) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_StringHparamSearchSpace. -func (in *Model_StringHparamSearchSpace) DeepCopy() *Model_StringHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_StringHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun) DeepCopyInto(out *Model_TrainingRun) { - *out = *in - if in.TrainingOptions != nil { - in, out := &in.TrainingOptions, &out.TrainingOptions - *out = new(Model_TrainingRun_TrainingOptions) - (*in).DeepCopyInto(*out) - } - if in.StartTime != nil { - in, out := &in.StartTime, &out.StartTime - *out = new(string) - **out = **in - } - if in.Results != nil { - in, out := &in.Results, &out.Results - *out = make([]Model_TrainingRun_IterationResult, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EvaluationMetrics != nil { - in, out := &in.EvaluationMetrics, &out.EvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } - if in.DataSplitResult != nil { - in, out := &in.DataSplitResult, &out.DataSplitResult - *out = new(Model_DataSplitResult) - (*in).DeepCopyInto(*out) - } - if in.ModelLevelGlobalExplanation != nil { - in, out := &in.ModelLevelGlobalExplanation, &out.ModelLevelGlobalExplanation - *out = new(Model_GlobalExplanation) - (*in).DeepCopyInto(*out) - } - if in.ClassLevelGlobalExplanations != nil { - in, out := &in.ClassLevelGlobalExplanations, &out.ClassLevelGlobalExplanations - *out = make([]Model_GlobalExplanation, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.VertexAiModelID != nil { - in, out := &in.VertexAiModelID, &out.VertexAiModelID - *out = new(string) - **out = **in - } - if in.VertexAiModelVersion != nil { - in, out := &in.VertexAiModelVersion, &out.VertexAiModelVersion - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun. -func (in *Model_TrainingRun) DeepCopy() *Model_TrainingRun { - if in == nil { - return nil - } - out := new(Model_TrainingRun) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult) DeepCopyInto(out *Model_TrainingRun_IterationResult) { - *out = *in - if in.Index != nil { - in, out := &in.Index, &out.Index - *out = new(int32) - **out = **in - } - if in.DurationMs != nil { - in, out := &in.DurationMs, &out.DurationMs - *out = new(int64) - **out = **in - } - if in.TrainingLoss != nil { - in, out := &in.TrainingLoss, &out.TrainingLoss - *out = new(float64) - **out = **in - } - if in.EvalLoss != nil { - in, out := &in.EvalLoss, &out.EvalLoss - *out = new(float64) - **out = **in - } - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(float64) - **out = **in - } - if in.ClusterInfos != nil { - in, out := &in.ClusterInfos, &out.ClusterInfos - *out = make([]Model_TrainingRun_IterationResult_ClusterInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ArimaResult != nil { - in, out := &in.ArimaResult, &out.ArimaResult - *out = new(Model_TrainingRun_IterationResult_ArimaResult) - (*in).DeepCopyInto(*out) - } - if in.PrincipalComponentInfos != nil { - in, out := &in.PrincipalComponentInfos, &out.PrincipalComponentInfos - *out = make([]Model_TrainingRun_IterationResult_PrincipalComponentInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult. -func (in *Model_TrainingRun_IterationResult) DeepCopy() *Model_TrainingRun_IterationResult { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_ArimaResult) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult) { - *out = *in - if in.ArimaModelInfo != nil { - in, out := &in.ArimaModelInfo, &out.ArimaModelInfo - *out = make([]Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult. -func (in *Model_TrainingRun_IterationResult_ArimaResult) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) { - *out = *in - if in.AutoRegressiveCoefficients != nil { - in, out := &in.AutoRegressiveCoefficients, &out.AutoRegressiveCoefficients - *out = make([]float64, len(*in)) - copy(*out, *in) - } - if in.MovingAverageCoefficients != nil { - in, out := &in.MovingAverageCoefficients, &out.MovingAverageCoefficients - *out = make([]float64, len(*in)) - copy(*out, *in) - } - if in.InterceptCoefficient != nil { - in, out := &in.InterceptCoefficient, &out.InterceptCoefficient - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) { - *out = *in - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.ArimaCoefficients != nil { - in, out := &in.ArimaCoefficients, &out.ArimaCoefficients - *out = new(Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) - (*in).DeepCopyInto(*out) - } - if in.ArimaFittingMetrics != nil { - in, out := &in.ArimaFittingMetrics, &out.ArimaFittingMetrics - *out = new(Model_ArimaFittingMetrics) - (*in).DeepCopyInto(*out) - } - if in.HasDrift != nil { - in, out := &in.HasDrift, &out.HasDrift - *out = new(bool) - **out = **in - } - if in.TimeSeriesID != nil { - in, out := &in.TimeSeriesID, &out.TimeSeriesID - *out = new(string) - **out = **in - } - if in.TimeSeriesIds != nil { - in, out := &in.TimeSeriesIds, &out.TimeSeriesIds - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.HasHolidayEffect != nil { - in, out := &in.HasHolidayEffect, &out.HasHolidayEffect - *out = new(bool) - **out = **in - } - if in.HasSpikesAndDips != nil { - in, out := &in.HasSpikesAndDips, &out.HasSpikesAndDips - *out = new(bool) - **out = **in - } - if in.HasStepChanges != nil { - in, out := &in.HasStepChanges, &out.HasStepChanges - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult_ClusterInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_ClusterInfo) { - *out = *in - if in.CentroidID != nil { - in, out := &in.CentroidID, &out.CentroidID - *out = new(int64) - **out = **in - } - if in.ClusterRadius != nil { - in, out := &in.ClusterRadius, &out.ClusterRadius - *out = new(float64) - **out = **in - } - if in.ClusterSize != nil { - in, out := &in.ClusterSize, &out.ClusterSize - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ClusterInfo. -func (in *Model_TrainingRun_IterationResult_ClusterInfo) DeepCopy() *Model_TrainingRun_IterationResult_ClusterInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ClusterInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_PrincipalComponentInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_PrincipalComponentInfo) { - *out = *in - if in.PrincipalComponentID != nil { - in, out := &in.PrincipalComponentID, &out.PrincipalComponentID - *out = new(int64) - **out = **in - } - if in.ExplainedVariance != nil { - in, out := &in.ExplainedVariance, &out.ExplainedVariance - *out = new(float64) - **out = **in - } - if in.ExplainedVarianceRatio != nil { - in, out := &in.ExplainedVarianceRatio, &out.ExplainedVarianceRatio - *out = new(float64) - **out = **in - } - if in.CumulativeExplainedVarianceRatio != nil { - in, out := &in.CumulativeExplainedVarianceRatio, &out.CumulativeExplainedVarianceRatio - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_PrincipalComponentInfo. -func (in *Model_TrainingRun_IterationResult_PrincipalComponentInfo) DeepCopy() *Model_TrainingRun_IterationResult_PrincipalComponentInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_PrincipalComponentInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_TrainingOptions) DeepCopyInto(out *Model_TrainingRun_TrainingOptions) { - *out = *in - if in.MaxIterations != nil { - in, out := &in.MaxIterations, &out.MaxIterations - *out = new(int64) - **out = **in - } - if in.LossType != nil { - in, out := &in.LossType, &out.LossType - *out = new(string) - **out = **in - } - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(float64) - **out = **in - } - if in.L1Regularization != nil { - in, out := &in.L1Regularization, &out.L1Regularization - *out = new(float64) - **out = **in - } - if in.L2Regularization != nil { - in, out := &in.L2Regularization, &out.L2Regularization - *out = new(float64) - **out = **in - } - if in.MinRelativeProgress != nil { - in, out := &in.MinRelativeProgress, &out.MinRelativeProgress - *out = new(float64) - **out = **in - } - if in.WarmStart != nil { - in, out := &in.WarmStart, &out.WarmStart - *out = new(bool) - **out = **in - } - if in.EarlyStop != nil { - in, out := &in.EarlyStop, &out.EarlyStop - *out = new(bool) - **out = **in - } - if in.InputLabelColumns != nil { - in, out := &in.InputLabelColumns, &out.InputLabelColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DataSplitMethod != nil { - in, out := &in.DataSplitMethod, &out.DataSplitMethod - *out = new(string) - **out = **in - } - if in.DataSplitEvalFraction != nil { - in, out := &in.DataSplitEvalFraction, &out.DataSplitEvalFraction - *out = new(float64) - **out = **in - } - if in.DataSplitColumn != nil { - in, out := &in.DataSplitColumn, &out.DataSplitColumn - *out = new(string) - **out = **in - } - if in.LearnRateStrategy != nil { - in, out := &in.LearnRateStrategy, &out.LearnRateStrategy - *out = new(string) - **out = **in - } - if in.InitialLearnRate != nil { - in, out := &in.InitialLearnRate, &out.InitialLearnRate - *out = new(float64) - **out = **in - } - if in.UserColumn != nil { - in, out := &in.UserColumn, &out.UserColumn - *out = new(string) - **out = **in - } - if in.ItemColumn != nil { - in, out := &in.ItemColumn, &out.ItemColumn - *out = new(string) - **out = **in - } - if in.DistanceType != nil { - in, out := &in.DistanceType, &out.DistanceType - *out = new(string) - **out = **in - } - if in.NumClusters != nil { - in, out := &in.NumClusters, &out.NumClusters - *out = new(int64) - **out = **in - } - if in.ModelUri != nil { - in, out := &in.ModelUri, &out.ModelUri - *out = new(string) - **out = **in - } - if in.OptimizationStrategy != nil { - in, out := &in.OptimizationStrategy, &out.OptimizationStrategy - *out = new(string) - **out = **in - } - if in.HiddenUnits != nil { - in, out := &in.HiddenUnits, &out.HiddenUnits - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(int64) - **out = **in - } - if in.Dropout != nil { - in, out := &in.Dropout, &out.Dropout - *out = new(float64) - **out = **in - } - if in.MaxTreeDepth != nil { - in, out := &in.MaxTreeDepth, &out.MaxTreeDepth - *out = new(int64) - **out = **in - } - if in.Subsample != nil { - in, out := &in.Subsample, &out.Subsample - *out = new(float64) - **out = **in - } - if in.MinSplitLoss != nil { - in, out := &in.MinSplitLoss, &out.MinSplitLoss - *out = new(float64) - **out = **in - } - if in.BoosterType != nil { - in, out := &in.BoosterType, &out.BoosterType - *out = new(string) - **out = **in - } - if in.NumParallelTree != nil { - in, out := &in.NumParallelTree, &out.NumParallelTree - *out = new(int64) - **out = **in - } - if 
in.DartNormalizeType != nil { - in, out := &in.DartNormalizeType, &out.DartNormalizeType - *out = new(string) - **out = **in - } - if in.TreeMethod != nil { - in, out := &in.TreeMethod, &out.TreeMethod - *out = new(string) - **out = **in - } - if in.MinTreeChildWeight != nil { - in, out := &in.MinTreeChildWeight, &out.MinTreeChildWeight - *out = new(int64) - **out = **in - } - if in.ColsampleBytree != nil { - in, out := &in.ColsampleBytree, &out.ColsampleBytree - *out = new(float64) - **out = **in - } - if in.ColsampleBylevel != nil { - in, out := &in.ColsampleBylevel, &out.ColsampleBylevel - *out = new(float64) - **out = **in - } - if in.ColsampleBynode != nil { - in, out := &in.ColsampleBynode, &out.ColsampleBynode - *out = new(float64) - **out = **in - } - if in.NumFactors != nil { - in, out := &in.NumFactors, &out.NumFactors - *out = new(int64) - **out = **in - } - if in.FeedbackType != nil { - in, out := &in.FeedbackType, &out.FeedbackType - *out = new(string) - **out = **in - } - if in.WalsAlpha != nil { - in, out := &in.WalsAlpha, &out.WalsAlpha - *out = new(float64) - **out = **in - } - if in.KmeansInitializationMethod != nil { - in, out := &in.KmeansInitializationMethod, &out.KmeansInitializationMethod - *out = new(string) - **out = **in - } - if in.KmeansInitializationColumn != nil { - in, out := &in.KmeansInitializationColumn, &out.KmeansInitializationColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesTimestampColumn != nil { - in, out := &in.TimeSeriesTimestampColumn, &out.TimeSeriesTimestampColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesDataColumn != nil { - in, out := &in.TimeSeriesDataColumn, &out.TimeSeriesDataColumn - *out = new(string) - **out = **in - } - if in.AutoArima != nil { - in, out := &in.AutoArima, &out.AutoArima - *out = new(bool) - **out = **in - } - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.DataFrequency != nil { - in, out := &in.DataFrequency, &out.DataFrequency - *out = new(string) - **out = **in - } - if in.CalculatePValues != nil { - in, out := &in.CalculatePValues, &out.CalculatePValues - *out = new(bool) - **out = **in - } - if in.IncludeDrift != nil { - in, out := &in.IncludeDrift, &out.IncludeDrift - *out = new(bool) - **out = **in - } - if in.HolidayRegion != nil { - in, out := &in.HolidayRegion, &out.HolidayRegion - *out = new(string) - **out = **in - } - if in.HolidayRegions != nil { - in, out := &in.HolidayRegions, &out.HolidayRegions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimeSeriesIDColumn != nil { - in, out := &in.TimeSeriesIDColumn, &out.TimeSeriesIDColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesIDColumns != nil { - in, out := &in.TimeSeriesIDColumns, &out.TimeSeriesIDColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Horizon != nil { - in, out := &in.Horizon, &out.Horizon - *out = new(int64) - **out = **in - } - if in.AutoArimaMaxOrder != nil { - in, out := &in.AutoArimaMaxOrder, &out.AutoArimaMaxOrder - *out = new(int64) - **out = **in - } - if in.AutoArimaMinOrder != nil { - in, out := &in.AutoArimaMinOrder, &out.AutoArimaMinOrder - *out = new(int64) - **out = **in - } - if in.NumTrials != nil { - in, out := &in.NumTrials, &out.NumTrials - *out = new(int64) - **out = **in - } - if in.MaxParallelTrials != nil { - in, out := &in.MaxParallelTrials, &out.MaxParallelTrials - *out = new(int64) - **out = **in - } - if 
in.HparamTuningObjectives != nil { - in, out := &in.HparamTuningObjectives, &out.HparamTuningObjectives - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DecomposeTimeSeries != nil { - in, out := &in.DecomposeTimeSeries, &out.DecomposeTimeSeries - *out = new(bool) - **out = **in - } - if in.CleanSpikesAndDips != nil { - in, out := &in.CleanSpikesAndDips, &out.CleanSpikesAndDips - *out = new(bool) - **out = **in - } - if in.AdjustStepChanges != nil { - in, out := &in.AdjustStepChanges, &out.AdjustStepChanges - *out = new(bool) - **out = **in - } - if in.EnableGlobalExplain != nil { - in, out := &in.EnableGlobalExplain, &out.EnableGlobalExplain - *out = new(bool) - **out = **in - } - if in.SampledShapleyNumPaths != nil { - in, out := &in.SampledShapleyNumPaths, &out.SampledShapleyNumPaths - *out = new(int64) - **out = **in - } - if in.IntegratedGradientsNumSteps != nil { - in, out := &in.IntegratedGradientsNumSteps, &out.IntegratedGradientsNumSteps - *out = new(int64) - **out = **in - } - if in.CategoryEncodingMethod != nil { - in, out := &in.CategoryEncodingMethod, &out.CategoryEncodingMethod - *out = new(string) - **out = **in - } - if in.TfVersion != nil { - in, out := &in.TfVersion, &out.TfVersion - *out = new(string) - **out = **in - } - if in.ColorSpace != nil { - in, out := &in.ColorSpace, &out.ColorSpace - *out = new(string) - **out = **in - } - if in.InstanceWeightColumn != nil { - in, out := &in.InstanceWeightColumn, &out.InstanceWeightColumn - *out = new(string) - **out = **in - } - if in.TrendSmoothingWindowSize != nil { - in, out := &in.TrendSmoothingWindowSize, &out.TrendSmoothingWindowSize - *out = new(int64) - **out = **in - } - if in.TimeSeriesLengthFraction != nil { - in, out := &in.TimeSeriesLengthFraction, &out.TimeSeriesLengthFraction - *out = new(float64) - **out = **in - } - if in.MinTimeSeriesLength != nil { - in, out := &in.MinTimeSeriesLength, &out.MinTimeSeriesLength - *out = new(int64) - **out = **in - } - if in.MaxTimeSeriesLength != nil { - in, out := &in.MaxTimeSeriesLength, &out.MaxTimeSeriesLength - *out = new(int64) - **out = **in - } - if in.XgboostVersion != nil { - in, out := &in.XgboostVersion, &out.XgboostVersion - *out = new(string) - **out = **in - } - if in.ApproxGlobalFeatureContrib != nil { - in, out := &in.ApproxGlobalFeatureContrib, &out.ApproxGlobalFeatureContrib - *out = new(bool) - **out = **in - } - if in.FitIntercept != nil { - in, out := &in.FitIntercept, &out.FitIntercept - *out = new(bool) - **out = **in - } - if in.NumPrincipalComponents != nil { - in, out := &in.NumPrincipalComponents, &out.NumPrincipalComponents - *out = new(int64) - **out = **in - } - if in.PcaExplainedVarianceRatio != nil { - in, out := &in.PcaExplainedVarianceRatio, &out.PcaExplainedVarianceRatio - *out = new(float64) - **out = **in - } - if in.ScaleFeatures != nil { - in, out := &in.ScaleFeatures, &out.ScaleFeatures - *out = new(bool) - **out = **in - } - if in.PcaSolver != nil { - in, out := &in.PcaSolver, &out.PcaSolver - *out = new(string) - **out = **in - } - if in.AutoClassWeights != nil { - in, out := &in.AutoClassWeights, &out.AutoClassWeights - *out = new(bool) - **out = **in - } - if in.ActivationFn != nil { - in, out := &in.ActivationFn, &out.ActivationFn - *out = new(string) - **out = **in - } - if in.Optimizer != nil { - in, out := &in.Optimizer, &out.Optimizer - *out = new(string) - **out = **in - } - if in.BudgetHours != nil { - in, out := &in.BudgetHours, &out.BudgetHours - *out = new(float64) - **out = **in - } - if 
in.StandardizeFeatures != nil { - in, out := &in.StandardizeFeatures, &out.StandardizeFeatures - *out = new(bool) - **out = **in - } - if in.L1RegActivation != nil { - in, out := &in.L1RegActivation, &out.L1RegActivation - *out = new(float64) - **out = **in - } - if in.ModelRegistry != nil { - in, out := &in.ModelRegistry, &out.ModelRegistry - *out = new(string) - **out = **in - } - if in.VertexAiModelVersionAliases != nil { - in, out := &in.VertexAiModelVersionAliases, &out.VertexAiModelVersionAliases - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_TrainingOptions. -func (in *Model_TrainingRun_TrainingOptions) DeepCopy() *Model_TrainingRun_TrainingOptions { - if in == nil { - return nil - } - out := new(Model_TrainingRun_TrainingOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace) DeepCopyInto(out *Model_float64HparamSearchSpace) { - *out = *in - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(Model_float64HparamSearchSpace_float64Range) - (*in).DeepCopyInto(*out) - } - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = new(Model_float64HparamSearchSpace_float64Candidates) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace. -func (in *Model_float64HparamSearchSpace) DeepCopy() *Model_float64HparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace_float64Candidates) DeepCopyInto(out *Model_float64HparamSearchSpace_float64Candidates) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]float64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace_float64Candidates. -func (in *Model_float64HparamSearchSpace_float64Candidates) DeepCopy() *Model_float64HparamSearchSpace_float64Candidates { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace_float64Candidates) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace_float64Range) DeepCopyInto(out *Model_float64HparamSearchSpace_float64Range) { - *out = *in - if in.Min != nil { - in, out := &in.Min, &out.Min - *out = new(float64) - **out = **in - } - if in.Max != nil { - in, out := &in.Max, &out.Max - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace_float64Range. -func (in *Model_float64HparamSearchSpace_float64Range) DeepCopy() *Model_float64HparamSearchSpace_float64Range { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace_float64Range) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ParquetOptions) DeepCopyInto(out *ParquetOptions) { - *out = *in - if in.EnumAsString != nil { - in, out := &in.EnumAsString, &out.EnumAsString - *out = new(bool) - **out = **in - } - if in.EnableListInference != nil { - in, out := &in.EnableListInference, &out.EnableListInference - *out = new(bool) - **out = **in - } - if in.MapTargetType != nil { - in, out := &in.MapTargetType, &out.MapTargetType - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetOptions. -func (in *ParquetOptions) DeepCopy() *ParquetOptions { - if in == nil { - return nil - } - out := new(ParquetOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionSkew) DeepCopyInto(out *PartitionSkew) { - *out = *in - if in.SkewSources != nil { - in, out := &in.SkewSources, &out.SkewSources - *out = make([]PartitionSkew_SkewSource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionSkew. -func (in *PartitionSkew) DeepCopy() *PartitionSkew { - if in == nil { - return nil - } - out := new(PartitionSkew) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionSkew_SkewSource) DeepCopyInto(out *PartitionSkew_SkewSource) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionSkew_SkewSource. -func (in *PartitionSkew_SkewSource) DeepCopy() *PartitionSkew_SkewSource { - if in == nil { - return nil - } - out := new(PartitionSkew_SkewSource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionedColumn) DeepCopyInto(out *PartitionedColumn) { - *out = *in - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionedColumn. -func (in *PartitionedColumn) DeepCopy() *PartitionedColumn { - if in == nil { - return nil - } - out := new(PartitionedColumn) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitioningDefinition) DeepCopyInto(out *PartitioningDefinition) { - *out = *in - if in.PartitionedColumn != nil { - in, out := &in.PartitionedColumn, &out.PartitionedColumn - *out = make([]PartitionedColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitioningDefinition. -func (in *PartitioningDefinition) DeepCopy() *PartitioningDefinition { - if in == nil { - return nil - } - out := new(PartitioningDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *PerformanceInsights) DeepCopyInto(out *PerformanceInsights) { - *out = *in - if in.AvgPreviousExecutionMs != nil { - in, out := &in.AvgPreviousExecutionMs, &out.AvgPreviousExecutionMs - *out = new(int64) - **out = **in - } - if in.StagePerformanceStandaloneInsights != nil { - in, out := &in.StagePerformanceStandaloneInsights, &out.StagePerformanceStandaloneInsights - *out = make([]StagePerformanceStandaloneInsight, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StagePerformanceChangeInsights != nil { - in, out := &in.StagePerformanceChangeInsights, &out.StagePerformanceChangeInsights - *out = make([]StagePerformanceChangeInsight, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PerformanceInsights. -func (in *PerformanceInsights) DeepCopy() *PerformanceInsights { - if in == nil { - return nil - } - out := new(PerformanceInsights) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrimaryKey) DeepCopyInto(out *PrimaryKey) { - *out = *in - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrimaryKey. -func (in *PrimaryKey) DeepCopy() *PrimaryKey { - if in == nil { - return nil - } - out := new(PrimaryKey) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrivacyPolicy) DeepCopyInto(out *PrivacyPolicy) { - *out = *in - if in.AggregationThresholdPolicy != nil { - in, out := &in.AggregationThresholdPolicy, &out.AggregationThresholdPolicy - *out = new(AggregationThresholdPolicy) - (*in).DeepCopyInto(*out) - } - if in.DifferentialPrivacyPolicy != nil { - in, out := &in.DifferentialPrivacyPolicy, &out.DifferentialPrivacyPolicy - *out = new(DifferentialPrivacyPolicy) - (*in).DeepCopyInto(*out) - } - if in.JoinRestrictionPolicy != nil { - in, out := &in.JoinRestrictionPolicy, &out.JoinRestrictionPolicy - *out = new(JoinRestrictionPolicy) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrivacyPolicy. -func (in *PrivacyPolicy) DeepCopy() *PrivacyPolicy { - if in == nil { - return nil - } - out := new(PrivacyPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryInfo) DeepCopyInto(out *QueryInfo) { - *out = *in - if in.OptimizationDetails != nil { - in, out := &in.OptimizationDetails, &out.OptimizationDetails - *out = new(google_protobuf_Struct) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryInfo. -func (in *QueryInfo) DeepCopy() *QueryInfo { - if in == nil { - return nil - } - out := new(QueryInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *QueryParameter) DeepCopyInto(out *QueryParameter) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ParameterType != nil { - in, out := &in.ParameterType, &out.ParameterType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.ParameterValue != nil { - in, out := &in.ParameterValue, &out.ParameterValue - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameter. -func (in *QueryParameter) DeepCopy() *QueryParameter { - if in == nil { - return nil - } - out := new(QueryParameter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterStructType) DeepCopyInto(out *QueryParameterStructType) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterStructType. -func (in *QueryParameterStructType) DeepCopy() *QueryParameterStructType { - if in == nil { - return nil - } - out := new(QueryParameterStructType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterType) DeepCopyInto(out *QueryParameterType) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.ArrayType != nil { - in, out := &in.ArrayType, &out.ArrayType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.StructTypes != nil { - in, out := &in.StructTypes, &out.StructTypes - *out = make([]QueryParameterStructType, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterType. -func (in *QueryParameterType) DeepCopy() *QueryParameterType { - if in == nil { - return nil - } - out := new(QueryParameterType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterValue) DeepCopyInto(out *QueryParameterValue) { - *out = *in - if in.Value != nil { - in, out := &in.Value, &out.Value - *out = new(string) - **out = **in - } - if in.ArrayValues != nil { - in, out := &in.ArrayValues, &out.ArrayValues - *out = make([]QueryParameterValue, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.RangeValue != nil { - in, out := &in.RangeValue, &out.RangeValue - *out = new(RangeValue) - (*in).DeepCopyInto(*out) - } - if in.AltStructValues != nil { - in, out := &in.AltStructValues, &out.AltStructValues - *out = make([]google_protobuf_Value, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterValue. 
-func (in *QueryParameterValue) DeepCopy() *QueryParameterValue { - if in == nil { - return nil - } - out := new(QueryParameterValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryTimelineSample) DeepCopyInto(out *QueryTimelineSample) { - *out = *in - if in.ElapsedMs != nil { - in, out := &in.ElapsedMs, &out.ElapsedMs - *out = new(int64) - **out = **in - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.PendingUnits != nil { - in, out := &in.PendingUnits, &out.PendingUnits - *out = new(int64) - **out = **in - } - if in.CompletedUnits != nil { - in, out := &in.CompletedUnits, &out.CompletedUnits - *out = new(int64) - **out = **in - } - if in.ActiveUnits != nil { - in, out := &in.ActiveUnits, &out.ActiveUnits - *out = new(int64) - **out = **in - } - if in.EstimatedRunnableUnits != nil { - in, out := &in.EstimatedRunnableUnits, &out.EstimatedRunnableUnits - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryTimelineSample. -func (in *QueryTimelineSample) DeepCopy() *QueryTimelineSample { - if in == nil { - return nil - } - out := new(QueryTimelineSample) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangePartitioning) DeepCopyInto(out *RangePartitioning) { - *out = *in - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(RangePartitioning_Range) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioning. -func (in *RangePartitioning) DeepCopy() *RangePartitioning { - if in == nil { - return nil - } - out := new(RangePartitioning) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangePartitioning_Range) DeepCopyInto(out *RangePartitioning_Range) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(string) - **out = **in - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(string) - **out = **in - } - if in.Interval != nil { - in, out := &in.Interval, &out.Interval - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioning_Range. -func (in *RangePartitioning_Range) DeepCopy() *RangePartitioning_Range { - if in == nil { - return nil - } - out := new(RangePartitioning_Range) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangeValue) DeepCopyInto(out *RangeValue) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangeValue. 
-func (in *RangeValue) DeepCopy() *RangeValue {
-	if in == nil {
-		return nil
-	}
-	out := new(RangeValue)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *RemoteModelInfo) DeepCopyInto(out *RemoteModelInfo) {
-	*out = *in
-	if in.Endpoint != nil {
-		in, out := &in.Endpoint, &out.Endpoint
-		*out = new(string)
-		**out = **in
-	}
-	if in.RemoteServiceType != nil {
-		in, out := &in.RemoteServiceType, &out.RemoteServiceType
-		*out = new(string)
-		**out = **in
-	}
-	if in.Connection != nil {
-		in, out := &in.Connection, &out.Connection
-		*out = new(string)
-		**out = **in
-	}
-	if in.MaxBatchingRows != nil {
-		in, out := &in.MaxBatchingRows, &out.MaxBatchingRows
-		*out = new(int64)
-		**out = **in
-	}
-	if in.RemoteModelVersion != nil {
-		in, out := &in.RemoteModelVersion, &out.RemoteModelVersion
-		*out = new(string)
-		**out = **in
-	}
-	if in.SpeechRecognizer != nil {
-		in, out := &in.SpeechRecognizer, &out.SpeechRecognizer
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteModelInfo.
-func (in *RemoteModelInfo) DeepCopy() *RemoteModelInfo {
-	if in == nil {
-		return nil
-	}
-	out := new(RemoteModelInfo)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *RestrictionConfig) DeepCopyInto(out *RestrictionConfig) {
-	*out = *in
-	if in.Type != nil {
-		in, out := &in.Type, &out.Type
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictionConfig.
-func (in *RestrictionConfig) DeepCopy() *RestrictionConfig {
-	if in == nil {
-		return nil
-	}
-	out := new(RestrictionConfig)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *Routine) DeepCopyInto(out *Routine) {
-	*out = *in
-	if in.Etag != nil {
-		in, out := &in.Etag, &out.Etag
-		*out = new(string)
-		**out = **in
-	}
-	if in.RoutineReference != nil {
-		in, out := &in.RoutineReference, &out.RoutineReference
-		*out = new(RoutineReference)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.RoutineType != nil {
-		in, out := &in.RoutineType, &out.RoutineType
-		*out = new(string)
-		**out = **in
-	}
-	if in.CreationTime != nil {
-		in, out := &in.CreationTime, &out.CreationTime
-		*out = new(int64)
-		**out = **in
-	}
-	if in.LastModifiedTime != nil {
-		in, out := &in.LastModifiedTime, &out.LastModifiedTime
-		*out = new(int64)
-		**out = **in
-	}
-	if in.Language != nil {
-		in, out := &in.Language, &out.Language
-		*out = new(string)
-		**out = **in
-	}
-	if in.Arguments != nil {
-		in, out := &in.Arguments, &out.Arguments
-		*out = make([]Routine_Argument, len(*in))
-		for i := range *in {
-			(*in)[i].DeepCopyInto(&(*out)[i])
-		}
-	}
-	if in.ReturnType != nil {
-		in, out := &in.ReturnType, &out.ReturnType
-		*out = new(StandardSqlDataType)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.ReturnTableType != nil {
-		in, out := &in.ReturnTableType, &out.ReturnTableType
-		*out = new(StandardSqlTableType)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.ImportedLibraries != nil {
-		in, out := &in.ImportedLibraries, &out.ImportedLibraries
-		*out = make([]string, len(*in))
-		copy(*out, *in)
-	}
-	if in.DefinitionBody != nil {
-		in, out := &in.DefinitionBody, &out.DefinitionBody
-		*out = new(string)
-		**out = **in
-	}
-	if in.Description != nil {
-		in, out := &in.Description, &out.Description
-		*out = new(string)
-		**out = **in
-	}
-	if in.DeterminismLevel != nil {
-		in, out := &in.DeterminismLevel, &out.DeterminismLevel
-		*out = new(string)
-		**out = **in
-	}
-	if in.SecurityMode != nil {
-		in, out := &in.SecurityMode, &out.SecurityMode
-		*out = new(string)
-		**out = **in
-	}
-	if in.StrictMode != nil {
-		in, out := &in.StrictMode, &out.StrictMode
-		*out = new(bool)
-		**out = **in
-	}
-	if in.RemoteFunctionOptions != nil {
-		in, out := &in.RemoteFunctionOptions, &out.RemoteFunctionOptions
-		*out = new(Routine_RemoteFunctionOptions)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.SparkOptions != nil {
-		in, out := &in.SparkOptions, &out.SparkOptions
-		*out = new(SparkOptions)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.DataGovernanceType != nil {
-		in, out := &in.DataGovernanceType, &out.DataGovernanceType
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine.
-func (in *Routine) DeepCopy() *Routine {
-	if in == nil {
-		return nil
-	}
-	out := new(Routine)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *RoutineReference) DeepCopyInto(out *RoutineReference) {
-	*out = *in
-	if in.ProjectId != nil {
-		in, out := &in.ProjectId, &out.ProjectId
-		*out = new(string)
-		**out = **in
-	}
-	if in.DatasetId != nil {
-		in, out := &in.DatasetId, &out.DatasetId
-		*out = new(string)
-		**out = **in
-	}
-	if in.RoutineId != nil {
-		in, out := &in.RoutineId, &out.RoutineId
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineReference.
-func (in *RoutineReference) DeepCopy() *RoutineReference {
-	if in == nil {
-		return nil
-	}
-	out := new(RoutineReference)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *Routine_Argument) DeepCopyInto(out *Routine_Argument) {
-	*out = *in
-	if in.Name != nil {
-		in, out := &in.Name, &out.Name
-		*out = new(string)
-		**out = **in
-	}
-	if in.ArgumentKind != nil {
-		in, out := &in.ArgumentKind, &out.ArgumentKind
-		*out = new(string)
-		**out = **in
-	}
-	if in.Mode != nil {
-		in, out := &in.Mode, &out.Mode
-		*out = new(string)
-		**out = **in
-	}
-	if in.DataType != nil {
-		in, out := &in.DataType, &out.DataType
-		*out = new(StandardSqlDataType)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.IsAggregate != nil {
-		in, out := &in.IsAggregate, &out.IsAggregate
-		*out = new(bool)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine_Argument.
-func (in *Routine_Argument) DeepCopy() *Routine_Argument {
-	if in == nil {
-		return nil
-	}
-	out := new(Routine_Argument)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *Routine_RemoteFunctionOptions) DeepCopyInto(out *Routine_RemoteFunctionOptions) {
-	*out = *in
-	if in.Endpoint != nil {
-		in, out := &in.Endpoint, &out.Endpoint
-		*out = new(string)
-		**out = **in
-	}
-	if in.Connection != nil {
-		in, out := &in.Connection, &out.Connection
-		*out = new(string)
-		**out = **in
-	}
-	if in.UserDefinedContext != nil {
-		in, out := &in.UserDefinedContext, &out.UserDefinedContext
-		*out = make(map[string]string, len(*in))
-		for key, val := range *in {
-			(*out)[key] = val
-		}
-	}
-	if in.MaxBatchingRows != nil {
-		in, out := &in.MaxBatchingRows, &out.MaxBatchingRows
-		*out = new(int64)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine_RemoteFunctionOptions.
-func (in *Routine_RemoteFunctionOptions) DeepCopy() *Routine_RemoteFunctionOptions {
-	if in == nil {
-		return nil
-	}
-	out := new(Routine_RemoteFunctionOptions)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *RowAccessPolicy) DeepCopyInto(out *RowAccessPolicy) {
-	*out = *in
-	if in.Etag != nil {
-		in, out := &in.Etag, &out.Etag
-		*out = new(string)
-		**out = **in
-	}
-	if in.RowAccessPolicyReference != nil {
-		in, out := &in.RowAccessPolicyReference, &out.RowAccessPolicyReference
-		*out = new(RowAccessPolicyReference)
-		(*in).DeepCopyInto(*out)
-	}
-	if in.FilterPredicate != nil {
-		in, out := &in.FilterPredicate, &out.FilterPredicate
-		*out = new(string)
-		**out = **in
-	}
-	if in.CreationTime != nil {
-		in, out := &in.CreationTime, &out.CreationTime
-		*out = new(string)
-		**out = **in
-	}
-	if in.LastModifiedTime != nil {
-		in, out := &in.LastModifiedTime, &out.LastModifiedTime
-		*out = new(string)
-		**out = **in
-	}
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowAccessPolicy.
-func (in *RowAccessPolicy) DeepCopy() *RowAccessPolicy {
-	if in == nil {
-		return nil
-	}
-	out := new(RowAccessPolicy)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *RowAccessPolicyReference) DeepCopyInto(out *RowAccessPolicyReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.DatasetID != nil { - in, out := &in.DatasetID, &out.DatasetID - *out = new(string) - **out = **in - } - if in.TableID != nil { - in, out := &in.TableID, &out.TableID - *out = new(string) - **out = **in - } - if in.PolicyID != nil { - in, out := &in.PolicyID, &out.PolicyID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowAccessPolicyReference. -func (in *RowAccessPolicyReference) DeepCopy() *RowAccessPolicyReference { - if in == nil { - return nil - } - out := new(RowAccessPolicyReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RowLevelSecurityStatistics) DeepCopyInto(out *RowLevelSecurityStatistics) { - *out = *in - if in.RowLevelSecurityApplied != nil { - in, out := &in.RowLevelSecurityApplied, &out.RowLevelSecurityApplied - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowLevelSecurityStatistics. -func (in *RowLevelSecurityStatistics) DeepCopy() *RowLevelSecurityStatistics { - if in == nil { - return nil - } - out := new(RowLevelSecurityStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScriptOptions) DeepCopyInto(out *ScriptOptions) { - *out = *in - if in.StatementTimeoutMs != nil { - in, out := &in.StatementTimeoutMs, &out.StatementTimeoutMs - *out = new(int64) - **out = **in - } - if in.StatementByteBudget != nil { - in, out := &in.StatementByteBudget, &out.StatementByteBudget - *out = new(int64) - **out = **in - } - if in.KeyResultStatement != nil { - in, out := &in.KeyResultStatement, &out.KeyResultStatement - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptOptions. -func (in *ScriptOptions) DeepCopy() *ScriptOptions { - if in == nil { - return nil - } - out := new(ScriptOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScriptStatistics) DeepCopyInto(out *ScriptStatistics) { - *out = *in - if in.EvaluationKind != nil { - in, out := &in.EvaluationKind, &out.EvaluationKind - *out = new(string) - **out = **in - } - if in.StackFrames != nil { - in, out := &in.StackFrames, &out.StackFrames - *out = make([]ScriptStatistics_ScriptStackFrame, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptStatistics. -func (in *ScriptStatistics) DeepCopy() *ScriptStatistics { - if in == nil { - return nil - } - out := new(ScriptStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ScriptStatistics_ScriptStackFrame) DeepCopyInto(out *ScriptStatistics_ScriptStackFrame) { - *out = *in - if in.StartLine != nil { - in, out := &in.StartLine, &out.StartLine - *out = new(int32) - **out = **in - } - if in.StartColumn != nil { - in, out := &in.StartColumn, &out.StartColumn - *out = new(int32) - **out = **in - } - if in.EndLine != nil { - in, out := &in.EndLine, &out.EndLine - *out = new(int32) - **out = **in - } - if in.EndColumn != nil { - in, out := &in.EndColumn, &out.EndColumn - *out = new(int32) - **out = **in - } - if in.ProcedureID != nil { - in, out := &in.ProcedureID, &out.ProcedureID - *out = new(string) - **out = **in - } - if in.Text != nil { - in, out := &in.Text, &out.Text - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptStatistics_ScriptStackFrame. -func (in *ScriptStatistics_ScriptStackFrame) DeepCopy() *ScriptStatistics_ScriptStackFrame { - if in == nil { - return nil - } - out := new(ScriptStatistics_ScriptStackFrame) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SearchStatistics) DeepCopyInto(out *SearchStatistics) { - *out = *in - if in.IndexUsageMode != nil { - in, out := &in.IndexUsageMode, &out.IndexUsageMode - *out = new(string) - **out = **in - } - if in.IndexUnusedReasons != nil { - in, out := &in.IndexUnusedReasons, &out.IndexUnusedReasons - *out = make([]IndexUnusedReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SearchStatistics. -func (in *SearchStatistics) DeepCopy() *SearchStatistics { - if in == nil { - return nil - } - out := new(SearchStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SerDeInfo) DeepCopyInto(out *SerDeInfo) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.SerializationLibrary != nil { - in, out := &in.SerializationLibrary, &out.SerializationLibrary - *out = new(string) - **out = **in - } - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SerDeInfo. -func (in *SerDeInfo) DeepCopy() *SerDeInfo { - if in == nil { - return nil - } - out := new(SerDeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SessionInfo) DeepCopyInto(out *SessionInfo) { - *out = *in - if in.SessionID != nil { - in, out := &in.SessionID, &out.SessionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SessionInfo. -func (in *SessionInfo) DeepCopy() *SessionInfo { - if in == nil { - return nil - } - out := new(SessionInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *SnapshotDefinition) DeepCopyInto(out *SnapshotDefinition) { - *out = *in - if in.BaseTableReference != nil { - in, out := &in.BaseTableReference, &out.BaseTableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SnapshotTime != nil { - in, out := &in.SnapshotTime, &out.SnapshotTime - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SnapshotDefinition. -func (in *SnapshotDefinition) DeepCopy() *SnapshotDefinition { - if in == nil { - return nil - } - out := new(SnapshotDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkOptions) DeepCopyInto(out *SparkOptions) { - *out = *in - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } - if in.RuntimeVersion != nil { - in, out := &in.RuntimeVersion, &out.RuntimeVersion - *out = new(string) - **out = **in - } - if in.ContainerImage != nil { - in, out := &in.ContainerImage, &out.ContainerImage - *out = new(string) - **out = **in - } - if in.Properties != nil { - in, out := &in.Properties, &out.Properties - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.MainFileUri != nil { - in, out := &in.MainFileUri, &out.MainFileUri - *out = new(string) - **out = **in - } - if in.PyFileUris != nil { - in, out := &in.PyFileUris, &out.PyFileUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.JarUris != nil { - in, out := &in.JarUris, &out.JarUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileUris != nil { - in, out := &in.FileUris, &out.FileUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.ArchiveUris != nil { - in, out := &in.ArchiveUris, &out.ArchiveUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.MainClass != nil { - in, out := &in.MainClass, &out.MainClass - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkOptions. -func (in *SparkOptions) DeepCopy() *SparkOptions { - if in == nil { - return nil - } - out := new(SparkOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkStatistics) DeepCopyInto(out *SparkStatistics) { - *out = *in - if in.SparkJobID != nil { - in, out := &in.SparkJobID, &out.SparkJobID - *out = new(string) - **out = **in - } - if in.SparkJobLocation != nil { - in, out := &in.SparkJobLocation, &out.SparkJobLocation - *out = new(string) - **out = **in - } - if in.Endpoints != nil { - in, out := &in.Endpoints, &out.Endpoints - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.LoggingInfo != nil { - in, out := &in.LoggingInfo, &out.LoggingInfo - *out = new(SparkStatistics_LoggingInfo) - (*in).DeepCopyInto(*out) - } - if in.KmsKeyName != nil { - in, out := &in.KmsKeyName, &out.KmsKeyName - *out = new(string) - **out = **in - } - if in.GcsStagingBucket != nil { - in, out := &in.GcsStagingBucket, &out.GcsStagingBucket - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkStatistics. 
-func (in *SparkStatistics) DeepCopy() *SparkStatistics { - if in == nil { - return nil - } - out := new(SparkStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkStatistics_LoggingInfo) DeepCopyInto(out *SparkStatistics_LoggingInfo) { - *out = *in - if in.ResourceType != nil { - in, out := &in.ResourceType, &out.ResourceType - *out = new(string) - **out = **in - } - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkStatistics_LoggingInfo. -func (in *SparkStatistics_LoggingInfo) DeepCopy() *SparkStatistics_LoggingInfo { - if in == nil { - return nil - } - out := new(SparkStatistics_LoggingInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StagePerformanceChangeInsight) DeepCopyInto(out *StagePerformanceChangeInsight) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } - if in.InputDataChange != nil { - in, out := &in.InputDataChange, &out.InputDataChange - *out = new(InputDataChange) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StagePerformanceChangeInsight. -func (in *StagePerformanceChangeInsight) DeepCopy() *StagePerformanceChangeInsight { - if in == nil { - return nil - } - out := new(StagePerformanceChangeInsight) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StagePerformanceStandaloneInsight) DeepCopyInto(out *StagePerformanceStandaloneInsight) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } - if in.SlotContention != nil { - in, out := &in.SlotContention, &out.SlotContention - *out = new(bool) - **out = **in - } - if in.InsufficientShuffleQuota != nil { - in, out := &in.InsufficientShuffleQuota, &out.InsufficientShuffleQuota - *out = new(bool) - **out = **in - } - if in.BiEngineReasons != nil { - in, out := &in.BiEngineReasons, &out.BiEngineReasons - *out = make([]BiEngineReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.HighCardinalityJoins != nil { - in, out := &in.HighCardinalityJoins, &out.HighCardinalityJoins - *out = make([]HighCardinalityJoin, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.PartitionSkew != nil { - in, out := &in.PartitionSkew, &out.PartitionSkew - *out = new(PartitionSkew) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StagePerformanceStandaloneInsight. -func (in *StagePerformanceStandaloneInsight) DeepCopy() *StagePerformanceStandaloneInsight { - if in == nil { - return nil - } - out := new(StagePerformanceStandaloneInsight) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *StandardSqlDataType) DeepCopyInto(out *StandardSqlDataType) { - *out = *in - if in.TypeKind != nil { - in, out := &in.TypeKind, &out.TypeKind - *out = new(string) - **out = **in - } - if in.ArrayElementType != nil { - in, out := &in.ArrayElementType, &out.ArrayElementType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.StructType != nil { - in, out := &in.StructType, &out.StructType - *out = new(StandardSqlStructType) - (*in).DeepCopyInto(*out) - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlDataType. -func (in *StandardSqlDataType) DeepCopy() *StandardSqlDataType { - if in == nil { - return nil - } - out := new(StandardSqlDataType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlField) DeepCopyInto(out *StandardSqlField) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlField. -func (in *StandardSqlField) DeepCopy() *StandardSqlField { - if in == nil { - return nil - } - out := new(StandardSqlField) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlStructType) DeepCopyInto(out *StandardSqlStructType) { - *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlStructType. -func (in *StandardSqlStructType) DeepCopy() *StandardSqlStructType { - if in == nil { - return nil - } - out := new(StandardSqlStructType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlTableType) DeepCopyInto(out *StandardSqlTableType) { - *out = *in - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlTableType. -func (in *StandardSqlTableType) DeepCopy() *StandardSqlTableType { - if in == nil { - return nil - } - out := new(StandardSqlTableType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *StorageDescriptor) DeepCopyInto(out *StorageDescriptor) { - *out = *in - if in.LocationUri != nil { - in, out := &in.LocationUri, &out.LocationUri - *out = new(string) - **out = **in - } - if in.InputFormat != nil { - in, out := &in.InputFormat, &out.InputFormat - *out = new(string) - **out = **in - } - if in.OutputFormat != nil { - in, out := &in.OutputFormat, &out.OutputFormat - *out = new(string) - **out = **in - } - if in.SerdeInfo != nil { - in, out := &in.SerdeInfo, &out.SerdeInfo - *out = new(SerDeInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageDescriptor. -func (in *StorageDescriptor) DeepCopy() *StorageDescriptor { - if in == nil { - return nil - } - out := new(StorageDescriptor) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Streamingbuffer) DeepCopyInto(out *Streamingbuffer) { - *out = *in - if in.EstimatedBytes != nil { - in, out := &in.EstimatedBytes, &out.EstimatedBytes - *out = new(uint64) - **out = **in - } - if in.EstimatedRows != nil { - in, out := &in.EstimatedRows, &out.EstimatedRows - *out = new(uint64) - **out = **in - } - if in.OldestEntryTime != nil { - in, out := &in.OldestEntryTime, &out.OldestEntryTime - *out = new(uint64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Streamingbuffer. -func (in *Streamingbuffer) DeepCopy() *Streamingbuffer { - if in == nil { - return nil - } - out := new(Streamingbuffer) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SystemVariables) DeepCopyInto(out *SystemVariables) { - *out = *in - if in.Values != nil { - in, out := &in.Values, &out.Values - *out = new(google_protobuf_Struct) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SystemVariables. -func (in *SystemVariables) DeepCopy() *SystemVariables { - if in == nil { - return nil - } - out := new(SystemVariables) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Table) DeepCopyInto(out *Table) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } - if in.PartitionDefinition != nil { - in, out := &in.PartitionDefinition, &out.PartitionDefinition - *out = new(PartitioningDefinition) - (*in).DeepCopyInto(*out) - } - if in.NumBytes != nil { - in, out := &in.NumBytes, &out.NumBytes - *out = new(int64) - **out = **in - } - if in.NumPhysicalBytes != nil { - in, out := &in.NumPhysicalBytes, &out.NumPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermBytes != nil { - in, out := &in.NumLongTermBytes, &out.NumLongTermBytes - *out = new(int64) - **out = **in - } - if in.NumRows != nil { - in, out := &in.NumRows, &out.NumRows - *out = new(uint64) - **out = **in - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(uint64) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.View != nil { - in, out := &in.View, &out.View - *out = new(ViewDefinition) - (*in).DeepCopyInto(*out) - } - if in.MaterializedView != nil { - in, out := &in.MaterializedView, &out.MaterializedView - *out = new(MaterializedViewDefinition) - (*in).DeepCopyInto(*out) - } - if in.MaterializedViewStatus != nil { - in, out := &in.MaterializedViewStatus, &out.MaterializedViewStatus - *out = new(MaterializedViewStatus) - (*in).DeepCopyInto(*out) - } - if in.ExternalDataConfiguration != nil { - in, out := &in.ExternalDataConfiguration, &out.ExternalDataConfiguration - *out = new(ExternalDataConfiguration) - (*in).DeepCopyInto(*out) - } - if in.BiglakeConfiguration != nil { - in, out := &in.BiglakeConfiguration, &out.BiglakeConfiguration - *out = 
new(BigLakeConfiguration) - (*in).DeepCopyInto(*out) - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.StreamingBuffer != nil { - in, out := &in.StreamingBuffer, &out.StreamingBuffer - *out = new(Streamingbuffer) - (*in).DeepCopyInto(*out) - } - if in.EncryptionConfiguration != nil { - in, out := &in.EncryptionConfiguration, &out.EncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.SnapshotDefinition != nil { - in, out := &in.SnapshotDefinition, &out.SnapshotDefinition - *out = new(SnapshotDefinition) - (*in).DeepCopyInto(*out) - } - if in.DefaultCollation != nil { - in, out := &in.DefaultCollation, &out.DefaultCollation - *out = new(string) - **out = **in - } - if in.DefaultRoundingMode != nil { - in, out := &in.DefaultRoundingMode, &out.DefaultRoundingMode - *out = new(string) - **out = **in - } - if in.CloneDefinition != nil { - in, out := &in.CloneDefinition, &out.CloneDefinition - *out = new(CloneDefinition) - (*in).DeepCopyInto(*out) - } - if in.NumTimeTravelPhysicalBytes != nil { - in, out := &in.NumTimeTravelPhysicalBytes, &out.NumTimeTravelPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumTotalLogicalBytes != nil { - in, out := &in.NumTotalLogicalBytes, &out.NumTotalLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumActiveLogicalBytes != nil { - in, out := &in.NumActiveLogicalBytes, &out.NumActiveLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermLogicalBytes != nil { - in, out := &in.NumLongTermLogicalBytes, &out.NumLongTermLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumCurrentPhysicalBytes != nil { - in, out := &in.NumCurrentPhysicalBytes, &out.NumCurrentPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumTotalPhysicalBytes != nil { - in, out := &in.NumTotalPhysicalBytes, &out.NumTotalPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumActivePhysicalBytes != nil { - in, out := &in.NumActivePhysicalBytes, &out.NumActivePhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermPhysicalBytes != nil { - in, out := &in.NumLongTermPhysicalBytes, &out.NumLongTermPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumPartitions != nil { - in, out := &in.NumPartitions, &out.NumPartitions - *out = new(int64) - **out = **in - } - if in.MaxStaleness != nil { - in, out := &in.MaxStaleness, &out.MaxStaleness - *out = new(string) - **out = **in - } - if in.Restrictions != nil { - in, out := &in.Restrictions, &out.Restrictions - *out = new(RestrictionConfig) - (*in).DeepCopyInto(*out) - } - if in.TableConstraints != nil { - in, out := &in.TableConstraints, &out.TableConstraints - *out = new(TableConstraints) - (*in).DeepCopyInto(*out) - } - if in.ResourceTags != nil { - in, out := &in.ResourceTags, &out.ResourceTags - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.TableReplicationInfo != nil { - in, out := &in.TableReplicationInfo, &out.TableReplicationInfo - *out = new(TableReplicationInfo) - (*in).DeepCopyInto(*out) - } - if in.Replicas != nil { - in, out := &in.Replicas, &out.Replicas - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ExternalCatalogTableOptions != nil { - in, out := &in.ExternalCatalogTableOptions, &out.ExternalCatalogTableOptions - *out = new(ExternalCatalogTableOptions) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an 
autogenerated deepcopy function, copying the receiver, creating a new Table. -func (in *Table) DeepCopy() *Table { - if in == nil { - return nil - } - out := new(Table) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableConstraints) DeepCopyInto(out *TableConstraints) { - *out = *in - if in.PrimaryKey != nil { - in, out := &in.PrimaryKey, &out.PrimaryKey - *out = new(PrimaryKey) - (*in).DeepCopyInto(*out) - } - if in.ForeignKeys != nil { - in, out := &in.ForeignKeys, &out.ForeignKeys - *out = make([]ForeignKey, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableConstraints. -func (in *TableConstraints) DeepCopy() *TableConstraints { - if in == nil { - return nil - } - out := new(TableConstraints) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableFieldSchema) DeepCopyInto(out *TableFieldSchema) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Mode != nil { - in, out := &in.Mode, &out.Mode - *out = new(string) - **out = **in - } - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]TableFieldSchema, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.PolicyTags != nil { - in, out := &in.PolicyTags, &out.PolicyTags - *out = new(TableFieldSchema_PolicyTagList) - (*in).DeepCopyInto(*out) - } - if in.DataPolicies != nil { - in, out := &in.DataPolicies, &out.DataPolicies - *out = make([]DataPolicyOption, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.MaxLength != nil { - in, out := &in.MaxLength, &out.MaxLength - *out = new(int64) - **out = **in - } - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(int64) - **out = **in - } - if in.Scale != nil { - in, out := &in.Scale, &out.Scale - *out = new(int64) - **out = **in - } - if in.RoundingMode != nil { - in, out := &in.RoundingMode, &out.RoundingMode - *out = new(string) - **out = **in - } - if in.Collation != nil { - in, out := &in.Collation, &out.Collation - *out = new(string) - **out = **in - } - if in.DefaultValueExpression != nil { - in, out := &in.DefaultValueExpression, &out.DefaultValueExpression - *out = new(string) - **out = **in - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(TableFieldSchema_FieldElementType) - (*in).DeepCopyInto(*out) - } - if in.ForeignTypeDefinition != nil { - in, out := &in.ForeignTypeDefinition, &out.ForeignTypeDefinition - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema. -func (in *TableFieldSchema) DeepCopy() *TableFieldSchema { - if in == nil { - return nil - } - out := new(TableFieldSchema) + out := new(LinkedDatasetSource) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TableFieldSchema_FieldElementType) DeepCopyInto(out *TableFieldSchema_FieldElementType) { +func (in *RestrictionConfig) DeepCopyInto(out *RestrictionConfig) { *out = *in if in.Type != nil { in, out := &in.Type, &out.Type @@ -7663,114 +537,42 @@ func (in *TableFieldSchema_FieldElementType) DeepCopyInto(out *TableFieldSchema_ } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema_FieldElementType. -func (in *TableFieldSchema_FieldElementType) DeepCopy() *TableFieldSchema_FieldElementType { - if in == nil { - return nil - } - out := new(TableFieldSchema_FieldElementType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableFieldSchema_PolicyTagList) DeepCopyInto(out *TableFieldSchema_PolicyTagList) { - *out = *in - if in.Names != nil { - in, out := &in.Names, &out.Names - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema_PolicyTagList. -func (in *TableFieldSchema_PolicyTagList) DeepCopy() *TableFieldSchema_PolicyTagList { - if in == nil { - return nil - } - out := new(TableFieldSchema_PolicyTagList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableList) DeepCopyInto(out *TableList) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Tables != nil { - in, out := &in.Tables, &out.Tables - *out = make([]ListFormatTable, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TotalItems != nil { - in, out := &in.TotalItems, &out.TotalItems - *out = new(int32) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableList. -func (in *TableList) DeepCopy() *TableList { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictionConfig. +func (in *RestrictionConfig) DeepCopy() *RestrictionConfig { if in == nil { return nil } - out := new(TableList) + out := new(RestrictionConfig) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TableMetadataCacheUsage) DeepCopyInto(out *TableMetadataCacheUsage) { +func (in *RoutineReference) DeepCopyInto(out *RoutineReference) { *out = *in - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.UnusedReason != nil { - in, out := &in.UnusedReason, &out.UnusedReason - *out = new(string) - **out = **in - } - if in.Explanation != nil { - in, out := &in.Explanation, &out.Explanation + if in.ProjectId != nil { + in, out := &in.ProjectId, &out.ProjectId *out = new(string) **out = **in } - if in.Staleness != nil { - in, out := &in.Staleness, &out.Staleness + if in.DatasetId != nil { + in, out := &in.DatasetId, &out.DatasetId *out = new(string) **out = **in } - if in.TableType != nil { - in, out := &in.TableType, &out.TableType + if in.RoutineId != nil { + in, out := &in.RoutineId, &out.RoutineId *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableMetadataCacheUsage. -func (in *TableMetadataCacheUsage) DeepCopy() *TableMetadataCacheUsage { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineReference. +func (in *RoutineReference) DeepCopy() *RoutineReference { if in == nil { return nil } - out := new(TableMetadataCacheUsage) + out := new(RoutineReference) in.DeepCopyInto(out) return out } @@ -7804,231 +606,3 @@ func (in *TableReference) DeepCopy() *TableReference { in.DeepCopyInto(out) return out } - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableReplicationInfo) DeepCopyInto(out *TableReplicationInfo) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.ReplicationIntervalMs != nil { - in, out := &in.ReplicationIntervalMs, &out.ReplicationIntervalMs - *out = new(int64) - **out = **in - } - if in.ReplicatedSourceLastRefreshTime != nil { - in, out := &in.ReplicatedSourceLastRefreshTime, &out.ReplicatedSourceLastRefreshTime - *out = new(int64) - **out = **in - } - if in.ReplicationStatus != nil { - in, out := &in.ReplicationStatus, &out.ReplicationStatus - *out = new(string) - **out = **in - } - if in.ReplicationError != nil { - in, out := &in.ReplicationError, &out.ReplicationError - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReplicationInfo. -func (in *TableReplicationInfo) DeepCopy() *TableReplicationInfo { - if in == nil { - return nil - } - out := new(TableReplicationInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableSchema) DeepCopyInto(out *TableSchema) { - *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]TableFieldSchema, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ForeignTypeInfo != nil { - in, out := &in.ForeignTypeInfo, &out.ForeignTypeInfo - *out = new(ForeignTypeInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSchema. 
-func (in *TableSchema) DeepCopy() *TableSchema { - if in == nil { - return nil - } - out := new(TableSchema) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TimePartitioning) DeepCopyInto(out *TimePartitioning) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.ExpirationMs != nil { - in, out := &in.ExpirationMs, &out.ExpirationMs - *out = new(int64) - **out = **in - } - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimePartitioning. -func (in *TimePartitioning) DeepCopy() *TimePartitioning { - if in == nil { - return nil - } - out := new(TimePartitioning) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TransformColumn) DeepCopyInto(out *TransformColumn) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.TransformSql != nil { - in, out := &in.TransformSql, &out.TransformSql - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TransformColumn. -func (in *TransformColumn) DeepCopy() *TransformColumn { - if in == nil { - return nil - } - out := new(TransformColumn) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *UserDefinedFunctionResource) DeepCopyInto(out *UserDefinedFunctionResource) { - *out = *in - if in.ResourceUri != nil { - in, out := &in.ResourceUri, &out.ResourceUri - *out = new(string) - **out = **in - } - if in.InlineCode != nil { - in, out := &in.InlineCode, &out.InlineCode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserDefinedFunctionResource. -func (in *UserDefinedFunctionResource) DeepCopy() *UserDefinedFunctionResource { - if in == nil { - return nil - } - out := new(UserDefinedFunctionResource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *VectorSearchStatistics) DeepCopyInto(out *VectorSearchStatistics) { - *out = *in - if in.IndexUsageMode != nil { - in, out := &in.IndexUsageMode, &out.IndexUsageMode - *out = new(string) - **out = **in - } - if in.IndexUnusedReasons != nil { - in, out := &in.IndexUnusedReasons, &out.IndexUnusedReasons - *out = make([]IndexUnusedReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VectorSearchStatistics. -func (in *VectorSearchStatistics) DeepCopy() *VectorSearchStatistics { - if in == nil { - return nil - } - out := new(VectorSearchStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ViewDefinition) DeepCopyInto(out *ViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.UserDefinedFunctionResources != nil { - in, out := &in.UserDefinedFunctionResources, &out.UserDefinedFunctionResources - *out = make([]UserDefinedFunctionResource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.UseExplicitColumnNames != nil { - in, out := &in.UseExplicitColumnNames, &out.UseExplicitColumnNames - *out = new(bool) - **out = **in - } - if in.PrivacyPolicy != nil { - in, out := &in.PrivacyPolicy, &out.PrivacyPolicy - *out = new(PrivacyPolicy) - (*in).DeepCopyInto(*out) - } - if in.ForeignDefinitions != nil { - in, out := &in.ForeignDefinitions, &out.ForeignDefinitions - *out = make([]ForeignViewDefinition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewDefinition. -func (in *ViewDefinition) DeepCopy() *ViewDefinition { - if in == nil { - return nil - } - out := new(ViewDefinition) - in.DeepCopyInto(out) - return out -} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml index d6c6320192..ba6d1e1e76 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml @@ -77,14 +77,13 @@ spec: description: The dataset this entry applies to. properties: datasetId: - description: Required. A unique ID for this dataset, - without the project name. The ID must contain only - letters (a-z, A-Z), numbers (0-9), or underscores - (_). The maximum length is 1,024 characters. + description: A unique Id for this dataset, without the + project name. The Id must contain only letters (a-z, + A-Z), numbers (0-9), or underscores (_). The maximum + length is 1,024 characters. type: string projectId: - description: Required. The ID of the project containing - this dataset. + description: The ID of the project containing this dataset. type: string required: - datasetId @@ -140,16 +139,14 @@ spec: an update operation.' properties: datasetId: - description: Required. The ID of the dataset containing - this routine. + description: The ID of the dataset containing this routine. type: string projectId: - description: Required. The ID of the project containing - this routine. + description: The ID of the project containing this routine. type: string routineId: - description: Required. The ID of the routine. The ID must - contain only letters (a-z, A-Z), numbers (0-9), or underscores + description: The Id of the routine. The Id must contain + only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. type: string required: @@ -182,20 +179,18 @@ spec: granted again via an update operation.' properties: datasetId: - description: Required. The ID of the dataset containing - this table. + description: The ID of the dataset containing this table. type: string projectId: - description: Required. 
The ID of the project containing - this table. + description: The ID of the project containing this table. type: string tableId: - description: Required. The ID of the table. The ID can contain - Unicode characters in category L (letter), M (mark), N - (number), Pc (connector, including underscore), Pd (dash), - and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). + description: The Id of the table. The Id can contain Unicode + characters in category L (letter), M (mark), N (number), + Pc (connector, including underscore), Pd (dash), and Zs + (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations - allow suffixing of the table ID with a partition decorator, + allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`. type: string required: @@ -309,7 +304,7 @@ spec: is 168 hours if this is not set. type: string projectRef: - description: The project that this resource belongs to. optional. + description: ' Optional. The project that this resource belongs to.' oneOf: - not: required: @@ -347,6 +342,8 @@ spec: storageBillingModel: description: Optional. Updates storage_billing_model for the dataset. type: string + required: + - location type: object status: description: BigQueryDatasetStatus defines the config connector machine @@ -386,6 +383,10 @@ spec: etag: description: Output only. A hash of the resource. type: string + externalRef: + description: A unique specifier for the BigQueryAnalyticsHubDataExchangeListing + resource in GCP. + type: string lastModifiedTime: description: Output only. The date when this dataset was last modified, in milliseconds since the epoch. 
diff --git a/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml index 7782391cfe..a499069c59 100644 --- a/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml @@ -17,4 +17,5 @@ kind: BigQueryDataset metadata: name: bigquerytabledep spec: - friendlyName: bigquerytable-dep \ No newline at end of file + friendlyName: bigquerytable-dep + location: us-central1 diff --git a/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml index 7b466b014d..d2d1f5d00a 100644 --- a/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml @@ -16,3 +16,5 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: dataflowflextemplatejobdepstreaming +spec: + location: us-central1 diff --git a/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml index 6bcdebf190..e9e651d7e3 100644 --- a/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml @@ -22,3 +22,4 @@ metadata: cnrm.cloud.google.com/project-id: ${PROJECT_ID?} spec: resourceID: pubsubsubscriptiondepbigquery + location: us-central1 diff --git a/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go b/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go index 13da3ee538..6359f7e78c 100644 --- a/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go +++ b/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go @@ -132,6 +132,8 @@ func (v *MapperGenerator) visitMessage(msg protoreflect.MessageDescriptor) { switch protoGoPackage { case "cloud.google.com/go/networkconnectivity/apiv1/networkconnectivitypb": protoGoPackage = "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/networkconnectivity/v1" + case "cloud.google.com/go/bigquery/apiv2/bigquerypb": + protoGoPackage = "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/bigquery/v2" } for _, goType := range goTypes { diff --git a/go.mod b/go.mod index 36c7d6e458..db15dbd7f7 100644 --- a/go.mod +++ b/go.mod @@ -49,6 +49,7 @@ require ( github.com/hashicorp/hcl/v2 v2.19.1 github.com/hashicorp/terraform-plugin-sdk/v2 v2.24.0 github.com/hashicorp/terraform-provider-google-beta v3.73.0+incompatible + github.com/huandu/go-clone v1.7.2 github.com/nasa9084/go-openapi v0.0.0-20200604141640-2875b7376353 github.com/olekukonko/tablewriter v0.0.5 github.com/onsi/gomega v1.27.10 @@ -102,7 +103,6 @@ require ( github.com/Microsoft/go-winio v0.6.1 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect github.com/agext/levenshtein v1.2.3 // indirect - github.com/apache/arrow/go/v15 v15.0.2 // indirect github.com/apparentlymart/go-cidr v1.1.0 // 
indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect @@ -136,13 +136,11 @@ require ( github.com/go-openapi/jsonreference v0.20.2 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/gobuffalo/flect v0.2.3 // indirect - github.com/goccy/go-json v0.10.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/btree v1.1.3 // indirect - github.com/google/flatbuffers v23.5.26+incompatible // indirect github.com/google/gnostic v0.6.9 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/gofuzz v1.2.0 // indirect @@ -179,8 +177,6 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/klauspost/compress v1.16.7 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/mailru/easyjson v0.7.7 // indirect @@ -204,7 +200,6 @@ require ( github.com/oklog/run v1.0.0 // indirect github.com/onsi/ginkgo v1.16.5 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect - github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/prometheus/client_model v0.6.0 // indirect @@ -219,7 +214,6 @@ require ( github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xlab/treeprint v1.1.0 // indirect - github.com/zeebo/xxh3 v1.0.2 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect go.opentelemetry.io/otel v1.29.0 // indirect @@ -230,14 +224,12 @@ require ( go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect go.uber.org/multierr v1.10.0 // indirect golang.org/x/crypto v0.28.0 // indirect - golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.30.0 // indirect golang.org/x/sys v0.26.0 // indirect golang.org/x/term v0.25.0 // indirect golang.org/x/text v0.19.0 // indirect golang.org/x/tools v0.24.0 // indirect - golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect gomodules.xyz/jsonpatch/v2 v2.3.0 // indirect google.golang.org/appengine v1.6.8 // indirect gopkg.in/inf.v0 v0.9.1 // indirect diff --git a/go.sum b/go.sum index c610b36b93..c715574541 100644 --- a/go.sum +++ b/go.sum @@ -48,8 +48,6 @@ cloud.google.com/go/compute v1.28.1 h1:XwPcZjgMCnU2tkwY10VleUjSAfpTj9RDn+kGrbYsi cloud.google.com/go/compute v1.28.1/go.mod h1:b72iXMY4FucVry3NR3Li4kVyyTvbMDE7x5WsqvxjsYk= cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= -cloud.google.com/go/datacatalog v1.22.1 h1:i0DyKb/o7j+0vgaFtimcRFjYsD6wFw1jpnODYUyiYRs= -cloud.google.com/go/datacatalog v1.22.1/go.mod h1:MscnJl9B2lpYlFoxRjicw19kFTwEke8ReKL5Y/6TWg8= cloud.google.com/go/dataflow v0.10.1 h1:RoVpCZ1BjJBH/5mzaXCgNg+l9FgTIYQ7C9xBRGvhkzo= 
cloud.google.com/go/dataflow v0.10.1/go.mod h1:zP4/tNjONFRcS4NcI9R94YDQEkPalimdbPkijVNJt/g= cloud.google.com/go/dataform v0.10.1 h1:FkOPrxf8sN9J2TMc4CIBhVivhMiO8D0eYN33s5A5Uo4= @@ -154,8 +152,6 @@ github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYU github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= -github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= @@ -409,8 +405,6 @@ github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M= github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= github.com/gobuffalo/flect v0.2.3 h1:f/ZukRnSNA/DUpSNDadko7Qc0PhGvsew35p/2tu+CRY= github.com/gobuffalo/flect v0.2.3/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= @@ -465,8 +459,6 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= -github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -492,12 +484,9 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= -github.com/google/martian/v3 
v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -625,6 +614,10 @@ github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734/go.mod github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/go-clone v1.7.2 h1:3+Aq0Ed8XK+zKkLjE2dfHg0XrpIfcohBE1K+c8Usxoo= +github.com/huandu/go-clone v1.7.2/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= @@ -665,10 +658,6 @@ github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= -github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -803,8 +792,6 @@ github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/9 github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= -github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -965,10 +952,6 @@ github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uU github.com/zclconf/go-cty v1.13.0 h1:It5dfKTTZHe9aeppbNOda3mN7Ag7sg6QkBNm6TkyFa0= 
github.com/zclconf/go-cty v1.13.0/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= -github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= -github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= @@ -1059,8 +1042,6 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1371,14 +1352,10 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gomodules.xyz/jsonpatch/v2 v2.0.1/go.mod h1:IhYNNY4jnS53ZnfE4PAmpKtDpTCj1JFXc+3mwe7XcUU= gomodules.xyz/jsonpatch/v2 v2.3.0 h1:8NFhfS6gzxNqjLIYnZxg319wZ5Qjnx4m/CcX+Klzazc= gomodules.xyz/jsonpatch/v2 v2.3.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0= -gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= -gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= diff --git a/mockgcp/mockbigquery/datasets.go b/mockgcp/mockbigquery/datasets.go index 243a442b68..6dc1d09efe 100644 --- a/mockgcp/mockbigquery/datasets.go +++ b/mockgcp/mockbigquery/datasets.go @@ -53,7 +53,6 @@ func (s *datasetsServer) GetDataset(ctx context.Context, req *pb.GetDatasetReque } return nil, err } - if 
obj.MaxTimeTravelHours == nil { obj.MaxTimeTravelHours = &defaultMaxTimeTravelHours } diff --git a/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go b/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go index 891088e838..24c87195db 100644 --- a/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go +++ b/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go @@ -91,10 +91,10 @@ type DatasetAccess struct { } type DatasetDataset struct { - /* Required. A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. */ + /* A unique Id for this dataset, without the project name. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this dataset. */ + /* The ID of the project containing this dataset. */ ProjectId string `json:"projectId"` } @@ -105,24 +105,24 @@ type DatasetDefaultEncryptionConfiguration struct { } type DatasetRoutine struct { - /* Required. The ID of the dataset containing this routine. */ + /* The ID of the dataset containing this routine. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this routine. */ + /* The ID of the project containing this routine. */ ProjectId string `json:"projectId"` - /* Required. The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. */ + /* The Id of the routine. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. */ RoutineId string `json:"routineId"` } type DatasetView struct { - /* Required. The ID of the dataset containing this table. */ + /* The ID of the dataset containing this table. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this table. */ + /* The ID of the project containing this table. */ ProjectId string `json:"projectId"` - /* Required. The ID of the table. The ID can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table ID with a partition decorator, such as `sample_table$20190123`. */ + /* The Id of the table. The Id can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`. */ TableId string `json:"tableId"` } @@ -179,14 +179,13 @@ type BigQueryDatasetSpec struct { IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty"` /* The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations. */ - // +optional - Location *string `json:"location,omitempty"` + Location string `json:"location"` /* Optional. Defines the time travel window in hours. 
The value can be from 48 to 168 hours (2 to 7 days). The default value is 168 hours if this is not set. */ // +optional MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty"` - /* The project that this resource belongs to. optional. */ + /* Optional. The project that this resource belongs to. */ // +optional ProjectRef *v1alpha1.ResourceRef `json:"projectRef,omitempty"` @@ -211,6 +210,10 @@ type BigQueryDatasetStatus struct { // +optional Etag *string `json:"etag,omitempty"` + /* A unique specifier for the BigQueryAnalyticsHubDataExchangeListing resource in GCP. */ + // +optional + ExternalRef *string `json:"externalRef,omitempty"` + /* Output only. The date when this dataset was last modified, in milliseconds since the epoch. */ // +optional LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` diff --git a/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go index ff2aa02203..c23d48c6fd 100644 --- a/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go @@ -135,11 +135,6 @@ func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec) { *out = new(bool) **out = **in } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } if in.MaxTimeTravelHours != nil { in, out := &in.MaxTimeTravelHours, &out.MaxTimeTravelHours *out = new(string) @@ -191,6 +186,11 @@ func (in *BigQueryDatasetStatus) DeepCopyInto(out *BigQueryDatasetStatus) { *out = new(string) **out = **in } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } if in.LastModifiedTime != nil { in, out := &in.LastModifiedTime, &out.LastModifiedTime *out = new(int64) diff --git a/pkg/controller/direct/bigquery/v2/mapper.generated.go b/pkg/controller/direct/bigquery/v2/mapper.generated.go new file mode 100644 index 0000000000..f58fc9b069 --- /dev/null +++ b/pkg/controller/direct/bigquery/v2/mapper.generated.go @@ -0,0 +1,248 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bigquery + +import ( + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" + pb "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/bigquery/v2" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" +) + +func Access_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccess) *krm.Access { + if in == nil { + return nil + } + out := &krm.Access{} + out.Role = direct.LazyPtr(in.GetRole()) + out.UserByEmail = direct.LazyPtr(in.GetUserByEmail()) + out.GroupByEmail = direct.LazyPtr(in.GetGroupByEmail()) + out.Domain = direct.LazyPtr(in.GetDomain()) + out.SpecialGroup = direct.LazyPtr(in.GetSpecialGroup()) + out.IamMember = direct.LazyPtr(in.GetIamMember()) + out.View = ReferencedTable_FromProto(mapCtx, in.GetView()) + out.Routine = ReferencedRoutine_FromProto(mapCtx, in.GetRoutine()) + out.Dataset = DatasetAccessEntry_FromProto(mapCtx, in.GetDataset()) + return out +} +func Access_ToProto(mapCtx *direct.MapContext, in *krm.Access) *pb.DatasetAccess { + if in == nil { + return nil + } + out := &pb.DatasetAccess{} + out.Role = in.Role + out.UserByEmail = in.UserByEmail + out.GroupByEmail = in.GroupByEmail + out.Domain = in.Domain + out.SpecialGroup = in.SpecialGroup + out.IamMember = in.IamMember + out.View = ReferencedTable_ToProto(mapCtx, in.View) + out.Routine = ReferencedRoutine_ToProto(mapCtx, in.Routine) + out.Dataset = DatasetAccessEntry_ToProto(mapCtx, in.Dataset) + return out +} + +func DatasetAccessEntry_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccessEntry) *krm.DatasetAccessEntry { + if in == nil { + return nil + } + out := &krm.DatasetAccessEntry{} + out.Dataset = DatasetReference_FromProto(mapCtx, in.GetDataset()) + out.TargetTypes = in.TargetTypes + return out +} +func DatasetAccessEntry_ToProto(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *pb.DatasetAccessEntry { + if in == nil { + return nil + } + out := &pb.DatasetAccessEntry{} + out.Dataset = DatasetReference_ToProto(mapCtx, in.Dataset) + out.TargetTypes = in.TargetTypes + return out +} +func DatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.DatasetReference) *krm.DatasetReference { + if in == nil { + return nil + } + out := &krm.DatasetReference{} + out.DatasetId = in.DatasetId + out.ProjectId = in.ProjectId + return out +} +func DatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.DatasetReference) *pb.DatasetReference { + if in == nil { + return nil + } + out := &pb.DatasetReference{} + out.DatasetId = in.DatasetId + out.ProjectId = in.ProjectId + return out +} +func EncryptionConfiguration_FromProto(mapCtx *direct.MapContext, in *pb.EncryptionConfiguration) *krm.EncryptionConfiguration { + if in == nil { + return nil + } + out := &krm.EncryptionConfiguration{} + out.KmsKeyRef.Name = *in.KmsKeyName + return out +} +func EncryptionConfiguration_ToProto(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *pb.EncryptionConfiguration { + if in == nil { + return nil + } + out := &pb.EncryptionConfiguration{} + out.KmsKeyName = &in.KmsKeyRef.Name + return out +} + +func ExternalCatalogDatasetOptions_FromProto(mapCtx *direct.MapContext, in *pb.ExternalCatalogDatasetOptions) *krm.ExternalCatalogDatasetOptions { + if in == nil { + return nil + } + out := &krm.ExternalCatalogDatasetOptions{} + out.Parameters = in.Parameters + out.DefaultStorageLocationUri = direct.LazyPtr(in.GetDefaultStorageLocationUri()) + return out +} +func ExternalCatalogDatasetOptions_ToProto(mapCtx *direct.MapContext, in 
*krm.ExternalCatalogDatasetOptions) *pb.ExternalCatalogDatasetOptions { + if in == nil { + return nil + } + out := &pb.ExternalCatalogDatasetOptions{} + out.Parameters = in.Parameters + out.DefaultStorageLocationUri = in.DefaultStorageLocationUri + return out +} +func ExternalDatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.ExternalDatasetReference) *krm.ExternalDatasetReference { + if in == nil { + return nil + } + out := &krm.ExternalDatasetReference{} + out.ExternalSource = direct.LazyPtr(in.GetExternalSource()) + out.Connection = direct.LazyPtr(in.GetConnection()) + return out +} +func ExternalDatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.ExternalDatasetReference) *pb.ExternalDatasetReference { + if in == nil { + return nil + } + out := &pb.ExternalDatasetReference{} + out.ExternalSource = in.ExternalSource + out.Connection = in.Connection + return out +} +func GcpTag_FromProto(mapCtx *direct.MapContext, in *pb.DatasetTags) *krm.GcpTag { + if in == nil { + return nil + } + out := &krm.GcpTag{} + out.TagKey = direct.LazyPtr(in.GetTagKey()) + out.TagValue = direct.LazyPtr(in.GetTagValue()) + return out +} +func GcpTag_ToProto(mapCtx *direct.MapContext, in *krm.GcpTag) *pb.DatasetTags { + if in == nil { + return nil + } + out := &pb.DatasetTags{} + out.TagKey = in.TagKey + out.TagValue = in.TagValue + return out +} +func LinkedDatasetMetadata_FromProto(mapCtx *direct.MapContext, in *pb.LinkedDatasetMetadata) *krm.LinkedDatasetMetadata { + if in == nil { + return nil + } + out := &krm.LinkedDatasetMetadata{} + return out +} +func LinkedDatasetMetadata_ToProto(mapCtx *direct.MapContext, in *krm.LinkedDatasetMetadata) *pb.LinkedDatasetMetadata { + if in == nil { + return nil + } + out := &pb.LinkedDatasetMetadata{} + return out +} +func LinkedDatasetSource_FromProto(mapCtx *direct.MapContext, in *pb.LinkedDatasetSource) *krm.LinkedDatasetSource { + if in == nil { + return nil + } + out := &krm.LinkedDatasetSource{} + out.SourceDataset = DatasetReference_FromProto(mapCtx, in.GetSourceDataset()) + return out +} +func LinkedDatasetSource_ToProto(mapCtx *direct.MapContext, in *krm.LinkedDatasetSource) *pb.LinkedDatasetSource { + if in == nil { + return nil + } + out := &pb.LinkedDatasetSource{} + out.SourceDataset = DatasetReference_ToProto(mapCtx, in.SourceDataset) + return out +} +func ReferencedTable_FromProto(mapCtx *direct.MapContext, in *pb.TableReference) *krm.TableReference { + if in == nil { + return nil + } + out := &krm.TableReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.TableId = in.TableId + return out +} +func ReferencedTable_ToProto(mapCtx *direct.MapContext, in *krm.TableReference) *pb.TableReference { + if in == nil { + return nil + } + out := &pb.TableReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.TableId = in.TableId + return out +} +func ReferencedRoutine_FromProto(mapCtx *direct.MapContext, in *pb.RoutineReference) *krm.RoutineReference { + if in == nil { + return nil + } + out := &krm.RoutineReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.RoutineId = in.RoutineId + return out +} +func ReferencedRoutine_ToProto(mapCtx *direct.MapContext, in *krm.RoutineReference) *pb.RoutineReference { + if in == nil { + return nil + } + out := &pb.RoutineReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.RoutineId = in.RoutineId + return out +} +func RestrictionConfig_FromProto(mapCtx *direct.MapContext, in *pb.RestrictionConfig) 
*krm.RestrictionConfig { + if in == nil { + return nil + } + out := &krm.RestrictionConfig{} + out.Type = direct.LazyPtr(in.GetType()) + return out +} +func RestrictionConfig_ToProto(mapCtx *direct.MapContext, in *krm.RestrictionConfig) *pb.RestrictionConfig { + if in == nil { + return nil + } + out := &pb.RestrictionConfig{} + out.Type = in.Type + return out +} diff --git a/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go b/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go index b7bc5b841a..05f6649eda 100644 --- a/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go +++ b/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go @@ -21,314 +21,208 @@ package bigquerydataset import ( - "fmt" - "time" + "strconv" - pb "cloud.google.com/go/bigquery" krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" + api "google.golang.org/api/bigquery/v2" ) -func Access_FromProto(mapCtx *direct.MapContext, in *pb.AccessEntry) *krm.Access { +func BigQueryDatasetSpec_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetSpec, name string) *api.Dataset { if in == nil { return nil } - out := &krm.Access{} - out.Role = direct.LazyPtr(fmt.Sprintf("%s", in.Role)) - switch in.EntityType { - case 1: - out.Domain = direct.LazyPtr(in.Entity) - case 2: - out.GroupByEmail = direct.LazyPtr(in.Entity) - case 3: - out.UserByEmail = direct.LazyPtr(in.Entity) - case 4: - out.SpecialGroup = direct.LazyPtr(in.Entity) - case 6: - out.IamMember = direct.LazyPtr(in.Entity) - } - out.View = TableReference_FromProto(mapCtx, in.View) - out.Routine = RoutineReference_FromProto(mapCtx, in.Routine) - out.Dataset = DatasetAccessEntry_FromProto(mapCtx, in.Dataset) - return out -} -func Access_ToProto(mapCtx *direct.MapContext, in *krm.Access) *pb.AccessEntry { - if in == nil { - return nil - } - out := &pb.AccessEntry{} - out.Role = pb.AccessRole(direct.ValueOf(in.Role)) - if in.Domain != nil { - out.EntityType = 1 - out.Entity = direct.ValueOf(in.Domain) - } - if in.GroupByEmail != nil { - out.EntityType = 2 - out.Entity = direct.ValueOf(in.GroupByEmail) - } - if in.UserByEmail != nil { - out.EntityType = 3 - out.Entity = direct.ValueOf(in.UserByEmail) + out := &api.Dataset{} + acccessList := []*api.DatasetAccess{} + for _, access := range in.Access { + curAccess := Access_ToAPI(mapCtx, direct.LazyPtr(access)) + acccessList = append(acccessList, curAccess) } - if in.SpecialGroup != nil { - out.EntityType = 4 - out.Entity = direct.ValueOf(in.SpecialGroup) - } - if in.IamMember != nil { - out.EntityType = 6 - out.Entity = direct.ValueOf(in.IamMember) + out.Access = acccessList + out.DefaultCollation = direct.ValueOf(in.DefaultCollation) + out.DefaultPartitionExpirationMs = direct.ValueOf(in.DefaultPartitionExpirationMs) + out.DefaultTableExpirationMs = direct.ValueOf(in.DefaultTableExpirationMs) + out.DefaultEncryptionConfiguration = EncryptionConfiguration_ToAPI(mapCtx, in.DefaultEncryptionConfiguration) + out.Description = direct.ValueOf(in.Description) + out.FriendlyName = direct.ValueOf(in.FriendlyName) + out.DatasetReference = DatasetReference_ToAPI(mapCtx, in, name) + out.Location = direct.ValueOf(in.Location) + out.IsCaseInsensitive = direct.ValueOf(in.IsCaseInsensitive) + if in.MaxTimeTravelHours != nil { + out.MaxTimeTravelHours, _ = strconv.ParseInt(direct.ValueOf(in.MaxTimeTravelHours), 10, 64) } - 
out.View = TableReference_ToProto(mapCtx, in.View) - out.Routine = RoutineReference_ToProto(mapCtx, in.Routine) - out.Dataset = DatasetAccessEntry_ToProto(mapCtx, in.Dataset) + out.StorageBillingModel = direct.ValueOf(in.StorageBillingModel) return out } -func Dataset_FromProto(mapCtx *direct.MapContext, in *pb.DatasetMetadata) *krm.Dataset { +func BigQueryDatasetSpec_FromAPI(mapCtx *direct.MapContext, in *api.Dataset) *krm.BigQueryDatasetSpec { if in == nil { return nil } - out := &krm.Dataset{} - out.Kind = direct.LazyPtr("BigQueryDataset") - out.Etag = direct.LazyPtr(in.ETag) - out.ID = direct.LazyPtr(in.FullID) - out.FriendlyName = direct.LazyPtr(in.Name) + out := &krm.BigQueryDatasetSpec{} + accessList := []krm.Access{} + for _, access := range in.Access { + curAccess := Access_FromAPI(mapCtx, access) + accessList = append(accessList, direct.ValueOf(curAccess)) + } + out.Access = accessList + out.DefaultCollation = direct.LazyPtr(in.DefaultCollation) + out.DefaultPartitionExpirationMs = direct.LazyPtr(in.DefaultPartitionExpirationMs) + out.DefaultTableExpirationMs = direct.LazyPtr(in.DefaultTableExpirationMs) + out.DefaultEncryptionConfiguration = EncryptionConfiguration_FromAPI(mapCtx, in.DefaultEncryptionConfiguration) out.Description = direct.LazyPtr(in.Description) - defaultTableExpirationMs := int64(in.DefaultTableExpiration / time.Millisecond) - out.DefaultTableExpirationMs = &defaultTableExpirationMs - defaultPartitionExpirationMs := int64(in.DefaultPartitionExpiration / time.Millisecond) - out.DefaultPartitionExpirationMs = &defaultPartitionExpirationMs - out.Labels = in.Labels - out.Access = direct.Slice_FromProto(mapCtx, in.Access, Access_FromProto) - //TODO: convert from time.Time to int64 - // out.CreationTime = in.CreationTime - // out.LastModifiedTime = in.LastModifiedTime - time.Now().UnixNano() + out.FriendlyName = direct.LazyPtr(in.FriendlyName) out.Location = direct.LazyPtr(in.Location) - out.DefaultEncryptionConfiguration = EncryptionConfiguration_FromProto(mapCtx, in.DefaultEncryptionConfig) - out.ExternalDatasetReference = ExternalDatasetReference_FromProto(mapCtx, in.ExternalDatasetReference) - out.DefaultCollation = direct.LazyPtr(in.DefaultCollation) - maxTimeTravelHours := (int64)(in.MaxTimeTravel / time.Hour) - out.MaxTimeTravelHours = &maxTimeTravelHours - out.Tags = direct.Slice_FromProto(mapCtx, in.Tags, DatasetTag_FromProto) + out.IsCaseInsensitive = direct.LazyPtr(in.IsCaseInsensitive) + maxTime := strconv.FormatInt(in.MaxTimeTravelHours, 10) + out.MaxTimeTravelHours = direct.LazyPtr(maxTime) out.StorageBillingModel = direct.LazyPtr(in.StorageBillingModel) return out } -func Dataset_ToProto(mapCtx *direct.MapContext, in *krm.Dataset) *pb.DatasetMetadata { +func BigQueryDatasetStatus_FromAPI(mapCtx *direct.MapContext, in *api.Dataset) *krm.BigQueryDatasetStatus { if in == nil { return nil } - out := &pb.DatasetMetadata{} - out.ETag = direct.ValueOf(in.Etag) - out.FullID = direct.ValueOf(in.ID) - out.Name = direct.ValueOf(in.FriendlyName) - out.Description = direct.ValueOf(in.Description) - out.DefaultTableExpiration = time.Duration(*in.DefaultTableExpirationMs) * time.Millisecond - out.DefaultPartitionExpiration = time.Duration(*in.DefaultPartitionExpirationMs) * time.Millisecond - out.Labels = in.Labels - out.Access = direct.Slice_ToProto(mapCtx, in.Access, Access_ToProto) - out.CreationTime = time.UnixMilli(*in.CreationTime) - out.LastModifiedTime = time.UnixMilli(*in.LastModifiedTime) - out.Location = direct.ValueOf(in.Location) - 
out.DefaultEncryptionConfig = EncryptionConfiguration_ToProto(mapCtx, in.DefaultEncryptionConfiguration) - out.ExternalDatasetReference = ExternalDatasetReference_ToProto(mapCtx, in.ExternalDatasetReference) - out.DefaultCollation = *in.DefaultCollation - out.MaxTimeTravel = time.Duration(*in.MaxTimeTravelHours) * time.Hour - out.Tags = direct.Slice_ToProto(mapCtx, in.Tags, DatasetTag_ToProto) - out.StorageBillingModel = direct.ValueOf(in.StorageBillingModel) + out := &krm.BigQueryDatasetStatus{} + out.Etag = direct.LazyPtr(in.Etag) + out.CreationTime = direct.LazyPtr(in.CreationTime) + out.LastModifiedTime = direct.LazyPtr(in.LastModifiedTime) + out.SelfLink = direct.LazyPtr(in.SelfLink) return out } -func DatasetAccessEntry_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccessEntry) *krm.DatasetAccessEntry { +func BigQueryDatasetStatusObservedState_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetStatus) *api.Dataset { if in == nil { return nil } - out := &krm.DatasetAccessEntry{} - out.Dataset = DatasetReference_FromProto(mapCtx, in.Dataset) - out.TargetTypes = in.TargetTypes + out := &api.Dataset{} + out.Etag = direct.ValueOf(in.Etag) + out.CreationTime = direct.ValueOf(in.CreationTime) + out.LastModifiedTime = direct.ValueOf(in.LastModifiedTime) + out.SelfLink = direct.ValueOf(in.SelfLink) return out } -func DatasetAccessEntry_ToProto(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *pb.DatasetAccessEntry { +func Access_ToAPI(mapCtx *direct.MapContext, in *krm.Access) *api.DatasetAccess { if in == nil { return nil } - out := &pb.DatasetAccessEntry{} - out.Dataset = DatasetReference_ToProto(mapCtx, in.Dataset) - out.TargetTypes = in.TargetTypes + out := &api.DatasetAccess{} + out.Domain = direct.ValueOf(in.Domain) + out.GroupByEmail = direct.ValueOf(in.GroupByEmail) + out.IamMember = direct.ValueOf(in.IamMember) + out.UserByEmail = direct.ValueOf(in.UserByEmail) + out.SpecialGroup = direct.ValueOf(in.SpecialGroup) + out.Role = direct.ValueOf(in.Role) + out.Dataset = DatasetAccessEntry_ToAPI(mapCtx, in.Dataset) + out.Routine = RoutineReference_ToAPI(mapCtx, in.Routine) + out.View = TableReference_ToAPI(mapCtx, in.View) return out } -func DatasetList_FromProto(mapCtx *direct.MapContext, in *pb.DatasetIterator) *krm.DatasetList { +func Access_FromAPI(mapCtx *direct.MapContext, in *api.DatasetAccess) *krm.Access { if in == nil { return nil } - out := &krm.DatasetList{} - in.ListHidden = true - out.Kind = direct.LazyPtr("BigQueryDataset") - var datasets []krm.ListFormatDataset - var next *pb.Dataset - next, _ = in.Next() - for next != nil { - datasets = append(datasets, *ListFormatDataset_FromProto(mapCtx, next)) - next, _ = in.Next() - } - out.Datasets = datasets - + out := &krm.Access{} + out.Domain = direct.LazyPtr(in.Domain) + out.GroupByEmail = direct.LazyPtr(in.GroupByEmail) + out.IamMember = direct.LazyPtr(in.IamMember) + out.UserByEmail = direct.LazyPtr(in.UserByEmail) + out.SpecialGroup = direct.LazyPtr(in.SpecialGroup) + out.Role = direct.LazyPtr(in.Role) + out.Dataset = DatasetAccessEntry_FromAPI(mapCtx, in.Dataset) + out.Routine = RoutineReference_FromAPI(mapCtx, in.Routine) + out.View = TableReference_FromAPI(mapCtx, in.View) return out } -func DatasetList_ToProto(mapCtx *direct.MapContext, in *krm.DatasetList) *pb.DatasetIterator { +func DatasetAccessEntry_FromAPI(mapCtx *direct.MapContext, in *api.DatasetAccessEntry) *krm.DatasetAccessEntry { if in == nil { return nil } - out := &pb.DatasetIterator{} - // Missing - return out -} -func 
DatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.Dataset) *krm.DatasetReference { - if in == nil { - return nil + out := &krm.DatasetAccessEntry{} + out.Dataset = &krm.DatasetReference{ + DatasetId: direct.LazyPtr(in.Dataset.DatasetId), + ProjectId: direct.LazyPtr(in.Dataset.ProjectId), } - out := &krm.DatasetReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID + out.TargetTypes = in.TargetTypes return out } -func DatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.DatasetReference) *pb.Dataset { +func DatasetAccessEntry_ToAPI(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *api.DatasetAccessEntry { if in == nil { return nil } - out := &pb.Dataset{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - return out -} -func DatasetTag_FromProto(mapCtx *direct.MapContext, in *pb.DatasetTag) *krm.GcpTag { - if in == nil { - return nil + out := &api.DatasetAccessEntry{} + out.Dataset = &api.DatasetReference{ + DatasetId: direct.ValueOf(in.Dataset.DatasetId), + ProjectId: direct.ValueOf(in.Dataset.ProjectId), } - out := &krm.GcpTag{} - out.TagKey = direct.LazyPtr(in.TagKey) - out.TagValue = direct.LazyPtr(in.TagValue) + out.TargetTypes = in.TargetTypes return out } -func DatasetTag_ToProto(mapCtx *direct.MapContext, in *krm.GcpTag) *pb.DatasetTag { +func DatasetReference_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetSpec, name string) *api.DatasetReference { if in == nil { return nil } - out := &pb.DatasetTag{} - out.TagKey = direct.ValueOf(in.TagKey) - out.TagValue = direct.ValueOf(in.TagValue) + out := &api.DatasetReference{} + out.DatasetId = name return out } -func EncryptionConfiguration_FromProto(mapCtx *direct.MapContext, in *pb.EncryptionConfig) *krm.EncryptionConfiguration { +func EncryptionConfiguration_ToAPI(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *api.EncryptionConfiguration { if in == nil { return nil } - out := &krm.EncryptionConfiguration{} - out.KmsKeyRef = &v1beta1.KMSCryptoKeyRef{ - Name: in.KMSKeyName, + out := &api.EncryptionConfiguration{} + if in.KmsKeyRef != nil { + out.KmsKeyName = in.KmsKeyRef.External } return out } -func EncryptionConfiguration_ToProto(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *pb.EncryptionConfig { - if in == nil { - return nil - } - out := &pb.EncryptionConfig{} - out.KMSKeyName = in.KmsKeyRef.Name - return out -} -func ErrorProto_FromProto(mapCtx *direct.MapContext, in *pb.Error) *krm.ErrorProto { - if in == nil { - return nil - } - out := &krm.ErrorProto{} - out.Reason = direct.LazyPtr(in.Reason) - out.Location = direct.LazyPtr(in.Location) - out.Message = direct.LazyPtr(in.Message) - return out -} -func ErrorProto_ToProto(mapCtx *direct.MapContext, in *krm.ErrorProto) *pb.Error { +func EncryptionConfiguration_FromAPI(mapCtx *direct.MapContext, in *api.EncryptionConfiguration) *krm.EncryptionConfiguration { if in == nil { return nil } - out := &pb.Error{} - out.Reason = direct.ValueOf(in.Reason) - out.Location = direct.ValueOf(in.Location) - out.Message = direct.ValueOf(in.Message) - return out -} -func ExternalDatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.ExternalDatasetReference) *krm.ExternalDatasetReference { - if in == nil { - return nil - } - out := &krm.ExternalDatasetReference{} - out.ExternalSource = direct.LazyPtr(in.ExternalSource) - out.Connection = direct.LazyPtr(in.Connection) - return out -} -func ExternalDatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.ExternalDatasetReference) 
*pb.ExternalDatasetReference { - if in == nil { - return nil - } - out := &pb.ExternalDatasetReference{} - out.ExternalSource = direct.ValueOf(in.ExternalSource) - out.Connection = direct.ValueOf(in.Connection) - return out -} -func ListFormatDataset_FromProto(mapCtx *direct.MapContext, in *pb.Dataset) *krm.ListFormatDataset { - if in == nil { - return nil - } - out := &krm.ListFormatDataset{} - out.Kind = direct.LazyPtr("BigQueryDataset") - out.DatasetReference = DatasetReference_FromProto(mapCtx, in) - return out -} -func ListFormatDataset_ToProto(mapCtx *direct.MapContext, in *krm.ListFormatDataset) *pb.Dataset { - if in == nil { - return nil + out := &krm.EncryptionConfiguration{} + out.KmsKeyRef = &v1beta1.KMSCryptoKeyRef{ + External: in.KmsKeyName, } - out := &pb.Dataset{} - out = DatasetReference_ToProto(mapCtx, in.DatasetReference) return out } -func RoutineReference_FromProto(mapCtx *direct.MapContext, in *pb.Routine) *krm.RoutineReference { +func RoutineReference_FromAPI(mapCtx *direct.MapContext, in *api.RoutineReference) *krm.RoutineReference { if in == nil { return nil } out := &krm.RoutineReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID - out.RoutineId = &in.RoutineID + out.DatasetId = direct.LazyPtr(in.DatasetId) + out.ProjectId = direct.LazyPtr(in.ProjectId) + out.RoutineId = direct.LazyPtr(in.RoutineId) return out } -func RoutineReference_ToProto(mapCtx *direct.MapContext, in *krm.RoutineReference) *pb.Routine { +func RoutineReference_ToAPI(mapCtx *direct.MapContext, in *krm.RoutineReference) *api.RoutineReference { if in == nil { return nil } - out := &pb.Routine{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - out.RoutineID = *in.RoutineId + out := &api.RoutineReference{} + out.DatasetId = direct.ValueOf(in.DatasetId) + out.ProjectId = direct.ValueOf(in.ProjectId) + out.RoutineId = direct.ValueOf(in.RoutineId) return out } -func TableReference_FromProto(mapCtx *direct.MapContext, in *pb.Table) *krm.TableReference { +func TableReference_FromAPI(mapCtx *direct.MapContext, in *api.TableReference) *krm.TableReference { if in == nil { return nil } out := &krm.TableReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID - out.TableId = &in.TableID + out.DatasetId = direct.LazyPtr(in.DatasetId) + out.ProjectId = direct.LazyPtr(in.ProjectId) + out.TableId = direct.LazyPtr(in.TableId) return out } -func TableReference_ToProto(mapCtx *direct.MapContext, in *krm.TableReference) *pb.Table { +func TableReference_ToAPI(mapCtx *direct.MapContext, in *krm.TableReference) *api.TableReference { if in == nil { return nil } - out := &pb.Table{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - out.TableID = *in.TableId + out := &api.TableReference{} + out.DatasetId = direct.ValueOf(in.DatasetId) + out.ProjectId = direct.ValueOf(in.ProjectId) + out.TableId = direct.ValueOf(in.TableId) return out } diff --git a/pkg/controller/direct/bigquerydataset/dataset_controller.go b/pkg/controller/direct/bigquerydataset/dataset_controller.go new file mode 100644 index 0000000000..9b3c36786f --- /dev/null +++ b/pkg/controller/direct/bigquerydataset/dataset_controller.go @@ -0,0 +1,315 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bigquerydataset + +import ( + "context" + "fmt" + "reflect" + + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" + refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/directbase" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/registry" + + clone "github.com/huandu/go-clone" + api "google.golang.org/api/bigquery/v2" + "google.golang.org/api/option" + "google.golang.org/protobuf/types/known/fieldmaskpb" + + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/klog/v2" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +const ( + ctrlName = "bigquery-controller" + serviceDomain = "//bigquery.googleapis.com" +) + +func init() { + registry.RegisterModel(krm.BigQueryDatasetGVK, NewModel) +} + +func NewModel(ctx context.Context, config *config.ControllerConfig) (directbase.Model, error) { + return &model{config: *config}, nil +} + +var _ directbase.Model = &model{} + +type model struct { + config config.ControllerConfig +} + +func (m *model) service(ctx context.Context) (*api.Service, error) { + var opts []option.ClientOption + opts, err := m.config.RESTClientOptions() + if err != nil { + return nil, err + } + gcpService, err := api.NewService(ctx, opts...) 
+ if err != nil { + return nil, fmt.Errorf("building Dataset client: %w", err) + } + return gcpService, err +} + +func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *unstructured.Unstructured) (directbase.Adapter, error) { + obj := &krm.BigQueryDataset{} + if err := runtime.DefaultUnstructuredConverter.FromUnstructured(u.Object, &obj); err != nil { + return nil, fmt.Errorf("error converting to %T: %w", obj, err) + } + + id, err := krm.NewBigQueryDatasetRef(ctx, reader, obj) + if err != nil { + return nil, err + } + + // Get bigquery GCP client + gcpService, err := m.service(ctx) + if err != nil { + return nil, err + } + return &Adapter{ + id: id, + gcpService: gcpService, + desired: obj, + reader: reader, + }, nil +} + +func (m *model) AdapterForURL(ctx context.Context, url string) (directbase.Adapter, error) { + // TODO: Support URLs + return nil, nil +} + +type Adapter struct { + id *krm.BigQueryDatasetRef + gcpService *api.Service + desired *krm.BigQueryDataset + actual *api.Dataset + reader client.Reader +} + +var _ directbase.Adapter = &Adapter{} + +func (a *Adapter) Find(ctx context.Context) (bool, error) { + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("getting BigQueryDataset", "name", a.id.External) + + parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + datasetGetCall := a.gcpService.Datasets.Get(parent.ProjectID, datasetId) + datasetpb, err := datasetGetCall.Do() + if err != nil { + if direct.IsNotFound(err) { + return false, nil + } + return false, fmt.Errorf("getting BigQueryDataset %q: %w", a.id.External, err) + } + a.actual = datasetpb + return true, nil +} + +func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error { + + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("creating Dataset", "name", a.id.External) + mapCtx := &direct.MapContext{} + + desiredDataset := BigQueryDatasetSpec_ToAPI(mapCtx, &a.desired.Spec, a.desired.Name) + desiredDataset.Labels = make(map[string]string) + for k, v := range a.desired.GetObjectMeta().GetLabels() { + desiredDataset.Labels[k] = v + } + desiredDataset.Labels["managed-by-cnrm"] = "true" + parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + // Resolve KMS key reference + if a.desired.Spec.DefaultEncryptionConfiguration != nil { + kmsRef, err := refs.ResolveKMSCryptoKeyRef(ctx, a.reader, a.desired, a.desired.Spec.DefaultEncryptionConfiguration.KmsKeyRef) + if err != nil { + return err + } + desiredDataset.DefaultEncryptionConfiguration.KmsKeyName = kmsRef.External + } + insertDatasetCall := a.gcpService.Datasets.Insert(parent.ProjectID, desiredDataset) + inserted, err := insertDatasetCall.Do() + if err != nil { + return fmt.Errorf("inserting Dataset %s: %w", a.id.External, err) + } + log.V(2).Info("successfully inserted Dataset", "name", a.id.External) + + status := &krm.BigQueryDatasetStatus{} + status = BigQueryDatasetStatus_FromAPI(mapCtx, inserted) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + status.ExternalRef = &a.id.External + return createOp.UpdateStatus(ctx, status, nil) +} + +func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperation) error { + u := updateOp.GetUnstructured() + + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("updating Dataset", 
"name", a.id.External) + mapCtx := &direct.MapContext{} + + // Convert KRM object to proto message + desiredKRM := a.desired.DeepCopy() + desired := BigQueryDatasetSpec_ToAPI(mapCtx, &desiredKRM.Spec, desiredKRM.Name) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + + resource := clone.Clone(a.actual).(*api.Dataset) + + // Check for immutable fields + if !reflect.DeepEqual(desired.Location, resource.Location) { + return fmt.Errorf("BigQueryDataset %s/%s location cannot be changed, actual: %s, desired: %s", u.GetNamespace(), u.GetName(), resource.Location, desired.Location) + } + + // Find diff + updateMask := &fieldmaskpb.FieldMask{} + if !reflect.DeepEqual(desired.Description, resource.Description) { + resource.Description = desired.Description + updateMask.Paths = append(updateMask.Paths, "description") + } + if !reflect.DeepEqual(desired.FriendlyName, resource.FriendlyName) { + resource.FriendlyName = desired.FriendlyName + updateMask.Paths = append(updateMask.Paths, "friendly_name") + } + if !reflect.DeepEqual(desired.DefaultPartitionExpirationMs, resource.DefaultPartitionExpirationMs) { + resource.DefaultPartitionExpirationMs = desired.DefaultPartitionExpirationMs + updateMask.Paths = append(updateMask.Paths, "default_partition_expirationMs") + } + if !reflect.DeepEqual(desired.DefaultTableExpirationMs, resource.DefaultTableExpirationMs) { + resource.DefaultTableExpirationMs = desired.DefaultTableExpirationMs + updateMask.Paths = append(updateMask.Paths, "default_table_expirationMs") + } + if !reflect.DeepEqual(desired.DefaultCollation, resource.DefaultCollation) { + resource.DefaultCollation = desired.DefaultCollation + updateMask.Paths = append(updateMask.Paths, "default_collation") + } + if desired.DefaultEncryptionConfiguration != nil && resource.DefaultEncryptionConfiguration != nil && !reflect.DeepEqual(desired.DefaultEncryptionConfiguration, resource.DefaultEncryptionConfiguration) { + // Resolve KMS key reference + if a.desired.Spec.DefaultEncryptionConfiguration != nil { + kmsRef, err := refs.ResolveKMSCryptoKeyRef(ctx, a.reader, a.desired, a.desired.Spec.DefaultEncryptionConfiguration.KmsKeyRef) + if err != nil { + return err + } + desired.DefaultEncryptionConfiguration.KmsKeyName = kmsRef.External + } + resource.DefaultEncryptionConfiguration.KmsKeyName = desired.DefaultEncryptionConfiguration.KmsKeyName + updateMask.Paths = append(updateMask.Paths, "default_encryption_configuration") + } + if !reflect.DeepEqual(desired.IsCaseInsensitive, resource.IsCaseInsensitive) { + resource.IsCaseInsensitive = desired.IsCaseInsensitive + updateMask.Paths = append(updateMask.Paths, "is_case_sensitive") + } + if !reflect.DeepEqual(desired.MaxTimeTravelHours, resource.MaxTimeTravelHours) { + resource.MaxTimeTravelHours = desired.MaxTimeTravelHours + updateMask.Paths = append(updateMask.Paths, "max_time_interval_hours") + } + if desired.Access != nil && resource.Access != nil && len(desired.Access) > 0 && !reflect.DeepEqual(desired.Access, resource.Access) { + for _, access := range desired.Access { + resource.Access = append(resource.Access, access) + } + updateMask.Paths = append(updateMask.Paths, "access") + } + if !reflect.DeepEqual(desired.StorageBillingModel, resource.StorageBillingModel) { + resource.StorageBillingModel = desired.StorageBillingModel + updateMask.Paths = append(updateMask.Paths, "storage_billing_model") + } + + if len(updateMask.Paths) == 0 { + return nil + } + parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return 
fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + + if desired.Access == nil || len(desired.Access) == 0 { + resource.Access = a.actual.Access + } + updateDatasetCall := a.gcpService.Datasets.Update(parent.ProjectID, datasetId, resource) + updated, err := updateDatasetCall.Do() + if err != nil { + return fmt.Errorf("updating Dataset %s: %w", a.id.External, err) + } + log.V(2).Info("successfully updated Dataset", "name", a.id.External) + + status := &krm.BigQueryDatasetStatus{} + status = BigQueryDatasetStatus_FromAPI(mapCtx, updated) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + return updateOp.UpdateStatus(ctx, status, nil) +} + +func (a *Adapter) Export(ctx context.Context) (*unstructured.Unstructured, error) { + if a.actual == nil { + return nil, fmt.Errorf("Find() not called") + } + u := &unstructured.Unstructured{} + + obj := &krm.BigQueryDataset{} + mapCtx := &direct.MapContext{} + obj.Spec = direct.ValueOf(BigQueryDatasetSpec_FromAPI(mapCtx, a.actual)) + if mapCtx.Err() != nil { + return nil, mapCtx.Err() + } + parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return nil, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + + obj.Spec.ProjectRef = &refs.ProjectRef{Name: parent.ProjectID} + obj.Spec.Location = &parent.Location + uObj, err := runtime.DefaultUnstructuredConverter.ToUnstructured(obj) + if err != nil { + return nil, err + } + u.Object = uObj + return u, nil +} + +// Delete implements the Adapter interface. +func (a *Adapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("deleting Dataset", "name", a.id.External) + + parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + deleteDatasetCall := a.gcpService.Datasets.Delete(parent.ProjectID, datasetId) + err = deleteDatasetCall.Do() + if err != nil { + return false, fmt.Errorf("deleting Dataset %s: %w", a.id.External, err) + } + log.V(2).Info("successfully deleted Dataset", "name", a.id.External) + + return true, nil +} diff --git a/pkg/controller/direct/bigquerydataset/dataset_externalresource.go b/pkg/controller/direct/bigquerydataset/dataset_externalresource.go new file mode 100644 index 0000000000..74ccbc2c9e --- /dev/null +++ b/pkg/controller/direct/bigquerydataset/dataset_externalresource.go @@ -0,0 +1,25 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bigquerydataset + +import ( + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" +) + +// AsExternalRef builds a externalRef from a BigQueryDataTransferConfig +func AsExternalRef(datasetRef *krm.BigQueryDatasetRef) *string { + e := serviceDomain + "/" + datasetRef.External + return &e +} diff --git a/pkg/controller/direct/bigquerydataset/utils.go b/pkg/controller/direct/bigquerydataset/utils.go new file mode 100644 index 0000000000..71e4335e1e --- /dev/null +++ b/pkg/controller/direct/bigquerydataset/utils.go @@ -0,0 +1,57 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bigquerydataset + +import ( + "encoding/json" + "fmt" + + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/reflect/protoreflect" +) + +func convertProtoToAPI(u protoreflect.ProtoMessage, v any) error { + if u == nil { + return nil + } + + j, err := protojson.Marshal(u) + if err != nil { + return fmt.Errorf("converting proto to json: %w", err) + } + + if err := json.Unmarshal(j, v); err != nil { + return fmt.Errorf("converting json to cloud API type: %w", err) + } + return nil +} + +func convertAPIToProto[V protoreflect.ProtoMessage](u any, pV *V) error { + if u == nil { + return nil + } + + j, err := json.Marshal(u) + if err != nil { + return fmt.Errorf("converting proto to json: %w", err) + } + + var v V + if err := json.Unmarshal(j, &v); err != nil { + return fmt.Errorf("converting json to proto type: %w", err) + } + *pV = v + return nil +} diff --git a/pkg/controller/direct/register/register.go b/pkg/controller/direct/register/register.go index 81fc73bf53..5a478ad8e4 100644 --- a/pkg/controller/direct/register/register.go +++ b/pkg/controller/direct/register/register.go @@ -19,6 +19,7 @@ import ( _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/apikeys" _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigqueryanalyticshub" _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigqueryconnection" + _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigquerydataset" _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigquerydatatransfer" _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/certificatemanager" _ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/cloudbuild" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden new file mode 100644 index 0000000000..6c64b9259f --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden @@ -0,0 +1,25 @@ 
+apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydatasetsample${uniqueId} +spec: + access: + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + - role: READER + specialGroup: projectReaders + - role: WRITER + specialGroup: projectWriters + friendlyName: bigquerydataset-sample-updated + location: us-central1 + maxTimeTravelHours: "168" + projectRef: + external: ${projectId} + resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml new file mode 100644 index 0000000000..34f4f241a1 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml @@ -0,0 +1,32 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydatasetsample${uniqueId} + namespace: ${uniqueId} +spec: + friendlyName: bigquerydataset-sample-updated + location: us-central1 + projectRef: + external: ${projectId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/us-central1/datasets/bigquerydatasetsample${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log new file mode 100644 index 0000000000..6b3fc1f9c8 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log @@ -0,0 +1,369 @@ +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydatasetsample${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydatasetsample${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: 
application/json +User-Agent: kcc/controller-manager + +{ + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}" + }, + "friendlyName": "bigquerydataset-sample", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "us-central1" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": 
"https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + 
"managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +DELETE https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml new file mode 100644 index 0000000000..8598aa3435 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatasetsample${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml new file mode 100644 index 0000000000..c1e87a2805 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatasetsample${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml index 9ca8d07847..e1b26c8300 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml @@ -14,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, 
"lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml index 94f172a61a..ad18e71f5a 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml index 5038f6c984..461dc1354f 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden new file mode 100644 index 0000000000..778d0db239 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden @@ -0,0 +1,25 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydataset${uniqueId} +spec: + access: + - domain: google.com + role: READER + - iamMember: allUsers + role: READER + - role: OWNER + specialGroup: projectOwners + defaultTableExpirationMs: 7200000 + description: BigQuery Dataset With Access Block v2 + friendlyName: bigquerydataset-accessblock + location: US + maxTimeTravelHours: "168" + projectRef: + external: ${projectId} + resourceID: bigquerydataset${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml new file mode 100644 index 0000000000..91d9811539 --- /dev/null +++ 
b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml @@ -0,0 +1,41 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydataset${uniqueId} + namespace: ${uniqueId} +spec: + access: + - domain: google.com + role: READER + - iamMember: allUsers + role: READER + - role: OWNER + specialGroup: projectOwners + defaultTableExpirationMs: 7200000 + description: BigQuery Dataset With Access Block v2 + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/US/datasets/bigquerydataset${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log new file mode 100644 index 0000000000..5dec562437 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log @@ -0,0 +1,393 @@ +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "friendlyName": "bigquerydataset-accessblock", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "US" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + 
"creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + 
"datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +DELETE 
https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml new file mode 100644 index 0000000000..064707f437 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml @@ -0,0 +1,32 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + defaultTableExpirationMs: 3600000 + description: "BigQuery Dataset With Access Block" + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + domain: google.com diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml new file mode 100644 index 0000000000..68a8e31750 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml @@ -0,0 +1,34 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + defaultTableExpirationMs: 7200000 + description: "BigQuery Dataset With Access Block v2" + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + iamMember: allUsers + - role: READER + domain: google.com \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml index 12584981ac..925b8db34d 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml @@ -21,6 +21,8 @@ spec: description: "BigQuery Dataset With Access Block" friendlyName: bigquerydataset-accessblock location: US + projectRef: + external: ${projectId} access: - role: OWNER specialGroup: projectOwners diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml index 5c6ed42223..e49cc61aae 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml @@ -21,6 +21,8 @@ spec: description: "BigQuery Dataset With Access Block v2" friendlyName: bigquerydataset-accessblock location: US + projectRef: + external: ${projectId} access: - role: OWNER specialGroup: projectOwners diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden new file mode 100644 index 0000000000..0523cbdbdf --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden @@ -0,0 +1,30 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydataset${uniqueId} +spec: + access: + - domain: google.com + role: READER + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + defaultEncryptionConfiguration: + kmsKeyRef: + external: projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId} + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + description: Fully Configured BigQuery Dataset updated + friendlyName: bigquerydataset-fullyconfigured-updated + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + resourceID: bigquerydataset${uniqueId} + storageBillingModel: LOGICAL \ No newline at end of file diff --git 
a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml new file mode 100644 index 0000000000..e717654a27 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml @@ -0,0 +1,49 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydataset${uniqueId} + namespace: ${uniqueId} +spec: + access: + - domain: google.com + role: READER + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + defaultCollation: "" + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + description: Fully Configured BigQuery Dataset updated + friendlyName: bigquerydataset-fullyconfigured-updated + isCaseInsensitive: false + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + storageBillingModel: LOGICAL +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/US/datasets/bigquerydataset${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log new file mode 100644 index 0000000000..4e7cf5c240 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log @@ -0,0 +1,999 @@ +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "KeyRing projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId} not found.", + "status": "NOT_FOUND" + } +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings?alt=json&keyRingId=kmskeyring-${uniqueId} +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: 
ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "CryptoKey projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId} not found.", + "status": "NOT_FOUND" + } +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys?alt=json&cryptoKeyId=kmscryptokey-${uniqueId}&skipInitialVersionCreation=false +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "purpose": "ENCRYPT_DECRYPT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: 
private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "etag": "abcdef0123A=" +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:setIamPolicy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "policy": { + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "version": 3 + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + 
+GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "bigquerydataset-dep", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "datasetReference": { + "datasetId": 
"bigquerydataset${uniqueId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "friendlyName": "bigquerydataset-fullyconfigured", + "isCaseInsensitive": true, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "US", + "maxTimeTravelHours": "72", + "storageBillingModel": "LOGICAL" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured", + "id": "000000000000000000000", + "isCaseInsensitive": true, + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "72", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured", + "id": "000000000000000000000", + "isCaseInsensitive": true, + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "72", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ 
+ { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": 
"projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +DELETE https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": 
"111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:setIamPolicy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "policy": { + "etag": "abcdef0123A=", + "version": 3 + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET 
https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cryptoKeyVersions": [ + { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + } + ], + "totalSize": 1 +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1:destroy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "DESTROY_SCHEDULED" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml new file mode 100644 index 0000000000..d8b4b77639 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml @@ -0,0 +1,38 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + description: "Fully Configured BigQuery Dataset" + friendlyName: bigquerydataset-fullyconfigured + defaultPartitionExpirationMs: 3600000 + defaultTableExpirationMs: 3600000 + defaultCollation: und:ci + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + isCaseInsensitive: true + location: US + maxTimeTravelHours: "72" + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + storageBillingModel: LOGICAL diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml new file mode 100644 index 0000000000..47a1531b49 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: kms.cnrm.cloud.google.com/v1beta1 +kind: KMSKeyRing +metadata: + name: kmskeyring-${uniqueId} +spec: + location: us +--- +apiVersion: kms.cnrm.cloud.google.com/v1beta1 +kind: KMSCryptoKey +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + labels: + key-one: value-one + name: kmscryptokey-${uniqueId} +spec: + keyRingRef: + name: kmskeyring-${uniqueId} +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMPolicy +metadata: + name: iampolicy-${uniqueId} +spec: + resourceRef: + apiVersion: kms.cnrm.cloud.google.com/v1beta1 + kind: KMSCryptoKey + name: kmscryptokey-${uniqueId} + bindings: + - role: roles/cloudkms.cryptoKeyEncrypterDecrypter + members: + - serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: "${projectId}" + name: bigquerydataset-dep diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml new file mode 100644 index 0000000000..3fe9cae024 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml @@ -0,0 +1,42 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + description: "Fully Configured BigQuery Dataset updated" + friendlyName: bigquerydataset-fullyconfigured-updated + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + defaultCollation: "" + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + isCaseInsensitive: false + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + domain: google.com + - role: OWNER + userByEmail: bigquerydataset-dep@${projectId}.iam.gserviceaccount.com + storageBillingModel: LOGICAL \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml index dfc500b102..10c9ffaed8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml @@ -15,1666 +15,1214 @@ --- version: 2 interactions: - - id: 0 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 369.624831ms - - id: 1 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 392.395968ms - - id: 2 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 138 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"datasetReference":{"datasetId":"bigquerydataset12yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": 
"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 234.053167ms + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 241.932587ms + - id: 2 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 147 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"datasetReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 468.340052ms - - id: 3 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 138 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"datasetReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - 
content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 555.587721ms - - id: 4 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 212.337709ms - - id: 5 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - 
"managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 296.156142ms - - id: 6 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 252.63049ms - - id: 7 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 201.043185ms - - id: 8 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": 
"example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 279.454008ms - - id: 9 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 200 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"tableReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"}} - form: {} - headers: - Content-Type: - - application/json - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables?alt=json&prettyPrint=false - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 327.577094ms - - id: 10 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 294.786438ms - - id: 11 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 159.509962ms - - id: 12 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 242.697825ms - - id: 13 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 887 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - 
{"configuration":{"jobTimeoutMs":"600000","labels":{"cnrm-test":"true","label-one":"value-one","managed-by-cnrm":"true"},"query":{"allowLargeResults":true,"createDisposition":"CREATE_NEVER","defaultDataset":{"datasetId":"bigquerydataset12yq2ldf3wcoir","projectId":"example-project"},"destinationTable":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"},"flattenResults":true,"priority":"INTERACTIVE","query":"SELECT state FROM [lookerdata:cdc.project_tycho_reports]","schemaUpdateOptions":["ALLOW_FIELD_ADDITION","ALLOW_FIELD_RELAXATION"],"scriptOptions":{"keyResultStatement":"LAST","statementTimeoutMs":"300000"},"useLegacySql":true,"useQueryCache":true,"writeDisposition":"WRITE_APPEND"}},"jobReference":{"jobId":"bigqueryjob-2yq2ldf3wcoir","location":"US","project":"example-project"}} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "2HQmN+wdbuo22yXteShF0Q==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "statementType": "SELECT" + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT" } - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - 
"code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 777.691638ms - - id: 14 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "2HQmN+wdbuo22yXteShF0Q==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 361.322656ms + - id: 3 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 147 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"datasetReference":{"datasetId":"bigquerydataset12yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": 
"1714007926032", - "query": { - "statementType": "SELECT" + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT" } - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 142.036947ms - - id: 15 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 356.365747ms + - id: 4 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, 
+ "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 142.666316ms + - id: 5 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 165.695217ms - - id: 16 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - 
proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 146.095736ms + - id: 6 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" + "role": "WRITER", + 
"specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 139.543417ms + - id: 7 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 135.950427ms + - id: 8 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 147.020072ms - - id: 17 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 240.845627ms + - id: 9 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 190 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"tableReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"}} + form: {} + headers: + Content-Type: + - application/json + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables?alt=json&prettyPrint=false + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 175.494323ms + - id: 10 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: 
'{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 237.337328ms + - id: 11 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 133.567967ms + - id: 12 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": 
"CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + "jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" + }, + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + "reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat 
com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat 
com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 
36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 
37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat 
com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat 
java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat 
com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 225.331399ms + - id: 13 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": "CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + "jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { + "projectId": "example-project", + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + 
"reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat 
com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat 
com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 249.474456ms + - id: 14 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 114.392789ms + - id: 15 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + 
"id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" - } - ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 157.11189ms - - id: 18 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": 
"example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 226.627284ms - - id: 19 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 164.637625ms + - id: 16 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + 
"location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 227.96517ms - - id: 20 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 318.668537ms - - id: 21 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 400 Bad Request - code: 400 - duration: 188.401707ms - - id: 22 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 246.780652ms - - id: 23 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 250.765634ms - - id: 24 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 169.425364ms + - id: 17 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": "CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + 
"jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { + "projectId": "example-project", + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" + }, + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + "reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat 
io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat 
com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 
36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 
37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat 
com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat 
java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat 
com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 249.096259ms - - id: 25 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 219.625286ms \ No newline at end of file + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 277.227604ms + - id: 18 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 172.077964ms + - id: 19 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json&deleteContents=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 126.402208ms + - id: 20 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 164.101024ms diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml index 932807a87f..f3d0ce6d36 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml @@ -19,7 +19,7 @@ metadata: label-one: "value-one" name: bigqueryjob-${uniqueId} spec: - location: "US" + location: "us-central1" jobTimeoutMs: "600000" query: query: "SELECT state FROM [lookerdata:cdc.project_tycho_reports]" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml index 8c5848bc33..072a5fee49 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml @@ -16,11 +16,15 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydataset1${uniqueId} +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydataset2${uniqueId} +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log index 4263316151..abb5eb32c8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 
"managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -881,7 +881,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml index 8cf0aea6c7..3bef110462 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml @@ -47,20 +47,20 @@ interactions: - application/json; charset=UTF-8 status: 404 Not Found code: 404 - duration: 615.256925ms + duration: 249.604457ms - id: 1 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 142 + content_length: 151 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com remote_addr: "" request_uri: "" body: | - {"datasetReference":{"datasetId":"bigquerydatasetsamplel7b81f5rgmgk"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} + {"datasetReference":{"datasetId":"bigquerydatasetsamplel7b81f5rgmgk"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} form: {} headers: Content-Type: @@ -78,7 +78,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -100,16 +100,16 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", "type": "DEFAULT" } headers: @@ -117,7 +117,7 @@ interactions: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 705.821848ms + duration: 455.813917ms - id: 2 request: proto: HTTP/1.1 @@ -147,7 +147,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -169,24 +169,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - 
application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 444.121271ms + duration: 155.125015ms - id: 3 request: proto: HTTP/1.1 @@ -202,7 +203,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -219,7 +220,7 @@ interactions: - application/json; charset=UTF-8 status: 404 Not Found code: 404 - duration: 212.578131ms + duration: 124.971748ms - id: 4 request: proto: HTTP/1.1 @@ -249,7 +250,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -271,30 +272,31 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 419.825033ms + duration: 224.821628ms - id: 5 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 507 + content_length: 509 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com @@ -307,7 +309,7 @@ interactions: Content-Type: - application/json X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables?alt=json&prettyPrint=false method: POST response: @@ -318,13 +320,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 689.112379ms + duration: 437.450628ms - id: 6 request: proto: HTTP/1.1 @@ -340,7 +342,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -351,13 +353,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 326.434921ms + duration: 221.730449ms - id: 7 request: proto: HTTP/1.1 @@ -373,7 +375,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -384,13 +386,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 441.704191ms + duration: 227.818618ms - id: 8 request: proto: HTTP/1.1 @@ -406,7 +408,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -417,19 +419,19 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 448.04455ms + duration: 147.233476ms - id: 9 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 1575 + content_length: 1577 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com @@ -442,7 +444,7 @@ interactions: Content-Type: - application/json X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: PUT response: @@ -453,13 +455,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 329.29309ms + duration: 232.683408ms - id: 10 request: proto: HTTP/1.1 @@ -475,7 +477,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -486,13 +488,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 282.727165ms + duration: 276.581644ms - id: 11 request: proto: HTTP/1.1 @@ -522,7 +524,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -544,24 +546,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 327.354671ms + duration: 145.635566ms - id: 12 request: proto: HTTP/1.1 @@ -577,7 +580,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -588,13 +591,13 @@ interactions: trailer: {} 
content_length: -1 uncompressed: true - body: '{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 371.806487ms + duration: 182.848192ms - id: 13 request: proto: HTTP/1.1 @@ -627,7 +630,7 @@ interactions: - application/json; charset=UTF-8 status: 400 Bad Request code: 400 - duration: 287.000684ms + duration: 138.945657ms - id: 14 request: proto: HTTP/1.1 @@ -643,7 +646,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: DELETE response: @@ -662,7 +665,7 @@ interactions: - application/json; charset=UTF-8 status: 204 No Content code: 204 - duration: 316.441881ms + duration: 205.179181ms - id: 15 request: proto: HTTP/1.1 @@ -692,7 +695,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -714,24 +717,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": 
"DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 436.728931ms + duration: 163.479275ms - id: 16 request: proto: HTTP/1.1 @@ -766,4 +770,4 @@ interactions: - application/json; charset=UTF-8 status: 204 No Content code: 204 - duration: 391.253016ms + duration: 191.252482ms diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml index 0ee6474d32..32ef5d84e8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml @@ -16,3 +16,5 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydatasetsample${uniqueId} +spec: + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log index 6f2bafb6f2..1c46ce6b25 100644 --- a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" @@ -1413,7 +1413,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml index c3cfc6cd83..9ac0d27274 100644 --- a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml @@ -18,6 +18,8 @@ metadata: name: bigquerydataset${uniqueId} annotations: cnrm.cloud.google.com/reconcile-interval-in-seconds: "0" # Make more deterministic +spec: + location: us-central1 --- apiVersion: 
bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log index 4cb23f7d40..065b1fac9a 100644 --- a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log @@ -729,7 +729,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -775,7 +775,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -829,7 +829,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" @@ -1607,7 +1607,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml index 19fe071483..ac24492d6e 100644 --- a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml @@ -61,6 +61,7 @@ metadata: name: bigquerydataset-${uniqueId} spec: resourceID: bigquerydataset${uniqueId} + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml index 002fd319cd..55b7474270 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml +++ 
b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml @@ -15,6 +15,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml index 9c84ae2468..ae60ea9963 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml @@ -20,3 +20,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml index 0692aa1b85..6664f6fcb2 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml @@ -20,3 +20,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden 
b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml index e51bf29cb4..cc5af0df9e 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml @@ -15,6 +15,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git 
a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml index 4875c42478..6f7cabcd6a 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/reconcile-interval-in-seconds : "5" spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml index 621d320506..f4aa168169 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/reconcile-interval-in-seconds : "10" spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log index 9c2d5b2eee..794acc3ecd 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" @@ -445,7 +445,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml index a6863fd831..7130aee1be 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydataset-resourceid-${uniqueId} spec: resourceID: bigquerydataset_resourceid_${uniqueId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden 
b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden index c6deeeb8c5..5f60085e3c 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml index 78a3f1328d..10cf7fe794 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml @@ -14,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydataset_${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log index fd92a16a92..323f1ba5db 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml 
b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml index c6310cccaa..fb453ab139 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml @@ -19,5 +19,4 @@ metadata: spec: resourceID: bigquerydataset_${uniqueId} friendlyName: bigquerydataset-sample - projectRef: - external: ${projectId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml index 3b599c01d3..9403a75c8d 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml @@ -19,5 +19,4 @@ metadata: spec: resourceID: bigquerydataset_${uniqueId} friendlyName: bigquerydataset-sample-updated - projectRef: - external: ${projectId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml index 3c4ff9cc03..e1b26c8300 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml @@ -1,17 +1,3 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: @@ -28,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml index e1f01a5a3e..31fa868bdc 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/state-into-spec: absent spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml index fba11be2c9..4e3b2fde98 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/state-into-spec: absent spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md 
b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md index b7e523671d..793457b24d 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md @@ -189,7 +189,7 @@ storageBillingModel: string

 string
-{% verbatim %}Required. A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.{% endverbatim %}
+{% verbatim %}A unique Id for this dataset, without the project name. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.{% endverbatim %}
@@ -199,7 +199,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this dataset.{% endverbatim %}
+{% verbatim %}The ID of the project containing this dataset.{% endverbatim %}
@@ -289,7 +289,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the dataset containing this routine.{% endverbatim %}
+{% verbatim %}The ID of the dataset containing this routine.{% endverbatim %}
@@ -299,7 +299,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this routine.{% endverbatim %}
+{% verbatim %}The ID of the project containing this routine.{% endverbatim %}
@@ -309,7 +309,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.{% endverbatim %}
+{% verbatim %}The Id of the routine. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.{% endverbatim %}
@@ -356,7 +356,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the dataset containing this table.{% endverbatim %}
+{% verbatim %}The ID of the dataset containing this table.{% endverbatim %}
@@ -366,7 +366,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this table.{% endverbatim %}
+{% verbatim %}The ID of the project containing this table.{% endverbatim %}
@@ -376,7 +376,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the table. The ID can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table ID with a partition decorator, such as `sample_table$20190123`.{% endverbatim %}
+{% verbatim %}The Id of the table. The Id can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`.{% endverbatim %}
@@ -511,7 +511,7 @@ storageBillingModel: string
 location
-Optional
+Required*
 string
@@ -535,7 +535,7 @@ storageBillingModel: string
 object
-{% verbatim %}The project that this resource belongs to. optional.{% endverbatim %}
+{% verbatim %} Optional. The project that this resource belongs to.{% endverbatim %}
@@ -616,6 +616,7 @@ conditions:
 type: string
 creationTime: integer
 etag: string
+externalRef: string
 lastModifiedTime: integer
 observedGeneration: integer
 selfLink: string
@@ -691,6 +692,13 @@ selfLink: string
 {% verbatim %}Output only. A hash of the resource.{% endverbatim %}
+externalRef
+string
+{% verbatim %}A unique specifier for the BigQueryAnalyticsHubDataExchangeListing resource in GCP.{% endverbatim %}

+ + lastModifiedTime diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md index 2b63a482a6..5aa0060080 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md @@ -1264,6 +1264,7 @@ metadata: name: bigquerytabledep spec: friendlyName: bigquerytable-dep + location: us-central1 ``` diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md index 5653f0783b..ade91e7965 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md @@ -649,6 +649,8 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: dataflowflextemplatejobdepstreaming +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md index ca88e19e48..ce5486fa22 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md @@ -947,6 +947,7 @@ metadata: cnrm.cloud.google.com/project-id: ${PROJECT_ID?} spec: resourceID: pubsubsubscriptiondepbigquery + location: us-central1 --- # Replace ${PROJECT_ID?} below with your desired project ID. apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 From b774633b1f545ee59d5fb53f9464447ced9511db Mon Sep 17 00:00:00 2001 From: Jason Vigil Date: Tue, 5 Nov 2024 23:33:35 +0000 Subject: [PATCH 28/31] fix: Use actual sqlinstance location preference if not specified Otherwise, the controller will always detect a diff and try to update on each re-reconcile. --- pkg/controller/direct/sql/sqlinstance_defaults.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/controller/direct/sql/sqlinstance_defaults.go b/pkg/controller/direct/sql/sqlinstance_defaults.go index 7fec6a26a1..07e5e9a351 100644 --- a/pkg/controller/direct/sql/sqlinstance_defaults.go +++ b/pkg/controller/direct/sql/sqlinstance_defaults.go @@ -94,6 +94,10 @@ func ApplySQLInstanceGCPDefaults(in *krm.SQLInstance, out *api.DatabaseInstance, } } } + if in.Spec.Settings.LocationPreference == nil && actual != nil { + // Use GCP specified locationPreference. + out.Settings.LocationPreference = actual.Settings.LocationPreference + } if in.Spec.Settings.PricingPlan == nil { // GCP default PricingPlan is PER_USE. 
out.Settings.PricingPlan = "PER_USE" From fd37a2915ab91b320df87f5c65142c201798c026 Mon Sep 17 00:00:00 2001 From: Gemma Hou Date: Thu, 10 Oct 2024 17:15:11 +0000 Subject: [PATCH 29/31] Add externalRef for ComputeFirewallPolicyRule --- .../computefirewallpolicyrule_reference.go | 167 ++++++++++++++++++ apis/compute/v1beta1/zz_generated.deepcopy.go | 35 ++++ apis/refs/v1beta1/computerefs.go | 46 +++++ .../firewallpolicyrule_controller.go | 117 ++++++++---- .../direct/compute/firewallpolicyrule/refs.go | 43 ----- ...lpolicyrule-egress-full-direct.golden.yaml | 1 + ...policyrule-ingress-full-direct.golden.yaml | 1 + ...ewallpolicyrule-minimal-direct.golden.yaml | 1 + tests/e2e/normalize.go | 17 ++ 9 files changed, 347 insertions(+), 81 deletions(-) create mode 100644 apis/compute/v1beta1/computefirewallpolicyrule_reference.go diff --git a/apis/compute/v1beta1/computefirewallpolicyrule_reference.go b/apis/compute/v1beta1/computefirewallpolicyrule_reference.go new file mode 100644 index 0000000000..526f61ce62 --- /dev/null +++ b/apis/compute/v1beta1/computefirewallpolicyrule_reference.go @@ -0,0 +1,167 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + "context" + "fmt" + "strconv" + "strings" + + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/k8s" + apierrors "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +var _ refsv1beta1.ExternalNormalizer = &ComputeFirewallPolicyRuleRef{} + +// ComputeFirewallPolicyRuleRef defines the resource reference to ComputeFirewallPolicyRule, which "External" field +// holds the GCP identifier for the KRM object. +type ComputeFirewallPolicyRuleRef struct { + // A reference to an externally managed ComputeFirewallPolicyRule resource. + // Should be in the format "locations/global/firewallPolicies//rules/". + External string `json:"external,omitempty"` + + // The name of a ComputeFirewallPolicyRule resource. + Name string `json:"name,omitempty"` + + // The namespace of a ComputeFirewallPolicyRule resource. + Namespace string `json:"namespace,omitempty"` + + parent *ComputeFirewallPolicyRuleParent +} + +// NormalizedExternal provision the "External" value for other resource that depends on ComputeFirewallPolicyRule. +// If the "External" is given in the other resource's spec.ComputeFirewallPolicyRuleRef, the given value will be used. +// Otherwise, the "Name" and "Namespace" will be used to query the actual ComputeFirewallPolicyRule object from the cluster. 
+func (r *ComputeFirewallPolicyRuleRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error) { + if r.External != "" && r.Name != "" { + return "", fmt.Errorf("cannot specify both name and external on %s reference", ComputeFirewallPolicyRuleGVK.Kind) + } + // From given External + if r.External != "" { + if _, _, err := parseComputeFirewallPolicyRuleExternal(r.External); err != nil { + return "", err + } + return r.External, nil + } + + // From the Config Connector object + if r.Namespace == "" { + r.Namespace = otherNamespace + } + key := types.NamespacedName{Name: r.Name, Namespace: r.Namespace} + u := &unstructured.Unstructured{} + u.SetGroupVersionKind(ComputeFirewallPolicyRuleGVK) + if err := reader.Get(ctx, key, u); err != nil { + if apierrors.IsNotFound(err) { + return "", k8s.NewReferenceNotFoundError(u.GroupVersionKind(), key) + } + return "", fmt.Errorf("reading referenced %s %s: %w", ComputeFirewallPolicyRuleGVK, key, err) + } + // Get external from status.externalRef. This is the most trustworthy place. + actualExternalRef, _, err := unstructured.NestedString(u.Object, "status", "externalRef") + if err != nil { + return "", fmt.Errorf("reading status.externalRef: %w", err) + } + if actualExternalRef == "" { + return "", fmt.Errorf("ComputeFirewallPolicyRule is not ready yet") + } + r.External = actualExternalRef + return r.External, nil +} + +// New builds a NewComputeFirewallPolicyRuleRef from the Config Connector ComputeFirewallPolicyRule object. +func NewComputeFirewallPolicyRuleRef(ctx context.Context, reader client.Reader, obj *ComputeFirewallPolicyRule) (*ComputeFirewallPolicyRuleRef, error) { + id := &ComputeFirewallPolicyRuleRef{} + + firewallPolicyRef, err := refsv1beta1.ResolveComputeFirewallPolicy(ctx, reader, obj, obj.Spec.FirewallPolicyRef) + if err != nil { + return nil, err + } + firewallPolicy := firewallPolicyRef.External + if firewallPolicy == "" { + return nil, fmt.Errorf("cannot resolve firewallPolicy") + } + + id.parent = &ComputeFirewallPolicyRuleParent{FirewallPolicy: firewallPolicy} + + // Get priority. 
Priority is a required field + priority := strconv.Itoa(int(obj.Spec.Priority)) + + // Use approved External + externalRef := valueOf(obj.Status.ExternalRef) + if externalRef == "" { + id.External = asComputeFirewallPolicyRuleExternal(id.parent, priority) + return id, nil + } + + // Validate desired with actual + actualParent, actualPriority, err := parseComputeFirewallPolicyRuleExternal(externalRef) + if err != nil { + return nil, err + } + if actualParent.FirewallPolicy != firewallPolicy { + return nil, fmt.Errorf("spec.firewallPolicyRef changed, expect %s, got %s", actualParent.FirewallPolicy, firewallPolicy) + } + if actualPriority != priority { + return nil, fmt.Errorf("cannot reset `spec.priority` to %s, since it has already assigned to %s", + priority, actualPriority) + } + id.External = externalRef + id.parent = &ComputeFirewallPolicyRuleParent{FirewallPolicy: firewallPolicy} + return id, nil +} + +func (r *ComputeFirewallPolicyRuleRef) Parent() (*ComputeFirewallPolicyRuleParent, error) { + if r.parent != nil { + return r.parent, nil + } + if r.External != "" { + parent, _, err := parseComputeFirewallPolicyRuleExternal(r.External) + if err != nil { + return nil, err + } + return parent, nil + } + return nil, fmt.Errorf("ComputeFirewallPolicyRule not initialized from `NewComputeFirewallPolicyRuleRef` or `NormalizedExternal`") +} + +type ComputeFirewallPolicyRuleParent struct { + FirewallPolicy string +} + +func (p *ComputeFirewallPolicyRuleParent) String() string { + return "locations/global/firewallPolicies/" + p.FirewallPolicy +} + +func asComputeFirewallPolicyRuleExternal(parent *ComputeFirewallPolicyRuleParent, priority string) (external string) { + return parent.String() + "/rules/" + priority +} + +func parseComputeFirewallPolicyRuleExternal(external string) (parent *ComputeFirewallPolicyRuleParent, priority string, err error) { + tokens := strings.Split(external, "/") + if len(tokens) != 6 || tokens[0] != "locations" || tokens[2] != "firewallPolicies" || tokens[4] != "rules" { + return nil, "", fmt.Errorf("format of ComputeFirewallPolicyRule external=%q was not known (use firewallPolicies//rules/)", external) + } + parent = &ComputeFirewallPolicyRuleParent{ + FirewallPolicy: tokens[3], + } + priority = tokens[5] + return parent, priority, nil +} diff --git a/apis/compute/v1beta1/zz_generated.deepcopy.go b/apis/compute/v1beta1/zz_generated.deepcopy.go index 4e08b179a3..ca24003cfc 100644 --- a/apis/compute/v1beta1/zz_generated.deepcopy.go +++ b/apis/compute/v1beta1/zz_generated.deepcopy.go @@ -83,6 +83,41 @@ func (in *ComputeFirewallPolicyRuleList) DeepCopyObject() runtime.Object { return nil } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ComputeFirewallPolicyRuleParent) DeepCopyInto(out *ComputeFirewallPolicyRuleParent) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComputeFirewallPolicyRuleParent. +func (in *ComputeFirewallPolicyRuleParent) DeepCopy() *ComputeFirewallPolicyRuleParent { + if in == nil { + return nil + } + out := new(ComputeFirewallPolicyRuleParent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ComputeFirewallPolicyRuleRef) DeepCopyInto(out *ComputeFirewallPolicyRuleRef) { + *out = *in + if in.parent != nil { + in, out := &in.parent, &out.parent + *out = new(ComputeFirewallPolicyRuleParent) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComputeFirewallPolicyRuleRef. +func (in *ComputeFirewallPolicyRuleRef) DeepCopy() *ComputeFirewallPolicyRuleRef { + if in == nil { + return nil + } + out := new(ComputeFirewallPolicyRuleRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ComputeFirewallPolicyRuleSpec) DeepCopyInto(out *ComputeFirewallPolicyRuleSpec) { *out = *in diff --git a/apis/refs/v1beta1/computerefs.go b/apis/refs/v1beta1/computerefs.go index 410bd24ac1..a491f5fca0 100644 --- a/apis/refs/v1beta1/computerefs.go +++ b/apis/refs/v1beta1/computerefs.go @@ -303,3 +303,49 @@ type ComputeFirewallPolicyRef struct { /* The `namespace` field of a `ComputeFirewallPolicy ` resource. */ Namespace string `json:"namespace,omitempty"` } + +func ResolveComputeFirewallPolicy(ctx context.Context, reader client.Reader, src client.Object, ref *ComputeFirewallPolicyRef) (*ComputeFirewallPolicyRef, error) { + if ref == nil { + return nil, nil + } + + if ref.External != "" { + if ref.Name != "" { + return nil, fmt.Errorf("cannot specify both name and external on reference") + } + return ref, nil + } + + if ref.Name == "" { + return nil, fmt.Errorf("must specify either name or external on reference") + } + + key := types.NamespacedName{ + Namespace: ref.Namespace, + Name: ref.Name, + } + if key.Namespace == "" { + key.Namespace = src.GetNamespace() + } + + computeFirewallPolicy := &unstructured.Unstructured{} + computeFirewallPolicy.SetGroupVersionKind(schema.GroupVersionKind{ + Group: "compute.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "ComputeFirewallPolicy", + }) + if err := reader.Get(ctx, key, computeFirewallPolicy); err != nil { + if apierrors.IsNotFound(err) { + return nil, k8s.NewReferenceNotFoundError(computeFirewallPolicy.GroupVersionKind(), key) + } + return nil, fmt.Errorf("error reading referenced ComputeFirewallPolicy %v: %w", key, err) + } + + resourceID, err := GetResourceID(computeFirewallPolicy) + if err != nil { + return nil, err + } + + return &ComputeFirewallPolicyRef{ + External: fmt.Sprintf("%s", resourceID)}, nil +} diff --git a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go index d305c44218..b70f820063 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/firewallpolicyrule_controller.go @@ -17,6 +17,8 @@ package firewallpolicyrule import ( "context" "fmt" + "strconv" + "strings" "google.golang.org/api/option" @@ -51,8 +53,7 @@ type firewallPolicyRuleModel struct { var _ directbase.Model = &firewallPolicyRuleModel{} type firewallPolicyRuleAdapter struct { - firewallPolicy string - priority int32 + id *krm.ComputeFirewallPolicyRuleRef firewallPoliciesClient *gcp.FirewallPoliciesClient desired *krm.ComputeFirewallPolicyRule actual *computepb.FirewallPolicyRule @@ -80,23 +81,15 @@ func (m *firewallPolicyRuleModel) AdapterForObject(ctx context.Context, reader c return nil, fmt.Errorf("error converting to %T: %w", obj, err) } - // Get firewall policy - 
firewallPolicyRef, err := ResolveComputeFirewallPolicy(ctx, reader, obj, obj.Spec.FirewallPolicyRef) + firewallPolicyRuleRef, err := krm.NewComputeFirewallPolicyRuleRef(ctx, reader, obj) if err != nil { return nil, err - } - obj.Spec.FirewallPolicyRef.External = firewallPolicyRef.External - firewallPolicy := obj.Spec.FirewallPolicyRef.External - - // Get priority - priority := int32(obj.Spec.Priority) firewallPolicyRuleAdapter := &firewallPolicyRuleAdapter{ - firewallPolicy: firewallPolicy, - priority: priority, - desired: obj, - reader: reader, + id: firewallPolicyRuleRef, + desired: obj, + reader: reader, } // Get GCP client @@ -116,7 +109,7 @@ func (m *firewallPolicyRuleModel) AdapterForURL(ctx context.Context, url string) func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { log := klog.FromContext(ctx) - log.V(2).Info("getting ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("getting ComputeFirewallPolicyRule", "name", a.id.External) firewallPolicyRule, err := a.get(ctx) if err != nil { @@ -126,7 +119,7 @@ func (a *firewallPolicyRuleAdapter) Find(ctx context.Context) (bool, error) { if direct.IsBadRequest(err) { return false, nil } - return false, fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + return false, fmt.Errorf("getting ComputeFirewallPolicyRule %s: %w", a.id.External, err) } a.actual = firewallPolicyRule return true, nil @@ -139,7 +132,8 @@ func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *direct } log := klog.FromContext(ctx) - log.V(2).Info("creating ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("creating ComputeFirewallPolicyRule", "name", a.id.External) + mapCtx := &direct.MapContext{} desired := a.desired.DeepCopy() @@ -149,31 +143,45 @@ func (a *firewallPolicyRuleAdapter) Create(ctx context.Context, createOp *direct return mapCtx.Err() } + parent, err := a.id.Parent() + if err != nil { + return fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } + req := &computepb.AddRuleFirewallPolicyRequest{ FirewallPolicyRuleResource: firewallPolicyRule, - FirewallPolicy: a.firewallPolicy, + FirewallPolicy: parent.FirewallPolicy, } op, err := a.firewallPoliciesClient.AddRule(ctx, req) if err != nil { - return fmt.Errorf("creating ComputeFirewallPolicyRule %d: %w", a.priority, err) + return fmt.Errorf("creating ComputeFirewallPolicyRule %s: %w", a.id.External, err) } if !op.Done() { err = op.Wait(ctx) if err != nil { - return fmt.Errorf("waiting ComputeFirewallPolicyRule %d create failed: %w", a.priority, err) + return fmt.Errorf("waiting ComputeFirewallPolicyRule %s create failed: %w", a.id.External, err) } } - log.V(2).Info("successfully created ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("successfully created ComputeFirewallPolicyRule", "name", a.id.External) // Get the created resource created, err := a.get(ctx) if err != nil { - return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + return fmt.Errorf("getting ComputeFirewallPolicyRule %s: %w", a.id.External, err) } status := &krm.ComputeFirewallPolicyRuleStatus{} status = ComputeFirewallPolicyRuleStatus_FromProto(mapCtx, created) + + parent, err = a.id.Parent() + if err != nil { + return err + } + + priority := strconv.Itoa(int(*created.Priority)) + externalRef := parent.String() + "/rules/" + priority + status.ExternalRef = &externalRef return createOp.UpdateStatus(ctx, status, nil) } @@ -186,7 +194,7 @@ func (a 
*firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct } log := klog.FromContext(ctx) - log.V(2).Info("updating ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("updating ComputeFirewallPolicyRule", "name", a.id.External) mapCtx := &direct.MapContext{} desired := a.desired.DeepCopy() @@ -200,27 +208,38 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct updated := &computepb.FirewallPolicyRule{} + parent, err := a.id.Parent() + if err != nil { + return fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } + + tokens := strings.Split(a.id.External, "/") + priority, err := strconv.ParseInt(tokens[5], 10, 32) + if err != nil { + return fmt.Errorf("get ComputeFirewallPolicyRule priority %s: %w", a.id.External, err) + } + updateReq := &computepb.PatchRuleFirewallPolicyRequest{ FirewallPolicyRuleResource: firewallPolicyRule, - FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(a.priority), + FirewallPolicy: parent.FirewallPolicy, + Priority: direct.PtrTo(int32(priority)), } op, err := a.firewallPoliciesClient.PatchRule(ctx, updateReq) if err != nil { - return fmt.Errorf("updating ComputeFirewallPolicyRule %d: %w", a.priority, err) + return fmt.Errorf("updating ComputeFirewallPolicyRule %s: %w", a.id.External, err) } if !op.Done() { err = op.Wait(ctx) if err != nil { - return fmt.Errorf("waiting ComputeFirewallPolicyRule %d update failed: %w", a.priority, err) + return fmt.Errorf("waiting ComputeFirewallPolicyRule %s update failed: %w", a.id.External, err) } } - log.V(2).Info("successfully updated ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("successfully updated ComputeFirewallPolicyRule", "name", a.id.External) // Get the updated resource updated, err = a.get(ctx) if err != nil { - return fmt.Errorf("getting ComputeFirewallPolicyRule %d: %w", a.priority, err) + return fmt.Errorf("getting ComputeFirewallPolicyRule %s: %w", a.id.External, err) } status := &krm.ComputeFirewallPolicyRuleStatus{} @@ -230,7 +249,7 @@ func (a *firewallPolicyRuleAdapter) Update(ctx context.Context, updateOp *direct func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.Unstructured, error) { if a.actual == nil { - return nil, fmt.Errorf("firewallPolicyRule %d not found", a.priority) + return nil, fmt.Errorf("firewallPolicyRule %s not found", a.id.External) } mc := &direct.MapContext{} @@ -253,23 +272,34 @@ func (a *firewallPolicyRuleAdapter) Export(ctx context.Context) (*unstructured.U // Delete implements the Adapter interface. 
func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { log := klog.FromContext(ctx) - log.V(2).Info("deleting ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("deleting ComputeFirewallPolicyRule", "name", a.id.External) + + parent, err := a.id.Parent() + if err != nil { + return false, fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } + + tokens := strings.Split(a.id.External, "/") + priority, err := strconv.ParseInt(tokens[5], 10, 32) + if err != nil { + return false, fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } delReq := &computepb.RemoveRuleFirewallPolicyRequest{ - FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(a.priority), + FirewallPolicy: parent.FirewallPolicy, + Priority: direct.PtrTo(int32(priority)), } op, err := a.firewallPoliciesClient.RemoveRule(ctx, delReq) if err != nil { - return false, fmt.Errorf("deleting ComputeFirewallPolicyRule %d: %w", a.priority, err) + return false, fmt.Errorf("deleting ComputeFirewallPolicyRule %s: %w", a.id.External, err) } if !op.Done() { err = op.Wait(ctx) if err != nil { - return false, fmt.Errorf("waiting ComputeFirewallPolicyRule %d delete failed: %w", a.priority, err) + return false, fmt.Errorf("waiting ComputeFirewallPolicyRule %s delete failed: %w", a.id.External, err) } } - log.V(2).Info("successfully deleted ComputeFirewallPolicyRule", "priority", a.priority) + log.V(2).Info("successfully deleted ComputeFirewallPolicyRule", "name", a.id.External) // Get the deleted rules _, err = a.get(ctx) @@ -280,9 +310,20 @@ func (a *firewallPolicyRuleAdapter) Delete(ctx context.Context, deleteOp *direct } func (a *firewallPolicyRuleAdapter) get(ctx context.Context) (*computepb.FirewallPolicyRule, error) { + parent, err := a.id.Parent() + if err != nil { + return nil, fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } + + tokens := strings.Split(a.id.External, "/") + priority, err := strconv.ParseInt(tokens[5], 10, 32) + if err != nil { + return nil, fmt.Errorf("get ComputeFirewallPolicyRule parent %s: %w", a.id.External, err) + } + getReq := &computepb.GetRuleFirewallPolicyRequest{ - FirewallPolicy: a.firewallPolicy, - Priority: direct.PtrTo(a.priority), + FirewallPolicy: parent.FirewallPolicy, + Priority: direct.PtrTo(int32(priority)), } return a.firewallPoliciesClient.GetRule(ctx, getReq) } diff --git a/pkg/controller/direct/compute/firewallpolicyrule/refs.go b/pkg/controller/direct/compute/firewallpolicyrule/refs.go index 1597cb1c5f..408a1e8782 100644 --- a/pkg/controller/direct/compute/firewallpolicyrule/refs.go +++ b/pkg/controller/direct/compute/firewallpolicyrule/refs.go @@ -29,49 +29,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" ) -func ResolveComputeFirewallPolicy(ctx context.Context, reader client.Reader, src client.Object, ref *refs.ComputeFirewallPolicyRef) (*refs.ComputeFirewallPolicyRef, error) { - if ref == nil { - return nil, nil - } - - if ref.External != "" { - if ref.Name != "" { - return nil, fmt.Errorf("cannot specify both name and external on reference") - } - return ref, nil - } - - if ref.Name == "" { - return nil, fmt.Errorf("must specify either name or external on reference") - } - - key := types.NamespacedName{ - Namespace: ref.Namespace, - Name: ref.Name, - } - if key.Namespace == "" { - key.Namespace = src.GetNamespace() - } - - computeFirwallPolicy, err := resolveResourceName(ctx, reader, key, schema.GroupVersionKind{ - 
Group: "compute.cnrm.cloud.google.com", - Version: "v1beta1", - Kind: "ComputeFirewallPolicy", - }) - - if err != nil { - return nil, err - } - - resourceID, err := refs.GetResourceID(computeFirwallPolicy) - if err != nil { - return nil, err - } - - return &refs.ComputeFirewallPolicyRef{ - External: fmt.Sprintf("%s", resourceID)}, nil -} - func ResolveComputeNetwork(ctx context.Context, reader client.Reader, src client.Object, ref *refs.ComputeNetworkRef) (*refs.ComputeNetworkRef, error) { if ref == nil { return nil, nil diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml index 29528ddc81..d25b67e628 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-egress-full-direct/_generated_object_computefirewallpolicyrule-egress-full-direct.golden.yaml @@ -47,6 +47,7 @@ status: reason: UpToDate status: "True" type: Ready + externalRef: locations/global/firewallPolicies/${firewallPolicyID}/rules/9000 kind: compute#firewallPolicyRule observedGeneration: 2 ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml index e9d64550a1..96cfbdc7a3 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-ingress-full-direct/_generated_object_computefirewallpolicyrule-ingress-full-direct.golden.yaml @@ -47,6 +47,7 @@ status: reason: UpToDate status: "True" type: Ready + externalRef: locations/global/firewallPolicies/${firewallPolicyID}/rules/9000 kind: compute#firewallPolicyRule observedGeneration: 2 ruleTupleCount: 4 diff --git a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml index c032270873..85be828bb9 100644 --- a/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml +++ 
b/pkg/test/resourcefixture/testdata/basic/compute/v1beta1/computefirewallpolicyrule/computefirewallpolicyrule-minimal-direct/_generated_object_computefirewallpolicyrule-minimal-direct.golden.yaml @@ -30,6 +30,7 @@ status: reason: UpToDate status: "True" type: Ready + externalRef: locations/global/firewallPolicies/${firewallPolicyID}/rules/9000 kind: compute#firewallPolicyRule observedGeneration: 2 ruleTupleCount: 2 diff --git a/tests/e2e/normalize.go b/tests/e2e/normalize.go index c845952255..daace98617 100644 --- a/tests/e2e/normalize.go +++ b/tests/e2e/normalize.go @@ -291,6 +291,22 @@ func normalizeKRMObject(t *testing.T, u *unstructured.Unstructured, project test } } + // Get firewall policy id from firewall policy rule's externalRef and replace it + externalRef, _, _ := unstructured.NestedString(u.Object, "status", "externalRef") + if externalRef != "" { + tokens := strings.Split(externalRef, "/") + n := len(tokens) + if n >= 2 { + typeName := tokens[len(tokens)-2] + firewallPolicyId := tokens[len(tokens)-3] + if typeName == "rules" { + visitor.stringTransforms = append(visitor.stringTransforms, func(path string, s string) string { + return strings.ReplaceAll(s, firewallPolicyId, "${firewallPolicyID}") + }) + } + } + } + resourceID, _, _ := unstructured.NestedString(u.Object, "spec", "resourceID") if resourceID != "" { switch u.GroupVersionKind() { @@ -306,6 +322,7 @@ func normalizeKRMObject(t *testing.T, u *unstructured.Unstructured, project test case schema.GroupVersionKind{Group: "compute.cnrm.cloud.google.com", Version: "v1beta1", Kind: "ComputeFirewallPolicy"}: visitor.stringTransforms = append(visitor.stringTransforms, func(path string, s string) string { return strings.ReplaceAll(s, resourceID, "${firewallPolicyID}") + }) } } From 841e0a2980431840d159f6a4ebea7579bb6084d4 Mon Sep 17 00:00:00 2001 From: Jingyi Hu Date: Tue, 5 Nov 2024 23:19:45 +0000 Subject: [PATCH 30/31] chore: remove a sample for bigquerydatatransferconfig --- .../bigquery_v1beta1_bigquerydataset.yaml | 21 --------- ...er_v1beta1_bigquerydatatransferconfig.yaml | 32 -------------- .../snippetgeneration/snippetgeneration.go | 2 +- .../bigquerydatatransferconfig.md | 44 ------------------- 4 files changed, 1 insertion(+), 98 deletions(-) delete mode 100644 config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml delete mode 100644 config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml deleted file mode 100644 index df478adf76..0000000000 --- a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 -kind: BigQueryDataset -metadata: - name: bigquerydatatransferconfigdepsalesforce -spec: - friendlyName: bigquerydatatransferconfig-dep-salesforce - location: us-central1 diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml deleted file mode 100644 index 7673609b25..0000000000 --- a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 -kind: BigQueryDataTransferConfig -metadata: - name: bigquerydatatransferconfig-sample-salesforce -spec: - projectRef: - # Replace ${PROJECT_ID?} with your project ID. - external: ${PROJECT_ID?} - location: us-central1 - displayName: "example of big query data transfer config" - dataSourceID: "salesforce" - datasetRef: - name: bigquerydatatransferconfigdepsalesforce - params: - "connector.authentication.oauth.clientId": "client-id" - "connector.authentication.oauth.clientSecret": "client-secret" - "connector.authentication.oauth.myDomain": "MyDomainName" - "assets": "asset-a" diff --git a/pkg/snippet/snippetgeneration/snippetgeneration.go b/pkg/snippet/snippetgeneration/snippetgeneration.go index 81c7ba4df7..6905e370fc 100644 --- a/pkg/snippet/snippetgeneration/snippetgeneration.go +++ b/pkg/snippet/snippetgeneration/snippetgeneration.go @@ -38,7 +38,7 @@ var preferredSampleForResource = map[string]string{ "bigqueryjob": "query-bigquery-job", "bigtableappprofile": "multicluster-bigtable-app-profile", "bigtableinstance": "replicated-instance", - "bigquerydatatransferconfig": "bigquerydatatransferconfig-salesforce", + "bigquerydatatransferconfig": "bigquerydatatransferconfig-scheduledquery", "billingbudgetsbudget": "calendar-budget", "binaryauthorizationpolicy": "cluster-policy", "certificatemanagercertificate": "self-managed-certificate", diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md index d52eca762c..2ee69f5794 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md @@ -646,50 +646,6 @@ observedState: ## Sample YAML(s) -### BigQueryDataTransferConfig Salesforce 
-```yaml -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 -kind: BigQueryDataTransferConfig -metadata: - name: bigquerydatatransferconfig-sample-salesforce -spec: - projectRef: - # Replace ${PROJECT_ID?} with your project ID. - external: ${PROJECT_ID?} - location: us-central1 - displayName: "example of big query data transfer config" - dataSourceID: "salesforce" - datasetRef: - name: bigquerydatatransferconfigdepsalesforce - params: - "connector.authentication.oauth.clientId": "client-id" - "connector.authentication.oauth.clientSecret": "client-secret" - "connector.authentication.oauth.myDomain": "MyDomainName" - "assets": "asset-a" ---- -apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 -kind: BigQueryDataset -metadata: - name: bigquerydatatransferconfigdepsalesforce -spec: - friendlyName: bigquerydatatransferconfig-dep-salesforce - location: us-central1 -``` - ### BigQueryDataTransferConfig Scheduledquery ```yaml # Copyright 2024 Google LLC From 0cea0905594825d725d8ec36ef8c3dd5a8f130ba Mon Sep 17 00:00:00 2001 From: Joyce Ma Date: Thu, 7 Nov 2024 00:09:22 +0000 Subject: [PATCH 31/31] Update release note --- docs/releasenotes/release-1.125.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/releasenotes/release-1.125.md b/docs/releasenotes/release-1.125.md index db4be43de3..f9dd1e8112 100644 --- a/docs/releasenotes/release-1.125.md +++ b/docs/releasenotes/release-1.125.md @@ -10,8 +10,9 @@ TODO: list contributors with `git log v1.124.0... | grep Merge | grep from | awk ## Resources promoted from alpha to beta: -* `RedisCluster` is now a v1beta1 resource. * `BigQueryAnlayticsHubDataExchange` is now a v1beta1 resource. +* `PrivilegedAccessManagerEntitlement` is now a v1beta1 resource. +* `RedisCluster` is now a v1beta1 resource. ## New Resources: @@ -19,8 +20,9 @@ TODO: list contributors with `git log v1.124.0... | grep Merge | grep from | awk ## New Fields: -* PlaceholderKind - * Added `spec.placeholder` field. +* `AlloyDBInstance` + * Added `spec.networkConfig.enableOutboundPublicIp` field. + * Added `status.outboundPublicIpAddresses` field. ## New features:
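
For quick reference, here is a minimal, self-contained sketch of the `status.externalRef` format that the ComputeFirewallPolicyRule patches above introduce (`locations/global/firewallPolicies/<firewallPolicy>/rules/<priority>`); the package layout and helper names below are illustrative only and are not the actual source tree.

```go
// Self-contained sketch of the ComputeFirewallPolicyRule externalRef format:
//   locations/global/firewallPolicies/<firewallPolicy>/rules/<priority>
// Names and package layout here are illustrative only.
package main

import (
	"fmt"
	"strings"
)

type firewallPolicyRuleParent struct {
	FirewallPolicy string
}

func (p *firewallPolicyRuleParent) String() string {
	return "locations/global/firewallPolicies/" + p.FirewallPolicy
}

// formatExternal mirrors asComputeFirewallPolicyRuleExternal from the patch.
func formatExternal(parent *firewallPolicyRuleParent, priority string) string {
	return parent.String() + "/rules/" + priority
}

// parseExternal mirrors parseComputeFirewallPolicyRuleExternal from the patch.
func parseExternal(external string) (*firewallPolicyRuleParent, string, error) {
	tokens := strings.Split(external, "/")
	if len(tokens) != 6 || tokens[0] != "locations" || tokens[2] != "firewallPolicies" || tokens[4] != "rules" {
		return nil, "", fmt.Errorf("unexpected ComputeFirewallPolicyRule external %q", external)
	}
	return &firewallPolicyRuleParent{FirewallPolicy: tokens[3]}, tokens[5], nil
}

func main() {
	external := formatExternal(&firewallPolicyRuleParent{FirewallPolicy: "my-policy"}, "9000")
	fmt.Println(external) // locations/global/firewallPolicies/my-policy/rules/9000

	parent, priority, err := parseExternal(external)
	if err != nil {
		panic(err)
	}
	fmt.Println(parent.FirewallPolicy, priority) // my-policy 9000
}
```

The parsing mirrors `parseComputeFirewallPolicyRuleExternal`, which is how `NewComputeFirewallPolicyRuleRef` can validate a changed `spec.priority` or `spec.firewallPolicyRef` against the previously stored `status.externalRef` and reject the change.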