From 3650d092f581a5f1ce9ee710461697897cd998dc Mon Sep 17 00:00:00 2001 From: Ivan Kornienko Date: Mon, 23 Sep 2019 18:29:47 +0300 Subject: [PATCH] Migrated to Cloud Build #29 --- .ruby-version | 1 - CHANGELOG.md | 10 +- CONTRIBUTING.md | 101 +++++++ Gemfile | 19 -- Makefile | 196 ++++-------- README.md | 108 +------ build/int.cloudbuild.yaml | 41 +++ .../lint.cloudbuild.yaml | 11 + modules/pubsub/README.md | 2 +- test/.gitignore | 1 + test/boilerplate/boilerplate.Dockerfile.txt | 13 - test/boilerplate/boilerplate.Makefile.txt | 13 - test/boilerplate/boilerplate.go.txt | 15 - test/boilerplate/boilerplate.sh.txt | 13 - test/boilerplate/boilerplate.xml.txt | 15 - test/boilerplate/boilerplate.yaml.txt | 13 - test/ci_integration.sh | 68 ----- test/make.sh | 163 ---------- test/setup/.gitignore | 1 + test/setup/iam.tf | 125 ++++++++ test/setup/main.tf | 49 +++ test/setup/make_source.sh | 28 ++ test/setup/outputs.tf | 24 ++ test/setup/variables.tf | 29 ++ .../boilerplate.tf.txt => setup/versions.tf} | 12 + test/test_verify_boilerplate.py | 140 --------- test/verify_boilerplate.py | 283 ------------------ variables.tf | 1 - 28 files changed, 488 insertions(+), 1007 deletions(-) delete mode 100644 .ruby-version create mode 100644 CONTRIBUTING.md delete mode 100644 Gemfile create mode 100644 build/int.cloudbuild.yaml rename test/boilerplate/boilerplate.py.txt => build/lint.cloudbuild.yaml (66%) create mode 100644 test/.gitignore delete mode 100644 test/boilerplate/boilerplate.Dockerfile.txt delete mode 100644 test/boilerplate/boilerplate.Makefile.txt delete mode 100644 test/boilerplate/boilerplate.go.txt delete mode 100644 test/boilerplate/boilerplate.sh.txt delete mode 100644 test/boilerplate/boilerplate.xml.txt delete mode 100644 test/boilerplate/boilerplate.yaml.txt delete mode 100755 test/ci_integration.sh delete mode 100755 test/make.sh create mode 100644 test/setup/.gitignore create mode 100644 test/setup/iam.tf create mode 100644 test/setup/main.tf create mode 100755 test/setup/make_source.sh create mode 100644 test/setup/outputs.tf create mode 100644 test/setup/variables.tf rename test/{boilerplate/boilerplate.tf.txt => setup/versions.tf} (80%) delete mode 100755 test/test_verify_boilerplate.py delete mode 100644 test/verify_boilerplate.py diff --git a/.ruby-version b/.ruby-version deleted file mode 100644 index aedc15bb..00000000 --- a/.ruby-version +++ /dev/null @@ -1 +0,0 @@ -2.5.3 diff --git a/CHANGELOG.md b/CHANGELOG.md index 268d8205..c0f49bf3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [3.0.1] - 2019-XX-YY + +### Changed + +- Migrated to Cloud Build. [#33] + ## [3.0.0] - 2019-07-23 ### Changed @@ -45,13 +51,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Initial release of log export module. 
-[Unreleased]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v3.0.0...HEAD +[Unreleased]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v3.0.1...HEAD +[3.0.1]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v3.0.0...v3.0.1 [3.0.0]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v2.3.0...v3.0.0 [2.3.0]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v2.2.0...v2.3.0 [2.2.0]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v2.1.0...v2.2.0 [2.1.0]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v2.0.0...v2.1.0 [2.0.0]: https://github.com/terraform-google-modules/terraform-google-log-export/compare/v1.0.0...v2.0.0 [1.0.0]: https://github.com/terraform-google-modules/terraform-google-log-export/releases/tag/v1.0.0 +[#33]: https://github.com/terraform-google-modules/terraform-google-log-export/pull/33 [#22]: https://github.com/terraform-google-modules/terraform-google-log-export/pull/22 [#19]: https://github.com/terraform-google-modules/terraform-google-log-export/pull/19 [#18]: https://github.com/terraform-google-modules/terraform-google-log-export/pull/18 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..3502a701 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,101 @@ +# Contributing + +This document provides guidelines for contributing to the module. + +## Dependencies + +The following dependencies must be installed on the development system: + +- [Docker Engine][docker-engine] +- [Google Cloud SDK][google-cloud-sdk] +- [make] + +## Generating Documentation for Inputs and Outputs + +The Inputs and Outputs tables in the READMEs of the root module, +submodules, and example modules are automatically generated based on +the `variables` and `outputs` of the respective modules. These tables +must be refreshed if the module interfaces are changed. + +### Execution + +Run `make generate_docs` to generate new Inputs and Outputs tables. + +## Integration Testing + +Integration tests are used to verify the behaviour of the root module, +submodules, and example modules. Additions, changes, and fixes should +be accompanied with tests. + +The integration tests are run using [Kitchen][kitchen], +[Kitchen-Terraform][kitchen-terraform], and [InSpec][inspec]. These +tools are packaged within a Docker image for convenience. + +The general strategy for these tests is to verify the behaviour of the +[example modules](./examples/), thus ensuring that the root module, +submodules, and example modules are all functionally correct. + +### Test Environment +The easiest way to test the module is in an isolated test project. The setup for such a project is defined in [test/setup](./test/setup/) directory. + +To use this setup, you need a service account with Project Creator access on a folder. 
Export the Service Account credentials to your environment like so: + +``` +export SERVICE_ACCOUNT_JSON=$(< credentials.json) +``` + +You will also need to set a few environment variables: +``` +export TF_VAR_project_id="project_id_of_test_project" +export TF_VAR_parent_resource_project="project_id_of_test_project" +export TF_VAR_parent_resource_folder="folder_id_of_test_folder" +export TF_VAR_parent_resource_organization="org_id_of_test_organization" +export TF_VAR_parent_resource_billing_account="billing_account_id_of_test_billing_account" +``` + +With these settings in place, you can prepare a test project using Docker: +``` +make docker_test_prepare +``` + +### Noninteractive Execution + +Run `make docker_test_integration` to test all of the example modules +noninteractively, using the prepared test project. + +### Interactive Execution + +1. Run `make docker_run` to start the testing Docker container in + interactive mode. + +1. Run `kitchen_do create <EXAMPLE_NAME>` to initialize the working + directory for an example module. + +1. Run `kitchen_do converge <EXAMPLE_NAME>` to apply the example module. + +1. Run `kitchen_do verify <EXAMPLE_NAME>` to test the example module. + +1. Run `kitchen_do destroy <EXAMPLE_NAME>` to destroy the example module + state. + +## Linting and Formatting + +Many of the files in the repository can be linted or formatted to +maintain a standard of quality. + +### Execution + +Run `make docker_test_lint`. + +[docker-engine]: https://www.docker.com/products/docker-engine +[flake8]: http://flake8.pycqa.org/en/latest/ +[gofmt]: https://golang.org/cmd/gofmt/ +[google-cloud-sdk]: https://cloud.google.com/sdk/install +[hadolint]: https://github.com/hadolint/hadolint +[inspec]: https://inspec.io/ +[kitchen-terraform]: https://github.com/newcontext-oss/kitchen-terraform +[kitchen]: https://kitchen.ci/ +[make]: https://en.wikipedia.org/wiki/Make_(software) +[shellcheck]: https://www.shellcheck.net/ +[terraform-docs]: https://github.com/segmentio/terraform-docs +[terraform]: https://terraform.io/ diff --git a/Gemfile b/Gemfile deleted file mode 100644 index e06f99da..00000000 --- a/Gemfile +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
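Putting the pieces of the new CONTRIBUTING.md together, a local test session chains the prepare, lint, and interactive targets. The sketch below is illustrative only: the project, folder, organization, and billing values are placeholders, and `my-example` stands in for whichever example module you exercise with `kitchen_do`.

```bash
#!/usr/bin/env bash
# Minimal local test session built from the Makefile targets introduced in
# this change. All identifiers below are placeholder values.
set -euo pipefail

export SERVICE_ACCOUNT_JSON=$(< credentials.json)
export TF_VAR_project_id="my-test-project"
export TF_VAR_parent_resource_project="my-test-project"
export TF_VAR_parent_resource_folder="000000000000"
export TF_VAR_parent_resource_organization="000000000000"
export TF_VAR_parent_resource_billing_account="AAAAAA-BBBBBB-CCCCCC"

make docker_test_prepare   # provision the isolated test project via test/setup
make docker_test_lint      # run the aggregated linters in the same image
make docker_run            # drop into the container; inside it, run:
                           #   kitchen_do create   my-example
                           #   kitchen_do converge my-example
                           #   kitchen_do verify   my-example
                           #   kitchen_do destroy  my-example
```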
- -ruby '2.6.3' - -source 'https://rubygems.org/' do - gem 'kitchen-terraform', '~> 4.0' -end diff --git a/Makefile b/Makefile index 6f565346..0c4e0523 100644 --- a/Makefile +++ b/Makefile @@ -18,162 +18,72 @@ # Make will use bash instead of sh SHELL := /usr/bin/env bash -# Docker build config variables -CREDENTIALS_PATH ?= /cft/workdir/credentials.json +DOCKER_TAG_VERSION_DEVELOPER_TOOLS := 0.1.0 +DOCKER_IMAGE_DEVELOPER_TOOLS := cft/developer-tools +REGISTRY_URL := gcr.io/cloud-foundation-cicd - -# Docker build config variables -DOCKER_ORG := gcr.io/cloud-foundation-cicd -DOCKER_TAG_BASE_KITCHEN_TERRAFORM ?= 2.1.0 -DOCKER_REPO_BASE_KITCHEN_TERRAFORM := ${DOCKER_ORG}/cft/kitchen-terraform:${DOCKER_TAG_BASE_KITCHEN_TERRAFORM} - -# All is the first target in the file so it will get picked up when you just run 'make' on its own -all: check generate_docs - -# Run all available linters -check: check_shell check_python check_golang check_terraform check_docker check_base_files test_check_headers check_headers check_trailing_whitespace - -# The .PHONY directive tells make that this isn't a real target and so -# the presence of a file named 'check_shell' won't cause this target to stop -# working -.PHONY: check_shell -check_shell: - @source test/make.sh && check_shell - -.PHONY: check_python -check_python: - @source test/make.sh && check_python - -.PHONY: check_golang -check_golang: - @source test/make.sh && golang - -.PHONY: check_terraform -check_terraform: - @source test/make.sh && check_terraform - -.PHONY: check_docker -check_docker: - @source test/make.sh && docker - -.PHONY: check_base_files -check_base_files: - @source test/make.sh && basefiles - -.PHONY: check_trailing_whitespace -check_trailing_whitespace: - @source test/make.sh && check_trailing_whitespace - -.PHONY: test_check_headers -test_check_headers: - @echo "Testing the validity of the header check" - @python test/test_verify_boilerplate.py - -.PHONY: check_headers -check_headers: - @source test/make.sh && check_headers - -# Integration tests -.PHONY: test_integration -test_integration: - test/ci_integration.sh - -.PHONY: generate_docs -generate_docs: - @source test/make.sh && generate_docs - -# Versioning -.PHONY: version -version: - @source helpers/version-repo.sh - -# Run docker +# Enter docker container for local development .PHONY: docker_run docker_run: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v "$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - /bin/bash -c "source test/ci_integration.sh && setup_environment && exec /bin/bash" - -.PHONY: docker_create -docker_create: + -e TF_VAR_project_id \ + -e TF_VAR_parent_resource_project \ + -e TF_VAR_parent_resource_folder \ + -e TF_VAR_parent_resource_billing_account \ + -e TF_VAR_parent_resource_organization \ + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /bin/bash + +# Execute prepare tests within the docker container +.PHONY: docker_test_prepare +docker_test_prepare: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v 
"$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - /bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen create" - -.PHONY: docker_converge -docker_converge: + -e TF_VAR_project_id \ + -e TF_VAR_parent_resource_project \ + -e TF_VAR_parent_resource_folder \ + -e TF_VAR_parent_resource_billing_account \ + -e TF_VAR_parent_resource_organization \ + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /usr/local/bin/execute_with_credentials.sh prepare_environment + +# Clean up test environment within the docker container +.PHONY: docker_test_cleanup +docker_test_cleanup: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v "$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - /bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen converge" + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /usr/local/bin/execute_with_credentials.sh cleanup_environment -.PHONY: docker_verify -docker_verify: +# Execute integration tests within the docker container +.PHONY: docker_test_integration +docker_test_integration: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v "$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - /bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen verify" + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /usr/local/bin/test_integration.sh -.PHONY: docker_destroy -docker_destroy: +# Execute lint tests within the docker container +.PHONY: docker_test_lint +docker_test_lint: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ - -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v "$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - /bin/bash -c "source test/ci_integration.sh && setup_environment && kitchen destroy" + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /usr/local/bin/test_lint.sh -.PHONY: test_integration_docker -test_integration_docker: +# Generate documentation +.PHONY: docker_generate_docs +docker_generate_docs: docker run --rm -it \ - -e PROJECT_ID \ - -e PARENT_RESOURCE_PROJECT \ - -e PARENT_RESOURCE_FOLDER \ - -e PARENT_RESOURCE_BILLING_ACCOUNT \ - -e PARENT_RESOURCE_ORGANIZATION \ - -e SERVICE_ACCOUNT_JSON \ - -e GOOGLE_APPLICATION_CREDENTIALS=${CREDENTIALS_PATH} \ - -e SUITE \ - -v "$(CURDIR)":/cft/workdir \ - ${DOCKER_REPO_BASE_KITCHEN_TERRAFORM} \ - make test_integration + -v $(CURDIR):/workspace \ + $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ + /bin/bash -c 'source /usr/local/bin/task_helper_functions.sh && generate_docs' + +# Alias for backwards compatibility +.PHONY: generate_docs +generate_docs: 
docker_generate_docs diff --git a/README.md b/README.md index d8c988f4..10769669 100644 --- a/README.md +++ b/README.md @@ -69,8 +69,8 @@ so that all dependencies are met. ## Requirements ### Terraform plugins -- [Terraform](https://www.terraform.io/downloads.html) 0.11.x -- [terraform-provider-google](https://github.com/terraform-providers/terraform-provider-google) plugin ~> v2.0.x +- [Terraform](https://www.terraform.io/downloads.html) 0.12.x +- [terraform-provider-google](https://github.com/terraform-providers/terraform-provider-google) plugin ~> v2.7.x ### Configure a Service Account In order to execute this module you must have a Service Account with the following: @@ -112,107 +112,5 @@ In order to operate with the Service Account you must activate the following API ## Install ### Terraform -Be sure you have the correct Terraform version (0.11.x), you can choose the binary here: +Be sure you have the correct Terraform version (0.12.x), you can choose the binary here: - https://releases.hashicorp.com/terraform/ - -## Testing - -### Requirements -- [bundler](https://github.com/bundler/bundler) -- [gcloud](https://cloud.google.com/sdk/install) -- [terraform-docs](https://github.com/segmentio/terraform-docs/releases) 0.6.0 - -### Autogeneration of documentation from .tf files -Run -``` -make generate_docs -``` - -### Integration test - -Integration tests are run though [test-kitchen](https://github.com/test-kitchen/test-kitchen), [kitchen-terraform](https://github.com/newcontext-oss/kitchen-terraform), and [InSpec](https://github.com/inspec/inspec). - -`test-kitchen` instances are defined in [`.kitchen.yml`](./.kitchen.yml). The test-kitchen instances in `test/fixtures/` wrap identically-named examples in the `examples/` directory. - -#### Setup - -1. Configure the [test fixtures](#test-configuration) -2. Download a Service Account key with the necessary permissions and copy the contents of that JSON file into the `SERVICE_ACCOUNT_JSON` environment variable: - - ``` - export SERVICE_ACCOUNT_JSON=$(cat /path/to/credentials.json) - ``` - -3. Set the required environment variables as defined in [`./test/ci_integration.sh`](./test/ci_integration.sh): - - ``` - export PROJECT_ID="project_id_of_test_project" - export PARENT_RESOURCE_PROJECT="project_id_of_test_project" - export PARENT_RESOURCE_FOLDER="folder_id_of_test_folder" - export PARENT_RESOURCE_ORGANIZATION="org_id_of_test_organization" - export PARENT_RESOURCE_BILLING_ACCOUNT="billing_account_id_of_test_billing_account" - export SUITE="test_suite_name" # Leave empty to run all tests - ``` - -4. Run the testing container in interactive mode: - - ``` - make docker_run - ``` - - The module root directory will be loaded into the Docker container at `/cft/workdir/`. -5. Run kitchen-terraform to test the infrastructure: - - 1. `make docker_create` creates Terraform state and downloads modules, if applicable. - 2. `make docker_converge` creates the underlying resources. Run `source test/ci_integration.sh && setup_environment && kitchen converge ` to run a specific test case. - 3. `make docker_verify` tests the created infrastructure. Run `source test/ci_integration.sh && setup_environment && kitchen verify ` to run a specific test case. - 4. `make docker_destroy` tears down the underlying resources created by `make docker_converge`. Run `source test/ci_integration.sh && setup_environment && kitchen destroy ` to tear down resources for a specific test case. 
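With the README requirement moving from Terraform 0.11.x to 0.12.x, a quick way to pick up a matching binary from releases.hashicorp.com and confirm it is sketched below; the exact point release (0.12.9 here) and the linux_amd64 platform are illustrative choices, not requirements of this change.

```bash
#!/usr/bin/env bash
# Fetch a Terraform 0.12.x release and verify the version before using the
# module. 0.12.9 and linux_amd64 are example values only.
set -euo pipefail

TF_VERSION="0.12.9"
curl -sSLo /tmp/terraform.zip \
  "https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip"
mkdir -p "$HOME/bin"
unzip -o /tmp/terraform.zip -d "$HOME/bin"
export PATH="$HOME/bin:$PATH"

terraform version   # expect: Terraform v0.12.x
```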
- -Alternatively, you can simply run `make test_integration_docker` to run all the test steps non-interactively. - -### Autogeneration of documentation from .tf files -Run -``` -make generate_docs -``` - -### Linting -The makefile in this project will lint or sometimes just format any shell, -Python, golang, Terraform, or Dockerfiles. The linters will only be run if -the makefile finds files with the appropriate file extension. - -All of the linter checks are in the default make target, so you just have to -run - -``` -make -s -``` - -The -s is for 'silent'. Successful output looks like this - -``` -Running shellcheck -Running flake8 -Running go fmt and go vet -Running terraform validate -Running hadolint on Dockerfiles -Checking for required files -Testing the validity of the header check -.. ----------------------------------------------------------------------- -Ran 2 tests in 0.026s - -OK -Checking file headers -The following lines have trailing whitespace -``` - -The linters -are as follows: -* Shell - shellcheck. Can be found in homebrew -* Python - flake8. Can be installed with 'pip install flake8' -* Golang - gofmt. gofmt comes with the standard golang installation. golang -is a compiled language so there is no standard linter. -* Terraform - terraform has a built-in linter in the 'terraform validate' -command. -* Dockerfiles - hadolint. Can be found in homebrew diff --git a/build/int.cloudbuild.yaml b/build/int.cloudbuild.yaml new file mode 100644 index 00000000..bcd9b6d5 --- /dev/null +++ b/build/int.cloudbuild.yaml @@ -0,0 +1,41 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +timeout: 3600s +steps: +- id: prepare + name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && prepare_environment'] + env: + - 'TF_VAR_org_id=$_ORG_ID' + - 'TF_VAR_folder_id=$_FOLDER_ID' + - 'TF_VAR_billing_account=$_BILLING_ACCOUNT' +- id: create + name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + args: ['/bin/bash', '-c', 'cat test/source.sh && source /usr/local/bin/task_helper_functions.sh && kitchen_do create'] +- id: converge + name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do converge'] +- id: verify + name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do verify'] +- id: destroy + name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do destroy'] +tags: +- 'ci' +- 'integration' +substitutions: + _DOCKER_IMAGE_DEVELOPER_TOOLS: 'cft/developer-tools' + _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '0.1.0' diff --git a/test/boilerplate/boilerplate.py.txt b/build/lint.cloudbuild.yaml similarity index 66% rename from test/boilerplate/boilerplate.py.txt rename to build/lint.cloudbuild.yaml index 086a24e6..1dc48c3a 100644 --- a/test/boilerplate/boilerplate.py.txt +++ b/build/lint.cloudbuild.yaml @@ -11,3 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +steps: +- name: 'gcr.io/cloud-foundation-cicd/cft/developer-tools:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS' + id: 'lint' + args: ['/usr/local/bin/test_lint.sh'] +tags: +- 'ci' +- 'lint' +substitutions: + _DOCKER_IMAGE_DEVELOPER_TOOLS: 'cft/developer-tools' + _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '0.1.0' diff --git a/modules/pubsub/README.md b/modules/pubsub/README.md index 01777379..fb06025f 100644 --- a/modules/pubsub/README.md +++ b/modules/pubsub/README.md @@ -40,7 +40,7 @@ so that all dependencies are met. | create\_subscriber | Whether to create a subscription to the topic that was created and used for log entries matching the filter. If 'true', a subscription is created along with a service account that is granted roles/pubsub.subscriber and roles/pubsub.viewer to the topic. | string | `"false"` | no | | log\_sink\_writer\_identity | The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). | string | n/a | yes | | project\_id | The ID of the project in which the pubsub topic will be created. | string | n/a | yes | -| topic\_labels | A set of key/value label pairs to assign to the pubsub topic. | map | `` | no | +| topic\_labels | A set of key/value label pairs to assign to the pubsub topic. | map(string) | `` | no | | topic\_name | The name of the pubsub topic to be created and used for log entries matching the filter. 
| string | n/a | yes | ## Outputs diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 00000000..d69ba0d4 --- /dev/null +++ b/test/.gitignore @@ -0,0 +1 @@ +source.sh diff --git a/test/boilerplate/boilerplate.Dockerfile.txt b/test/boilerplate/boilerplate.Dockerfile.txt deleted file mode 100644 index 086a24e6..00000000 --- a/test/boilerplate/boilerplate.Dockerfile.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/test/boilerplate/boilerplate.Makefile.txt b/test/boilerplate/boilerplate.Makefile.txt deleted file mode 100644 index 086a24e6..00000000 --- a/test/boilerplate/boilerplate.Makefile.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/test/boilerplate/boilerplate.go.txt b/test/boilerplate/boilerplate.go.txt deleted file mode 100644 index 0fb04bee..00000000 --- a/test/boilerplate/boilerplate.go.txt +++ /dev/null @@ -1,15 +0,0 @@ -/* -Copyright 2019 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ diff --git a/test/boilerplate/boilerplate.sh.txt b/test/boilerplate/boilerplate.sh.txt deleted file mode 100644 index ddd71c00..00000000 --- a/test/boilerplate/boilerplate.sh.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
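The two build configurations introduced above under `build/` can also be submitted by hand while iterating on them. A hedged sketch follows: it assumes `gcloud` is authenticated against a project that is allowed to pull the `cft/developer-tools` image, and the organization, folder, and billing identifiers are placeholders that the CI triggers normally supply as substitutions.

```bash
#!/usr/bin/env bash
# Manually submit the lint and integration Cloud Build configs from the
# repository root. Substitution values below are placeholders.
set -euo pipefail

# Lint: the substitutions block in the YAML provides a default image tag.
gcloud builds submit . --config build/lint.cloudbuild.yaml

# Integration: org, folder, and billing account substitutions are required.
gcloud builds submit . --config build/int.cloudbuild.yaml \
  --substitutions _ORG_ID=000000000000,_FOLDER_ID=111111111111,_BILLING_ACCOUNT=AAAAAA-BBBBBB-CCCCCC
```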
diff --git a/test/boilerplate/boilerplate.xml.txt b/test/boilerplate/boilerplate.xml.txt deleted file mode 100644 index cd0c8f4f..00000000 --- a/test/boilerplate/boilerplate.xml.txt +++ /dev/null @@ -1,15 +0,0 @@ - diff --git a/test/boilerplate/boilerplate.yaml.txt b/test/boilerplate/boilerplate.yaml.txt deleted file mode 100644 index 086a24e6..00000000 --- a/test/boilerplate/boilerplate.yaml.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/test/ci_integration.sh b/test/ci_integration.sh deleted file mode 100755 index 600ee664..00000000 --- a/test/ci_integration.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Always clean up. -DELETE_AT_EXIT="$(mktemp -d)" -finish() { - echo 'BEGIN: finish() trap handler' >&2 - kitchen destroy "$SUITE" - [[ -d "${DELETE_AT_EXIT}" ]] && rm -rf "${DELETE_AT_EXIT}" - echo 'END: finish() trap handler' >&2 -} - -# Map the input parameters provided by Concourse CI, or whatever mechanism is -# running the tests to Terraform input variables. Also setup credentials for -# use with kitchen-terraform, inspec, and gcloud. -setup_environment() { - local tmpfile - tmpfile="$(mktemp)" - echo "${SERVICE_ACCOUNT_JSON}" > "${tmpfile}" - - # gcloud variables - export CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE="${tmpfile}" - # Application default credentials (Terraform google provider and inspec-gcp) - export GOOGLE_APPLICATION_CREDENTIALS="${tmpfile}" - - # Terraform variables - export TF_VAR_project_id="$PROJECT_ID" - export TF_VAR_parent_resource_project="$PARENT_RESOURCE_PROJECT" - export TF_VAR_parent_resource_folder="$PARENT_RESOURCE_FOLDER" - export TF_VAR_parent_resource_billing_account="$PARENT_RESOURCE_BILLING_ACCOUNT" - export TF_VAR_parent_resource_organization="$PARENT_RESOURCE_ORGANIZATION" -} - -main() { - export SUITE="${SUITE:-}" - - set -eu - # Setup trap handler to auto-cleanup - export TMPDIR="${DELETE_AT_EXIT}" - trap finish EXIT - - # Setup environment variables - setup_environment - set -x - - # Execute the test lifecycle - kitchen create "$SUITE" - kitchen converge "$SUITE" - kitchen verify "$SUITE" -} - -# if script is being executed and not sourced. 
-if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then - main "$@" -fi diff --git a/test/make.sh b/test/make.sh deleted file mode 100755 index 70ccb96e..00000000 --- a/test/make.sh +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Please note that this file was generated from [terraform-google-module-template](https://github.com/terraform-google-modules/terraform-google-module-template). -# Please make sure to contribute relevant changes upstream! - -# Create a temporary directory that's auto-cleaned, even if the process aborts. -DELETE_AT_EXIT="$(mktemp -d)" -finish() { - [[ -d "${DELETE_AT_EXIT}" ]] && rm -rf "${DELETE_AT_EXIT}" -} -trap finish EXIT -# Create a temporary file in the auto-cleaned up directory while avoiding -# overwriting TMPDIR for other processes. -# shellcheck disable=SC2120 # (Arguments may be passed, e.g. maketemp -d) -maketemp() { - TMPDIR="${DELETE_AT_EXIT}" mktemp "$@" -} - -# find_files is a helper to exclude .git directories and match only regular -# files to avoid double-processing symlinks. -find_files() { - local pth="$1" - shift - find "${pth}" '(' -path '*/.git' -o -path '*/.terraform' ')' \ - -prune -o -type f "$@" -} - -# Compatibility with both GNU and BSD style xargs. -compat_xargs() { - local compat=() - # Test if xargs is GNU or BSD style. GNU xargs will succeed with status 0 - # when given --no-run-if-empty and no input on STDIN. BSD xargs will fail and - # exit status non-zero If xargs fails, assume it is BSD style and proceed. - # stderr is silently redirected to avoid console log spam. - if xargs --no-run-if-empty /dev/null; then - compat=("--no-run-if-empty") - fi - xargs "${compat[@]}" "$@" -} - -# This function makes sure that the required files for -# releasing to OSS are present -function basefiles() { - local fn required_files="LICENSE README.md" - echo "Checking for required files ${required_files}" - for fn in ${required_files}; do - test -f "${fn}" || echo "Missing required file ${fn}" - done -} - -# This function runs the hadolint linter on -# every file named 'Dockerfile' -function docker() { - echo "Running hadolint on Dockerfiles" - find_files . -name "Dockerfile" -print0 \ - | compat_xargs -0 hadolint -} - -# This function runs 'terraform validate' against all -# directory paths which contain *.tf files. -function check_terraform() { - echo "Running terraform validate" - find . -name "*.tf" \ - -not -path "./.terraform/*" \ - -not -path "./test/fixtures/*/.terraform/*" \ - -not -path "./test/fixtures/all_examples/*" \ - -not -path "./test/fixtures/shared/*" \ - -print0 \ - | xargs -0 dirname | sort | uniq \ - | xargs -L 1 -i{} bash -c 'terraform init "{}" > /dev/null && terraform validate "{}"' - echo "Running terraform fmt" - terraform fmt -check=true -write=false -} - -# This function runs 'go fmt' and 'go vet' on every file -# that ends in '.go' -function golang() { - echo "Running go fmt and go vet" - find_files . 
-name "*.go" -print0 | compat_xargs -0 -n1 go fmt - find_files . -name "*.go" -print0 | compat_xargs -0 -n1 go vet -} - -# This function runs the flake8 linter on every file -# ending in '.py' -function check_python() { - echo "Running flake8" - find_files . -name "*.py" -print0 | compat_xargs -0 flake8 - return 0 -} - -# This function runs the shellcheck linter on every -# file ending in '.sh' -function check_shell() { - echo "Running shellcheck" - find_files . -name "*.sh" -print0 | compat_xargs -0 shellcheck -x -} - -# This function makes sure that there is no trailing whitespace -# in any files in the project. -# There are some exclusions -function check_trailing_whitespace() { - local rc - echo "Checking for trailing whitespace" - find_files . -print \ - | grep -v -E '\.(pyc|png)$' \ - | grep -v '\.kitchen/' \ - | compat_xargs grep -H -n '[[:blank:]]$' - rc=$? - if [[ ${rc} -eq 0 ]]; then - return 1 - fi -} - -function generate_docs() { - echo "Generating markdown docs with terraform-docs" - local path - while read -r path; do - if [[ -e "${path}/README.md" ]]; then - # script seem to be designed to work into current directory - cd "${path}" && echo "Working in ${path} ..." - terraform_docs.sh . && echo Success! || echo "Warning! Exit code: ${?}" - #shellcheck disable=2164 - cd - >/dev/null - else - echo "Skipping ${path} because README.md does not exist." - fi - done < <(find_files . -name '*.tf' -print0 \ - | compat_xargs -0 -n1 dirname \ - | sort -u) -} - -function prepare_test_variables() { - echo "Preparing terraform.tfvars files for integration tests" - #shellcheck disable=2044 - for i in $(find ./test/fixtures -type f -name terraform.tfvars.sample); do - destination=${i/%.sample/} - if [ ! -f "${destination}" ]; then - cp "${i}" "${destination}" - echo "${destination} has been created. Please edit it to reflect your GCP configuration." - fi - done -} - -function check_headers() { - echo "Checking file headers" - # Use the exclusion behavior of find_files - find_files . -type f -print0 \ - | compat_xargs -0 python test/verify_boilerplate.py -} diff --git a/test/setup/.gitignore b/test/setup/.gitignore new file mode 100644 index 00000000..3f5ca68a --- /dev/null +++ b/test/setup/.gitignore @@ -0,0 +1 @@ +terraform.tfvars diff --git a/test/setup/iam.tf b/test/setup/iam.tf new file mode 100644 index 00000000..cc3688ea --- /dev/null +++ b/test/setup/iam.tf @@ -0,0 +1,125 @@ +/** + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +locals { + log_export_required_roles = [ + # Needed for the Pubsub submodule to create a service account for the + # subscription it creates + "roles/iam.serviceAccountAdmin", + + # Needed for the cloud storage submodule to create/delete a bucket + "roles/storage.admin", + + # Needed for the pubsub submodule to create/delete a pubsub topic + "roles/pubsub.admin", + + # Needed for the bigquery submodule to create/delete a bigquery dataset + "roles/bigquery.dataOwner", + + # Needed for the root module to activate APIs + "roles/serviceusage.serviceUsageAdmin", + + # Needed for the Pubsub submodule to assign roles/bigquery.dataEditor to + # the service account it creates + "roles/resourcemanager.projectIamAdmin", + + # Required to create log sinks from the project level + "roles/logging.configWriter", + ] + + log_export_billing_account_roles = [ + # Required to associate billing accounts to new projects + "roles/billing.user", + ] + + log_export_organization_roles = [ + # Required to create log sinks from the organization level on down + "roles/logging.configWriter", + + # Required to associate billing accounts to new projects + "roles/billing.projectManager", + ] + + log_export_folder_roles = [ + # Required to spin up a project within the log_export folder + "roles/resourcemanager.projectCreator", + + # Required to create log sinks from the folder level + "roles/logging.configWriter", + ] +} + +resource "google_service_account" "int_test" { + project = module.project.project_id + account_id = "ci-account" + display_name = "ci-account" +} + +resource "google_project_iam_member" "int_test" { + for_each = toset(local.log_export_required_roles) + + project = module.project.project_id + role = each.value + member = "serviceAccount:${google_service_account.int_test.email}" +} + +resource "google_billing_account_iam_member" "int_test" { + for_each = toset(local.log_export_billing_account_roles) + + billing_account_id = var.billing_account + role = each.value + member = "serviceAccount:${google_service_account.int_test.email}" +} + +# roles/logging.configWriter is needed at the organization level to be able to +# test organization level log sinks. +resource "google_organization_iam_member" "int_test" { + for_each = toset(local.log_export_organization_roles) + + org_id = var.org_id + role = each.value + member = "serviceAccount:${google_service_account.int_test.email}" +} + +# There is a test in the log-exports module that needs to spin up a project +# within a folder, and then reference that project within the test. 
Because +# of that test we need to assign roles/resourcemanager.projectCreator on the +# folder we're using for log-exports +resource "google_folder_iam_member" "int_test" { + for_each = toset(local.log_export_folder_roles) + + folder = var.folder_id + role = each.value + member = "serviceAccount:${google_service_account.int_test.email}" +} + +resource "google_service_account_key" "int_test" { + service_account_id = google_service_account.int_test.id +} + +resource "null_resource" "wait_permissions" { + # Adding a pause as a workaround for of the provider issue + # https://github.com/terraform-providers/terraform-provider-google/issues/1131 + provisioner "local-exec" { + command = "echo sleep 30s for permissions to get granted; sleep 30" + } + depends_on = [ + google_billing_account_iam_member.int_test, + google_folder_iam_member.int_test, + google_organization_iam_member.int_test, + google_project_iam_member.int_test + ] +} diff --git a/test/setup/main.tf b/test/setup/main.tf new file mode 100644 index 00000000..2665615c --- /dev/null +++ b/test/setup/main.tf @@ -0,0 +1,49 @@ +/** + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +module "project" { + source = "terraform-google-modules/project-factory/google" + version = "~> 3.0" + + name = "ci-log-export" + random_project_id = "true" + org_id = var.org_id + folder_id = var.folder_id + billing_account = var.billing_account + + activate_apis = [ + "cloudresourcemanager.googleapis.com", + "oslogin.googleapis.com", + "serviceusage.googleapis.com", + "compute.googleapis.com", + "bigquery-json.googleapis.com", + "pubsub.googleapis.com", + "storage-component.googleapis.com", + "storage-api.googleapis.com", + "logging.googleapis.com", + "iam.googleapis.com", + "cloudbilling.googleapis.com" + ] +} + +resource "null_resource" "wait_apis" { + # Adding a pause as a workaround for of the provider issue + # https://github.com/terraform-providers/terraform-provider-google/issues/1131 + provisioner "local-exec" { + command = "echo sleep 30s for APIs to get enabled; sleep 30" + } + depends_on = [module.project.project_id] +} diff --git a/test/setup/make_source.sh b/test/setup/make_source.sh new file mode 100755 index 00000000..46555b1c --- /dev/null +++ b/test/setup/make_source.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
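The setup fixture above is what CI relies on, but it can also be driven by hand when bootstrapping a new test project. A minimal sketch, assuming a `terraform.tfvars` in `test/setup` supplies `org_id`, `folder_id`, and `billing_account` (the file itself is gitignored), and run from the repository root; the key file path is an arbitrary choice:

```bash
#!/usr/bin/env bash
# Apply the test/setup fixture manually and capture its outputs.
# Assumes test/setup/terraform.tfvars provides org_id, folder_id, and
# billing_account.
set -euo pipefail
cd test/setup

terraform init
terraform apply -auto-approve

project_id="$(terraform output project_id)"
terraform output sa_key | base64 --decode > /tmp/ci-sa.json
echo "CI test project: ${project_id}"
echo "Service account key written to /tmp/ci-sa.json"
```

The `make_source.sh` script that follows performs the same output extraction to build the `source.sh` referenced by the integration build.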
+ +echo "#!/usr/bin/env bash" > ../source.sh + +project_id=$(terraform output project_id) +sa_json=$(terraform output sa_key) + +# shellcheck disable=SC2086,SC2154 +{ echo "export TF_VAR_project_id='$project_id'"; \ +echo "export TF_VAR_parent_resource_project='$project_id'"; \ +echo "export TF_VAR_parent_resource_folder='$TF_VAR_folder_id'"; \ +echo "export TF_VAR_parent_resource_billing_account='$TF_VAR_billing_account'"; \ +echo "export TF_VAR_parent_resource_organization='$TF_VAR_org_id'"; \ +echo "export SERVICE_ACCOUNT_JSON='$(echo $sa_json | base64 --decode)'"; } >> ../source.sh diff --git a/test/setup/outputs.tf b/test/setup/outputs.tf new file mode 100644 index 00000000..357bb1e4 --- /dev/null +++ b/test/setup/outputs.tf @@ -0,0 +1,24 @@ +/** + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +output "project_id" { + value = module.project.project_id +} + +output "sa_key" { + value = google_service_account_key.int_test.private_key + sensitive = true +} diff --git a/test/setup/variables.tf b/test/setup/variables.tf new file mode 100644 index 00000000..d35eaca5 --- /dev/null +++ b/test/setup/variables.tf @@ -0,0 +1,29 @@ +/** + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +variable "org_id" { + description = "The numeric organization id" + type = string +} + +variable "folder_id" { + description = "The folder to deploy in" + type = string +} + +variable "billing_account" { + description = "The billing account id associated with the project, e.g. XXXXXX-YYYYYY-ZZZZZZ" + type = string +} diff --git a/test/boilerplate/boilerplate.tf.txt b/test/setup/versions.tf similarity index 80% rename from test/boilerplate/boilerplate.tf.txt rename to test/setup/versions.tf index 83185dca..efbd8ea5 100644 --- a/test/boilerplate/boilerplate.tf.txt +++ b/test/setup/versions.tf @@ -13,3 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + +terraform { + required_version = ">= 0.12" +} + +provider "google" { + version = "~> 2.13.0" +} + +provider "google-beta" { + version = "~> 2.13.0" +} diff --git a/test/test_verify_boilerplate.py b/test/test_verify_boilerplate.py deleted file mode 100755 index 0d1cb440..00000000 --- a/test/test_verify_boilerplate.py +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Please note that this file was generated from -# [terraform-google-module-template](https://github.com/terraform-google-modules/terraform-google-module-template). -# Please make sure to contribute relevant changes upstream! - -''' A simple test for the verify_boilerplate python script. -This will create a set of test files, both valid and invalid, -and confirm that the has_valid_header call returns the correct -value. - -It also checks the number of files that are found by the -get_files call. -''' -from copy import deepcopy -from tempfile import mkdtemp -from shutil import rmtree -import unittest -from verify_boilerplate import has_valid_header, get_refs, get_regexs, \ - get_args, get_files - - -class AllTestCase(unittest.TestCase): - """ - All of the setup, teardown, and tests are contained in this - class. - """ - - def write_file(self, filename, content, expected): - """ - A utility method that creates test files, and adds them to - the cases that will be tested. - - Args: - filename: (string) the file name (path) to be created. - content: (list of strings) the contents of the file. - expected: (boolean) True if the header is expected to be valid, - false if not. - """ - - file = open(filename, 'w+') - for line in content: - file.write(line + "\n") - file.close() - self.cases[filename] = expected - - def create_test_files(self, tmp_path, extension, header): - """ - Creates 2 test files for .tf, .xml, .go, etc and one for - Dockerfile, and Makefile. - - The reason for the difference is that Makefile and Dockerfile - don't have an extension. These would be substantially more - difficult to create negative test cases, unless the files - were written, deleted, and re-written. - - Args: - tmp_path: (string) the path in which to create the files - extension: (string) the file extension - header: (list of strings) the header/boilerplate content - """ - - content = "\n...blah \ncould be code or could be garbage\n" - special_cases = ["Dockerfile", "Makefile"] - header_template = deepcopy(header) - valid_filename = tmp_path + extension - valid_content = header_template.append(content) - if extension not in special_cases: - # Invalid test cases for non-*file files (.tf|.py|.sh|.yaml|.xml..) - invalid_header = [] - for line in header_template: - if "2019" in line: - invalid_header.append(line.replace('2019', 'YEAR')) - else: - invalid_header.append(line) - invalid_header.append(content) - invalid_content = invalid_header - invalid_filename = tmp_path + "invalid." 
+ extension - self.write_file(invalid_filename, invalid_content, False) - valid_filename = tmp_path + "testfile." + extension - - valid_content = header_template - self.write_file(valid_filename, valid_content, True) - - def setUp(self): - """ - Set initial counts and values, and initializes the setup of the - test files. - """ - self.cases = {} - self.tmp_path = mkdtemp() + "/" - self.my_args = get_args() - self.my_refs = get_refs(self.my_args) - self.my_regex = get_regexs() - self.prexisting_file_count = len( - get_files(self.my_refs.keys(), self.my_args)) - for key in self.my_refs: - self.create_test_files(self.tmp_path, key, - self.my_refs.get(key)) - - def tearDown(self): - """ Delete the test directory. """ - rmtree(self.tmp_path) - - def test_files_headers(self): - """ - Confirms that the expected output of has_valid_header is correct. - """ - for case in self.cases: - if self.cases[case]: - self.assertTrue(has_valid_header(case, self.my_refs, - self.my_regex)) - else: - self.assertFalse(has_valid_header(case, self.my_refs, - self.my_regex)) - - def test_invalid_count(self): - """ - Test that the initial files found isn't zero, indicating - a problem with the code. - """ - self.assertFalse(self.prexisting_file_count == 0) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/verify_boilerplate.py b/test/verify_boilerplate.py deleted file mode 100644 index cc17d273..00000000 --- a/test/verify_boilerplate.py +++ /dev/null @@ -1,283 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# Verifies that all source files contain the necessary copyright boilerplate -# snippet. -# This is based on existing work -# https://github.com/kubernetes/test-infra/blob/master/hack -# /verify_boilerplate.py - -# Please note that this file was generated from -# [terraform-google-module-template](https://github.com/terraform-google-modules/terraform-google-module-template). -# Please make sure to contribute relevant changes upstream! -from __future__ import print_function -import argparse -import glob -import os -import re -import sys - - -def get_args(): - """Parses command line arguments. - - Configures and runs argparse.ArgumentParser to extract command line - arguments. - - Returns: - An argparse.Namespace containing the arguments parsed from the - command line - """ - parser = argparse.ArgumentParser() - parser.add_argument("filenames", - help="list of files to check, " - "all files if unspecified", - nargs='*') - rootdir = os.path.dirname(__file__) + "/../" - rootdir = os.path.abspath(rootdir) - parser.add_argument( - "--rootdir", - default=rootdir, - help="root directory to examine") - - default_boilerplate_dir = os.path.join(rootdir, "test/boilerplate") - parser.add_argument("--boilerplate-dir", default=default_boilerplate_dir) - return parser.parse_args() - - -def get_refs(ARGS): - """Converts the directory of boilerplate files into a map keyed by file - extension. 
- - Reads each boilerplate file's contents into an array, then adds that array - to a map keyed by the file extension. - - Returns: - A map of boilerplate lines, keyed by file extension. For example, - boilerplate.py.txt would result in the k,v pair {".py": py_lines} where - py_lines is an array containing each line of the file. - """ - refs = {} - - # Find and iterate over the absolute path for each boilerplate template - for path in glob.glob(os.path.join( - ARGS.boilerplate_dir, - "boilerplate.*.txt")): - extension = os.path.basename(path).split(".")[1] - ref_file = open(path, 'r') - ref = ref_file.read().splitlines() - ref_file.close() - refs[extension] = ref - return refs - - -# pylint: disable=too-many-locals -def has_valid_header(filename, refs, regexs): - """Test whether a file has the correct boilerplate header. - - Tests each file against the boilerplate stored in refs for that file type - (based on extension), or by the entire filename (eg Dockerfile, Makefile). - Some heuristics are applied to remove build tags and shebangs, but little - variance in header formatting is tolerated. - - Args: - filename: A string containing the name of the file to test - refs: A map of boilerplate headers, keyed by file extension - regexs: a map of compiled regex objects used in verifying boilerplate - - Returns: - True if the file has the correct boilerplate header, otherwise returns - False. - """ - try: - with open(filename, 'r') as fp: # pylint: disable=invalid-name - data = fp.read() - except IOError: - return False - basename = os.path.basename(filename) - extension = get_file_extension(filename) - if extension: - ref = refs[extension] - else: - ref = refs[basename] - # remove build tags from the top of Go files - if extension == "go": - con = regexs["go_build_constraints"] - (data, found) = con.subn("", data, 1) - # remove shebang - elif extension == "sh" or extension == "py": - she = regexs["shebang"] - (data, found) = she.subn("", data, 1) - data = data.splitlines() - # if our test file is smaller than the reference it surely fails! - if len(ref) > len(data): - return False - # trim our file to the same number of lines as the reference file - data = data[:len(ref)] - year = regexs["year"] - for datum in data: - if year.search(datum): - return False - - # if we don't match the reference at this point, fail - if ref != data: - return False - return True - - -def get_file_extension(filename): - """Extracts the extension part of a filename. - - Identifies the extension as everything after the last period in filename. - - Args: - filename: string containing the filename - - Returns: - A string containing the extension in lowercase - """ - return os.path.splitext(filename)[1].split(".")[-1].lower() - - -# These directories will be omitted from header checks -SKIPPED_DIRS = [ - 'Godeps', 'third_party', '_gopath', '_output', - '.git', 'vendor', '__init__.py', 'node_modules' -] - - -def normalize_files(files): - """Extracts the files that require boilerplate checking from the files - argument. - - A new list will be built. Each path from the original files argument will - be added unless it is within one of SKIPPED_DIRS. All relative paths will - be converted to absolute paths by prepending the root_dir path parsed from - the command line, or its default value. - - Args: - files: a list of file path strings - - Returns: - A modified copy of the files list where any any path in a skipped - directory is removed, and all paths have been made absolute. 
- """ - newfiles = [] - for pathname in files: - if any(x in pathname for x in SKIPPED_DIRS): - continue - newfiles.append(pathname) - for idx, pathname in enumerate(newfiles): - if not os.path.isabs(pathname): - newfiles[idx] = os.path.join(ARGS.rootdir, pathname) - return newfiles - - -def get_files(extensions, ARGS): - """Generates a list of paths whose boilerplate should be verified. - - If a list of file names has been provided on the command line, it will be - treated as the initial set to search. Otherwise, all paths within rootdir - will be discovered and used as the initial set. - - Once the initial set of files is identified, it is normalized via - normalize_files() and further stripped of any file name whose extension is - not in extensions. - - Args: - extensions: a list of file extensions indicating which file types - should have their boilerplate verified - - Returns: - A list of absolute file paths - """ - files = [] - if ARGS.filenames: - files = ARGS.filenames - else: - for root, dirs, walkfiles in os.walk(ARGS.rootdir): - # don't visit certain dirs. This is just a performance improvement - # as we would prune these later in normalize_files(). But doing it - # cuts down the amount of filesystem walking we do and cuts down - # the size of the file list - for dpath in SKIPPED_DIRS: - if dpath in dirs: - dirs.remove(dpath) - for name in walkfiles: - pathname = os.path.join(root, name) - files.append(pathname) - files = normalize_files(files) - outfiles = [] - for pathname in files: - basename = os.path.basename(pathname) - extension = get_file_extension(pathname) - if extension in extensions or basename in extensions: - outfiles.append(pathname) - return outfiles - - -def get_regexs(): - """Builds a map of regular expressions used in boilerplate validation. - - There are two scenarios where these regexes are used. The first is in - validating the date referenced is the boilerplate, by ensuring it is an - acceptable year. The second is in identifying non-boilerplate elements, - like shebangs and compiler hints that should be ignored when validating - headers. - - Returns: - A map of compiled regular expression objects, keyed by mnemonic. - """ - regexs = {} - # Search for "YEAR" which exists in the boilerplate, but shouldn't in the - # real thing - regexs["year"] = re.compile('YEAR') - # dates can be 2014, 2015, 2016 or 2017, company holder names can be - # anything - regexs["date"] = re.compile('(2014|2015|2016|2017|2018|2019)') - # strip // +build \n\n build constraints - regexs["go_build_constraints"] = re.compile(r"^(// \+build.*\n)+\n", - re.MULTILINE) - # strip #!.* from shell/python scripts - regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE) - return regexs - - -def main(args): - """Identifies and verifies files that should have the desired boilerplate. - - Retrieves the lists of files to be validated and tests each one in turn. - If all files contain correct boilerplate, this function terminates - normally. Otherwise it prints the name of each non-conforming file and - exists with a non-zero status code. 
- """ - regexs = get_regexs() - refs = get_refs(args) - filenames = get_files(refs.keys(), args) - nonconforming_files = [] - for filename in filenames: - if not has_valid_header(filename, refs, regexs): - nonconforming_files.append(filename) - if nonconforming_files: - print('%d files have incorrect boilerplate headers:' % len( - nonconforming_files)) - for filename in sorted(nonconforming_files): - print(os.path.relpath(filename, args.rootdir)) - sys.exit(1) - - -if __name__ == "__main__": - ARGS = get_args() - main(ARGS) diff --git a/variables.tf b/variables.tf index f1f2290b..9885166e 100644 --- a/variables.tf +++ b/variables.tf @@ -45,4 +45,3 @@ variable "unique_writer_identity" { description = "Whether or not to create a unique identity associated with this sink. If false (the default), then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true, then a unique service account is created and used for the logging sink." default = "false" } -