diff --git a/README.md b/README.md
index d7f7bd3..a1295e5 100644
--- a/README.md
+++ b/README.md
@@ -57,6 +57,7 @@ Then perform the following commands on the root folder:
 |------|-------------|------|---------|:--------:|
 | ip\_configuration | The configuration for VM IPs. Options are 'WORKER\_IP\_PUBLIC' or 'WORKER\_IP\_PRIVATE'. | `string` | `null` | no |
 | kms\_key\_name | The name for the Cloud KMS key for the job. Key format is: projects/PROJECT\_ID/locations/LOCATION/keyRings/KEY\_RING/cryptoKeys/KEY | `string` | `null` | no |
+| labels | User labels to be specified for the job. | `map(string)` | `{}` | no |
 | machine\_type | The machine type to use for the job. | `string` | `""` | no |
 | max\_workers | The number of workers permitted to work on the job. More workers may improve processing speed at additional cost. | `number` | `1` | no |
 | name | The name of the dataflow job | `string` | n/a | yes |
diff --git a/examples/dlp_api_example/README.md b/examples/dlp_api_example/README.md
index 3172570..2502392 100644
--- a/examples/dlp_api_example/README.md
+++ b/examples/dlp_api_example/README.md
@@ -70,3 +70,29 @@ To provision this example, run the following from within this directory:
 - `terraform plan` to see the infrastructure plan
 - `terraform apply` to apply the infrastructure build
 - `terraform destroy` to destroy the built infrastructure. (Note that KMS key rings and crypto keys cannot be destroyed!)
+
+
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:|
+| create\_key\_ring | Boolean for determining whether to create a key ring with keys (true or false) | `bool` | `true` | no |
+| key\_ring | The GCP KMS key ring to be created | `string` | n/a | yes |
+| kms\_key\_name | The GCP KMS key to be created under the key ring | `string` | n/a | yes |
+| project\_id | The project ID to deploy to | `string` | n/a | yes |
+| region | The region in which the bucket and the dataflow job will be deployed | `string` | `"us-central1"` | no |
+| service\_account\_email | The Service Account email used to create the job. | `string` | n/a | yes |
+| terraform\_service\_account\_email | The Service Account email used by Terraform to spin up resources (the one from the environment variable GOOGLE\_APPLICATION\_CREDENTIALS) | `string` | n/a | yes |
+| wrapped\_key | Wrapped key from KMS; leave blank if create\_key\_ring=true | `string` | `""` | no |
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| bucket\_name | The name of the bucket |
+| df\_job\_id | The unique ID of the newly created Dataflow job |
+| df\_job\_name | The name of the newly created Dataflow job |
+| df\_job\_state | The state of the newly created Dataflow job |
+| project\_id | The project's ID |
+
+
diff --git a/examples/dlp_api_example/main.tf b/examples/dlp_api_example/main.tf
index 2dfe4be..6d405ee 100644
--- a/examples/dlp_api_example/main.tf
+++ b/examples/dlp_api_example/main.tf
@@ -125,6 +125,10 @@ module "dataflow-job" {
     dlpProjectId           = var.project_id
     deidentifyTemplateName = "projects/${var.project_id}/deidentifyTemplates/15"
   }
+
+  labels = {
+    example_name = "dlp_api_example"
+  }
 }
 
 resource "null_resource" "destroy_deidentify_template" {
diff --git a/examples/simple_example/README.md b/examples/simple_example/README.md
index 06dbe5c..896067c 100644
--- a/examples/simple_example/README.md
+++ b/examples/simple_example/README.md
@@ -52,3 +52,29 @@ To provision this example, run the following from within this directory:
 - `terraform plan` to see the infrastructure plan
 - `terraform apply` to apply the infrastructure build
 - `terraform destroy` to destroy the built infrastructure
+
+
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:|
+| force\_destroy | When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run. | `bool` | `false` | no |
+| project\_id | The project ID to deploy to | `string` | n/a | yes |
+| region | The region in which the bucket will be deployed | `string` | n/a | yes |
+| service\_account\_email | The Service Account email used to create the job. | `string` | n/a | yes |
+| zone | The zone in which the dataflow job will be deployed | `string` | n/a | yes |
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| bucket\_name | The name of the bucket |
+| df\_job\_id | The unique ID of the first Dataflow job |
+| df\_job\_id\_2 | The unique ID of the second Dataflow job |
+| df\_job\_name | The name of the first Dataflow job |
+| df\_job\_name\_2 | The name of the second Dataflow job |
+| df\_job\_state | The state of the first Dataflow job |
+| df\_job\_state\_2 | The state of the second Dataflow job |
+| project\_id | The project's ID |
+
+
diff --git a/examples/simple_example/main.tf b/examples/simple_example/main.tf
index ab9dbf5..48a77c7 100644
--- a/examples/simple_example/main.tf
+++ b/examples/simple_example/main.tf
@@ -98,5 +98,9 @@ module "dataflow-job-2" {
     inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
     output    = "gs://${local.gcs_bucket_name}/output/my_output"
   }
+
+  labels = {
+    example_name = "simple_example"
+  }
 }
 
diff --git a/main.tf b/main.tf
index 5c782da..4bb42e9 100644
--- a/main.tf
+++ b/main.tf
@@ -30,5 +30,6 @@ resource "google_dataflow_job" "dataflow_job" {
   machine_type     = var.machine_type
   ip_configuration = var.ip_configuration
   kms_key_name     = var.kms_key_name
+  labels           = var.labels
 }
 
diff --git a/modules/dataflow_bucket/README.md b/modules/dataflow_bucket/README.md
index 5ee693f..4939eba 100644
--- a/modules/dataflow_bucket/README.md
+++ b/modules/dataflow_bucket/README.md
@@ -39,3 +39,22 @@ See [here](../example/simple_example) for a multi jobs example.
 
 The integration test [here](../test/integration/simple_example/controls/gcloud.rb) checks if the lifecycle rule is not enabled.
 This test also implicitly checks whether or not the bucket was successfully created.
+
+
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:|
+| force\_destroy | When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run. | `bool` | `false` | no |
+| name | The name of the bucket. | `string` | n/a | yes |
+| project\_id | The project\_id to deploy the example instance into. (e.g. "simple-sample-project-1234") | `string` | n/a | yes |
+| region | The GCS bucket region. This should be the same as your dataflow job's zone to optimize performance. | `string` | `"us-central1"` | no |
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| name | The name of the bucket |
+| region | The bucket's region location |
+
+
diff --git a/test/fixtures/simple_example/README.md b/test/fixtures/simple_example/README.md
index 1b0234d..5c3be36 100644
--- a/test/fixtures/simple_example/README.md
+++ b/test/fixtures/simple_example/README.md
@@ -33,3 +33,29 @@ To provision this example, run the following from within this directory:
 - `terraform plan` to see the infrastructure plan
 - `terraform apply` to apply the infrastructure build
 - `terraform destroy` to destroy the built infrastructure
+
+
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:|
+| project\_id | The project\_id to deploy the example instance into. (e.g. "simple-sample-project-1234") | `string` | n/a | yes |
+| region | The region to deploy to | `string` | `"us-east1"` | no |
+| service\_account\_email | The Service Account email used to create the job. | `string` | n/a | yes |
+| zone | The zone to deploy to | `string` | `"us-central1-a"` | no |
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| bucket\_name | The name of the bucket |
+| df\_job\_id | The unique ID of the first Dataflow job |
+| df\_job\_id\_2 | The unique ID of the second Dataflow job |
+| df\_job\_name | The name of the first Dataflow job |
+| df\_job\_name\_2 | The name of the second Dataflow job |
+| df\_job\_state | The state of the first Dataflow job |
+| df\_job\_state\_2 | The state of the second Dataflow job |
+| project\_id | The project ID used when managing resources. |
+| region | The region used when managing resources. |
+
+
diff --git a/variables.tf b/variables.tf
index 1242b4b..7e51d8b 100644
--- a/variables.tf
+++ b/variables.tf
@@ -98,3 +98,9 @@ variable "kms_key_name" {
   description = "The name for the Cloud KMS key for the job. Key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY"
   default     = null
 }
+
+variable "labels" {
+  type        = map(string)
+  description = "User labels to be specified for the job."
+  default     = {}
+}
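
Taken together, the change is a straight passthrough: `variables.tf` declares `labels` as a `map(string)` defaulting to `{}`, and `main.tf` forwards it verbatim to the `labels` argument of the `google_dataflow_job` resource. A minimal sketch of a module call that exercises the new input follows; the `source` path, bucket, and template values are illustrative assumptions modeled on the bundled examples, not part of this diff:

```hcl
module "dataflow-job" {
  source = "../../" # assumed relative source, as used by the bundled examples

  project_id            = var.project_id            # assumed input, per the examples
  name                  = "wordcount-labeled"
  max_workers           = 1
  machine_type          = "n1-standard-1"
  zone                  = var.zone                  # assumed input, per the examples
  service_account_email = var.service_account_email # assumed input, per the examples
  template_gcs_path     = "gs://dataflow-templates/latest/Word_Count" # assumed template
  temp_gcs_location     = "gs://my-dataflow-bucket/tmp_dir"           # hypothetical bucket

  # New in this change: user labels forwarded to google_dataflow_job's `labels`.
  labels = {
    example_name = "simple_example"
    cost_center  = "research" # hypothetical label; any map(string) entries work
  }
}
```

Because `labels` defaults to `{}`, existing callers of the module are unaffected by this change.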