diff --git a/README.md b/README.md
index c5f4cfe..3727e91 100644
--- a/README.md
+++ b/README.md
@@ -19,8 +19,12 @@
 otherwise from Harness Secrets Manager if you are provisioning from Harness.
 
 To be able to sink to a BigQuery project you need a BigQuery project and a dataset created beforehand. And you need a
 service account that has BigQueryEditor access to be able to create tables inside that dataset.
-Each connector will add a key to that service account and provides the key as JSON to the connector for authentication.
-Key will be destroyed along with the connector.
+
+1. When `service_account_id` is provided, each connector adds a key to that service account and provides the key as
+   JSON to the connector for authentication. The key is destroyed along with the connector.
+2. When `key_file` is provided, the connector does not create a new key but uses the one provided.
+3. When both are provided, option 1 applies.
+4. When neither is provided, the connector does not sink any data.
diff --git a/examples/minimal/main.tf b/examples/minimal/main.tf
index b2135f9..bebe479 100644
--- a/examples/minimal/main.tf
+++ b/examples/minimal/main.tf
@@ -4,7 +4,7 @@ module "init" {
   # This is an example only; if you're adding this block to a live configuration,
   # make sure to use the latest release of the init module, found here:
   # https://github.com/entur/terraform-aiven-kafka-connect-init/releases
-  source       = "github.com/entur/terraform-aiven-kafka-connect-init//modules/init?ref=v1.1.2"
+  source       = "github.com/entur/terraform-aiven-kafka-connect-init//modules/init?ref=v1.1.3"
   access_token = var.my_aiven_access_token
   project_name = "my-aiven-project-name"
   service_name = "my-aiven-kafka-connect-service-name"
@@ -18,7 +18,8 @@ module "bigquery-sink" {
   connector_name        = "my-connector-name"
   bigquery_project_name = "my-bigquery-project-name"
   bigquery_dataset_name = "my-bigquery-dataset-name"
-  service_account_id    = "my-service-account-id"
+  service_account_id    = "my-service-account-id@my-project.iam.gserviceaccount.com"
+  kafka_username        = "my-kafka-user-name"
   kafka_topics          = ["my-topic-1", "my-topic-2"]
   additional_configuration = {
     "any.other.config.parameter" : "any-other-config-parameter-value"
diff --git a/examples/minimal_test/main.tf b/examples/minimal_test/main.tf
index d346fb6..679a264 100644
--- a/examples/minimal_test/main.tf
+++ b/examples/minimal_test/main.tf
@@ -4,7 +4,7 @@ module "init" {
   # This is an example only; if you're adding this block to a live configuration,
   # make sure to use the latest release of the init module, found here:
   # https://github.com/entur/terraform-aiven-kafka-connect-init/releases
-  source       = "github.com/entur/terraform-aiven-kafka-connect-init//modules/init?ref=v1.1.2"
+  source       = "github.com/entur/terraform-aiven-kafka-connect-init//modules/init?ref=v1.1.3"
   access_token = var.api_token
   project_name = "my-aiven-project"
   service_name = "my-aiven-kafka-connect-service-name"
@@ -22,7 +22,8 @@ module "bigquery-sink" {
   connector_name        = "my-connector-name"
   bigquery_project_name = "my-bigquery-project-name"
   bigquery_dataset_name = "my-bigquery-dataset-name"
-  service_account_id    = "my-service-account-id"
+  service_account_id    = "my-service-account-id@my-project.iam.gserviceaccount.com"
+  kafka_username        = "my-kafka-user-name"
   kafka_topics          = ["my-topic-1", "my-topic-2"]
   additional_configuration = {
     "any.other.config.parameter" : "any-other-config-parameter-value"
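Both examples above exercise option 1 (`service_account_id`). A minimal sketch of the option 2 (`key_file`) wiring follows, for review context only; the module source, the `init` output attribute, and the `my_bq_sa_key_json` variable are illustrative assumptions, not part of this diff:

```hcl
# Hypothetical sensitive input holding an existing service account key as JSON.
variable "my_bq_sa_key_json" {
  type      = string
  sensitive = true
}

# Option 2: service_account_id is omitted (defaults to null), so the module
# creates no google_service_account_key and uses key_file directly.
module "bigquery-sink-with-existing-key" {
  source = "../../modules/bigquery-sink" # adjust to your actual module source

  init                  = module.init.this # assumed init module output attribute
  connector_name        = "my-connector-name"
  bigquery_project_name = "my-bigquery-project-name"
  bigquery_dataset_name = "my-bigquery-dataset-name"
  key_file              = var.my_bq_sa_key_json # JSON contents; key_source_type defaults to "JSON"
  kafka_username        = "my-kafka-user-name"
  kafka_topics          = ["my-topic-1", "my-topic-2"]
}
```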
diff --git a/modules/bigquery-sink/README.md b/modules/bigquery-sink/README.md
index 71b0e67..93fb51c 100644
--- a/modules/bigquery-sink/README.md
+++ b/modules/bigquery-sink/README.md
@@ -26,6 +26,7 @@ No modules.
 |------|------|
 | [aiven_kafka_connector.bigquery-sink-connector](https://registry.terraform.io/providers/aiven/aiven/latest/docs/resources/kafka_connector) | resource |
 | [google_service_account_key.bq-sa-key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/service_account_key) | resource |
+| [aiven_kafka_user.kafka_user](https://registry.terraform.io/providers/aiven/aiven/latest/docs/data-sources/kafka_user) | data source |
 | [google_service_account.bq-sa](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/service_account) | data source |
 
 ## Inputs
@@ -43,11 +44,13 @@
 | [bigquery\_retry\_count](#input\_bigquery\_retry\_count) | The number of retry attempts made for a BigQuery request that fails with a backend error or a quota exceeded error | `number` | `1` | no |
 | [connector\_class](#input\_connector\_class) | Name or alias of the class for this connector | `string` | `"com.wepay.kafka.connect.bigquery.BigQuerySinkConnector"` | no |
 | [connector\_name](#input\_connector\_name) | Unique name for this connector in the connect cluster | `string` | n/a | yes |
-| [init](#input\_init) | Entur init module output. https://github.com/entur/terraform-aiven-kafka-connect-init | <pre>object({<br/>  aiven = object({<br/>    project = string<br/>    service = string<br/>    access_token = string<br/>  })<br/>  schema_registry = object({<br/>    url = string<br/>    userinfo = string<br/>  })<br/>  default_configuration = map(string)<br/>})</pre> | n/a | yes |
+| [init](#input\_init) | Entur init module output. https://github.com/entur/terraform-aiven-kafka-connect-init | <pre>object({<br/>  aiven = object({<br/>    access_token = string<br/>    project = string<br/>    service = string<br/>    schema_registry_url = string<br/>  })<br/>  default_configuration = map(string)<br/>})</pre> | n/a | yes |
 | [kafka\_topics](#input\_kafka\_topics) | List of kafka topic names to sink data from | `list(string)` | n/a | yes |
+| [kafka\_username](#input\_kafka\_username) | Aiven Kafka username used to authenticate against the Kafka schema registry | `string` | n/a | yes |
+| [key\_file](#input\_key\_file) | A JSON key with BigQuery service account credentials (or a path to the key file, depending on key\_source\_type) | `string` | `""` | no |
 | [key\_source\_type](#input\_key\_source\_type) | Determines whether the keyfile configuration is the path to the credentials JSON file or to the JSON itself. Available values are FILE, JSON & APPLICATION\_DEFAULT | `string` | `"JSON"` | no |
 | [sanitize\_topics](#input\_sanitize\_topics) | Designates whether to automatically sanitize topic names before using them as table names. If not enabled, topic names are used as table names | `bool` | `true` | no |
-| [service\_account\_id](#input\_service\_account\_id) | The email address of the service account with BigQuery Data Editor permission | `string` | n/a | yes |
+| [service\_account\_id](#input\_service\_account\_id) | The email address of the service account with BigQuery Data Editor permission | `string` | `null` | no |
 
 ## Outputs
 
diff --git a/modules/bigquery-sink/connector.tf b/modules/bigquery-sink/connector.tf
index 4b06176..9c1bff2 100644
--- a/modules/bigquery-sink/connector.tf
+++ b/modules/bigquery-sink/connector.tf
@@ -1,13 +1,21 @@
 data "google_service_account" "bq-sa" {
+  count      = var.service_account_id == null ? 0 : 1
   account_id = var.service_account_id
 }
 
 resource "google_service_account_key" "bq-sa-key" {
-  service_account_id = data.google_service_account.bq-sa.name
+  count              = var.service_account_id == null ? 0 : 1
+  service_account_id = data.google_service_account.bq-sa[count.index].name
+}
+
+data "aiven_kafka_user" "kafka_user" {
+  project      = var.init.aiven.project
+  service_name = var.init.aiven.service
+  username     = var.kafka_username
 }
 
 locals {
-  bq_sa_key = base64decode(google_service_account_key.bq-sa-key.private_key)
+  bq_sa_key = var.service_account_id == null ? var.key_file : base64decode(google_service_account_key.bq-sa-key[0].private_key)
   standard_configuration = merge(
     var.init.default_configuration,
     {
@@ -28,7 +36,8 @@
       "transforms" : "regexTransformation",
       "transforms.regexTransformation.regex" : "(.*)",
       "transforms.regexTransformation.replacement" : "$1",
-      "transforms.regexTransformation.type" : "org.apache.kafka.connect.transforms.RegexRouter"
+      "transforms.regexTransformation.type" : "org.apache.kafka.connect.transforms.RegexRouter",
+      "value.converter.schema.registry.basic.auth.user.info" : "${data.aiven_kafka_user.kafka_user.username}:${data.aiven_kafka_user.kafka_user.password}"
     }
   )
 }
diff --git a/modules/bigquery-sink/variables.tf b/modules/bigquery-sink/variables.tf
index b9887a0..764568e 100644
--- a/modules/bigquery-sink/variables.tf
+++ b/modules/bigquery-sink/variables.tf
@@ -2,13 +2,10 @@ variable "init" {
   description = "Entur init module output. https://github.com/entur/terraform-aiven-kafka-connect-init"
   type = object({
     aiven = object({
-      project      = string
-      service      = string
-      access_token = string
-    })
-    schema_registry = object({
-      url      = string
-      userinfo = string
+      access_token        = string
+      project             = string
+      service             = string
+      schema_registry_url = string
     })
     default_configuration = map(string)
   })
@@ -25,6 +22,11 @@ variable "connector_class" {
   default     = "com.wepay.kafka.connect.bigquery.BigQuerySinkConnector"
 }
 
+variable "kafka_username" {
+  description = "Aiven Kafka username used to authenticate against the Kafka schema registry"
+  type        = string
+}
+
 variable "kafka_topics" {
   type        = list(string)
   description = "List of kafka topic names to sink data from"
@@ -43,6 +45,7 @@ variable "bigquery_dataset_name" {
 variable "service_account_id" {
   type        = string
   description = "The email address of the service account with BigQuery Data Editor permission"
+  default     = null
 }
 
 variable "sanitize_topics" {
@@ -93,6 +96,16 @@ variable "key_source_type" {
   default = "JSON"
 }
 
+variable "key_file" {
+  type        = string
+  description = "A JSON key with BigQuery service account credentials (or a path to the key file, depending on key_source_type)"
+  default     = ""
+  validation {
+    condition     = var.key_file != null
+    error_message = "Service Account Key file cannot be null."
+  }
+}
+
 variable "additional_configuration" {
   type    = map(string)
   default = {}
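Note on `key_file` semantics: because `key_source_type` defaults to `"JSON"`, the connector expects `key_file` to carry the key's JSON contents rather than a path. A caller-side sketch, assuming the key sits in a local file (the path is hypothetical):

```hcl
locals {
  # Terraform's built-in file() reads the file at plan time and returns its
  # contents as a string, so the JSON itself is handed to the connector.
  bq_sa_key_json = file("${path.module}/secrets/bq-sa-key.json") # hypothetical path
}
```

Pass `local.bq_sa_key_json` as the module's `key_file` argument, or set `key_source_type = "FILE"` and supply a path that exists on the Connect workers instead.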