feat: add connector params timeout and export from replica (#406)
Co-authored-by: Awais Malik <[email protected]>
Carlos Mondo and g-awmalik authored Sep 2, 2023
1 parent c08d5d7 commit e563f8a
Showing 4 changed files with 52 additions and 11 deletions.
4 changes: 4 additions & 0 deletions modules/backup/README.md
@@ -56,6 +56,8 @@ fetch workflows.googleapis.com/Workflow
| backup\_retention\_time | The number of days backups should be kept | `number` | `30` | no |
| backup\_schedule | The cron schedule to execute the internal backup | `string` | `"45 2 * * *"` | no |
| compress\_export | Whether or not to compress the export when storing in the bucket; only valid for MySQL and PostgreSQL | `bool` | `true` | no |
| connector\_params\_timeout | The end-to-end duration the connector call is allowed to run for before throwing a timeout exception. The default value is 1800 and this should be the maximum for connector methods that are not long-running operations. Otherwise, for long-running operations, the maximum timeout for a connector call is 31536000 seconds (one year). | `number` | `1800` | no |
| enable\_connector\_params | Whether to enable connector-specific parameters for Google Workflow SQL Export. | `bool` | `false` | no |
| enable\_export\_backup | Whether to create exports to GCS buckets with this module | `bool` | `true` | no |
| enable\_internal\_backup | Whether to create internal backups with this module | `bool` | `true` | no |
| export\_databases | The list of databases that should be exported - if it is an empty set, all databases will be exported | `set(string)` | `[]` | no |
@@ -67,7 +69,9 @@ fetch workflows.googleapis.com/Workflow
| scheduler\_timezone | The Timezone in which the Scheduler Jobs are triggered | `string` | `"Etc/GMT"` | no |
| service\_account | The service account to use for running the workflow and triggering the workflow by Cloud Scheduler - if empty or null, a service account will be created. If you provide a service account, you must grant it the Cloud SQL Admin and Workflows Invoker roles | `string` | `null` | no |
| sql\_instance | The name of the SQL instance to backup | `string` | n/a | yes |
| sql\_instance\_replica | The name of the SQL instance replica to export from | `string` | `null` | no |
| unique\_suffix | Unique suffix to add to scheduler job and workflow names. | `string` | `""` | no |
| use\_sql\_instance\_replica\_in\_exporter | Whether or not to use the replica instance in the exporter workflow. | `bool` | `false` | no |
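
For orientation, a minimal sketch of a module call wiring the new inputs together; the source path, project, instance, and bucket names below are placeholders, not values from this commit:

module "sql_backup" {
  source     = "./modules/backup" # placeholder path
  project_id = "my-project"       # placeholder
  region     = "us-central1"      # placeholder

  sql_instance = "my-primary-instance"   # placeholder
  export_uri   = "gs://my-export-bucket" # placeholder

  # New in this commit: pass a connector timeout for long-running exports.
  enable_connector_params  = true
  connector_params_timeout = 3600

  # New in this commit: run the export against a read replica.
  use_sql_instance_replica_in_exporter = true
  sql_instance_replica                 = "my-replica-instance" # placeholder
}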

## Outputs

22 changes: 12 additions & 10 deletions modules/backup/main.tf
@@ -97,26 +97,28 @@ resource "google_cloud_scheduler_job" "sql_backup" {
################################
resource "google_workflows_workflow" "sql_export" {
count = var.enable_export_backup ? 1 : 0
name = "sql-export-${var.sql_instance}${var.unique_suffix}"
name = var.use_sql_instance_replica_in_exporter ? "sql-export-${var.sql_instance_replica}${var.unique_suffix}" : "sql-export-${var.sql_instance}${var.unique_suffix}"
region = var.region
description = "Workflow for backing up the CloudSQL Instance"
project = var.project_id
service_account = local.service_account
source_contents = templatefile("${path.module}/templates/export.yaml.tftpl", {
-  project = var.project_id
-  instanceName = var.sql_instance
-  backupRetentionTime = var.backup_retention_time
-  databases = jsonencode(var.export_databases)
-  gcsBucket = var.export_uri
-  dbType = split("_", data.google_sql_database_instance.backup_instance.database_version)[0]
-  compressExport = var.compress_export
-  logDbName = var.log_db_name_to_export
+  project = var.project_id
+  instanceName = var.use_sql_instance_replica_in_exporter ? var.sql_instance_replica : var.sql_instance
+  backupRetentionTime = var.backup_retention_time
+  databases = jsonencode(var.export_databases)
+  gcsBucket = var.export_uri
+  dbType = split("_", data.google_sql_database_instance.backup_instance.database_version)[0]
+  compressExport = var.compress_export
+  enableConnectorParams = var.enable_connector_params
+  connectorParamsTimeout = var.connector_params_timeout
+  logDbName = var.log_db_name_to_export
})
}
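
A note on the conditionals above: if use_sql_instance_replica_in_exporter is true while sql_instance_replica is left null, the "sql-export-${var.sql_instance_replica}…" interpolation fails at plan time with a generic null-value error. A precondition would surface a clearer message; this is a sketch of a possible guard (assumes Terraform >= 1.2), not part of this commit:

resource "google_workflows_workflow" "sql_export" {
  # ... arguments as above ...

  lifecycle {
    precondition {
      condition     = !var.use_sql_instance_replica_in_exporter || var.sql_instance_replica != null
      error_message = "sql_instance_replica must be set when use_sql_instance_replica_in_exporter is true."
    }
  }
}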

resource "google_cloud_scheduler_job" "sql_export" {
count = var.enable_export_backup ? 1 : 0
name = "sql-export-${var.sql_instance}${var.unique_suffix}"
name = var.use_sql_instance_replica_in_exporter ? "sql-export-${var.sql_instance_replica}${var.unique_suffix}" : "sql-export-${var.sql_instance}${var.unique_suffix}"
project = var.project_id
region = var.region
description = "Managed by Terraform - Triggers a SQL Export via Workflows"
13 changes: 12 additions & 1 deletion modules/backup/templates/export.yaml.tftpl
@@ -54,6 +54,10 @@ main:
args:
  project: ${project}
  instance: ${instanceName}
  %{ if enableConnectorParams }
  connector_params:
    timeout: ${connectorParamsTimeout}
  %{ endif }
  body:
    exportContext:
      databases: [$${database}]
@@ -81,6 +85,10 @@ main:
args:
  project: ${project}
  instance: ${instanceName}
  %{ if enableConnectorParams }
  connector_params:
    timeout: ${connectorParamsTimeout}
  %{ endif }
  body:
    exportContext:
      databases: [$${database}]
@@ -94,9 +102,12 @@ main:
args:
  project: ${project}
  instance: ${instanceName}
  %{ if enableConnectorParams }
  connector_params:
    timeout: ${connectorParamsTimeout}
  %{ endif }
  body:
    exportContext:
      databases: $${databases}
      uri: $${"${gcsBucket}/${instanceName}-" + backupTime + %{ if compressExport == true }".sql.gz"%{ else }".sql"%{ endif }}
%{ endif }
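
To check how the %{ if } guards resolve, the template can be rendered outside the workflow resource; a sketch using a throwaway output, with placeholder values throughout:

output "rendered_export_workflow" {
  value = templatefile("${path.module}/templates/export.yaml.tftpl", {
    project                = "my-project"            # placeholder
    instanceName           = "my-primary-instance"   # placeholder
    backupRetentionTime    = 30
    databases              = jsonencode([])
    gcsBucket              = "gs://my-export-bucket" # placeholder
    dbType                 = "POSTGRES"
    compressExport         = true
    enableConnectorParams  = true
    connectorParamsTimeout = 1800
    logDbName              = false
  })
}

With enableConnectorParams = true, each export call carries a connector_params block with timeout: 1800; with compressExport = true, the uri expression yields objects named like my-primary-instance-<backupTime>.sql.gz.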

24 changes: 24 additions & 0 deletions modules/backup/variables.tf
@@ -97,6 +97,18 @@ variable "compress_export" {
default = true
}

variable "enable_connector_params" {
description = "Whether to enable connector-specific parameters for Google Workflow SQL Export."
type = bool
default = false
}

variable "connector_params_timeout" {
description = "The end-to-end duration the connector call is allowed to run for before throwing a timeout exception. The default value is 1800 and this should be the maximum for connector methods that are not long-running operations. Otherwise, for long-running operations, the maximum timeout for a connector call is 31536000 seconds (one year)."
type = number
default = 1800
}

variable "unique_suffix" {
description = "Unique suffix to add to scheduler jobs and workflows names."
type = string
@@ -108,3 +120,15 @@ variable "log_db_name_to_export" {
type = bool
default = false
}

variable "use_sql_instance_replica_in_exporter" {
description = "Whether or not to use replica instance on exporter workflow."
type = bool
default = false
}

variable "sql_instance_replica" {
description = "The name of the SQL instance replica to export"
type = string
default = null
}
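
The connector_params_timeout description documents a default of 1800 and a ceiling of 31536000 seconds, but nothing enforces that range. A validation block one could add to the variable (a sketch, not part of this commit):

variable "connector_params_timeout" {
  description = "The end-to-end duration the connector call is allowed to run for before throwing a timeout exception."
  type        = number
  default     = 1800

  validation {
    condition     = var.connector_params_timeout > 0 && var.connector_params_timeout <= 31536000
    error_message = "connector_params_timeout must be between 1 and 31536000 seconds (one year)."
  }
}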
