From 9f51c220d78f9b2e5dcf780ef18180734fbf17da Mon Sep 17 00:00:00 2001 From: Agnieszka Figiel Date: Wed, 22 Nov 2023 13:56:15 +0100 Subject: [PATCH 1/3] Terraform upgrade --- infrastructure/base/.terraform.lock.hcl | 51 +++++++++++++++++-------- infrastructure/base/versions.tf | 2 +- 2 files changed, 36 insertions(+), 17 deletions(-) diff --git a/infrastructure/base/.terraform.lock.hcl b/infrastructure/base/.terraform.lock.hcl index bfa35e5c..eb4c18fe 100644 --- a/infrastructure/base/.terraform.lock.hcl +++ b/infrastructure/base/.terraform.lock.hcl @@ -1,24 +1,43 @@ # This file is maintained automatically by "terraform init". # Manual edits may be lost in future updates. +provider "registry.terraform.io/hashicorp/archive" { + version = "2.4.0" + hashes = [ + "h1:EtN1lnoHoov3rASpgGmh6zZ/W6aRCTgKC7iMwvFY1yc=", + "zh:18e408596dd53048f7fc8229098d0e3ad940b92036a24287eff63e2caec72594", + "zh:392d4216ecd1a1fd933d23f4486b642a8480f934c13e2cae3c13b6b6a7e34a7b", + "zh:655dd1fa5ca753a4ace21d0de3792d96fff429445717f2ce31c125d19c38f3ff", + "zh:70dae36c176aa2b258331ad366a471176417a94dd3b4985a911b8be9ff842b00", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7d8c8e3925f1e21daf73f85983894fbe8868e326910e6df3720265bc657b9c9c", + "zh:a032ec0f0aee27a789726e348e8ad20778c3a1c9190ef25e7cff602c8d175f44", + "zh:b8e50de62ba185745b0fe9713755079ad0e9f7ac8638d204de6762cc36870410", + "zh:c8ad0c7697a3d444df21ff97f3473a8604c8639be64afe3f31b8ec7ad7571e18", + "zh:df736c5a2a7c3a82c5493665f659437a22f0baf8c2d157e45f4dd7ca40e739fc", + "zh:e8ffbf578a0977074f6d08aa8734e36c726e53dc79894cfc4f25fadc4f45f1df", + "zh:efea57ff23b141551f92b2699024d356c7ffd1a4ad62931da7ed7a386aef7f1f", + ] +} + provider "registry.terraform.io/hashicorp/github" { - version = "5.38.0" + version = "5.42.0" hashes = [ - "h1:LTQINoKOCgoI8ezTTdpXaZmqsQ92cBF2j1f2tZ/KQVo=", - "zh:05ec90a2a305c737b27ee8e24f495704eddc13e7bf193e19dc24aa2cf3faabf0", - 
"zh:2f24962e8108e9945c3dee9fd1464adcee7ea04469b05f308bf78f8fdab2dc3e", - "zh:370f98360a8d54ceef2c34fbd4d8ebb4ba75c8aa1bf30d3fb0f55636d8404987", - "zh:4669ba49c0f95be4fea862ec16fd512b8a0215ea00d183f6cf323c8badfb6830", - "zh:4ac2b3b75f9d425b799bbbe1346f973271b36d17abb7f8667691b30500e81134", - "zh:567035c9ae5509c0114fa8d8cac57c9def8b4ebf1def9e8b161ebe45e16d7c90", - "zh:585970da1d122f90052ebfcbf565193c76c12bd0c61825079672f8c8cdee68dd", - "zh:a1fe1bf2f3825fb7f2cf9fefdeaaeb5ce8398fcaa8b5b2fd22fae7f34dcddacb", - "zh:b2aca4d967f18ab59bddc5591de7d8ca076031c304ea3390a26b27088e221822", - "zh:baef060e6f30c513b49a30b9510b3dfe24b538b1618342e2b4fa37b6e3a1c4c2", - "zh:c30021d925ea79e9f4beba852058dba7a45d30c92691af2dbf3595113bb539d1", - "zh:c9dfa0b61e46833bd5ca89fd842f6649b725191c2452c87b21ce501fc3e22b37", - "zh:d4570bc21127387ec7c8b8f4d57c83115d15a8dbb7837f0d0c434c10e8e90de8", - "zh:e5060febd3a5afae3374640f8c3394c9cff421bee43efa0acd40795cb0ea87c2", + "h1:vHTdYL6eXJfUzz4bs0ICyg2f8ct/K2EnGAjwLrAmL3U=", + "zh:0f97039c6b70295c4a82347bc8a0bcea700b3fb3df0e0be53585da025584bb7c", + "zh:12e78898580cc2a72b5f2a77e191b158f88e974b0500489b691f34842288745c", + "zh:23660933e4f00293c0d4d6cd6b4d72e382c0df46b70cecf22b5c4c090d3b61e3", + "zh:74119174b46d8d197dd209a246bf8b5db113c66467e02c831e68a8ceea312d3e", + "zh:829c4c0c202fc646eb0e1759eb9c8f0757df5295be2d3344b8fd6ca8ce9ef33b", + "zh:92043e667f520aee4e08a10a183ad5abe5487f3e9c8ad5a55ea1358b14b17b1a", + "zh:998909806b4ff42cf480fcd359ec1f12b868846f89284b991987f55de24876b7", + "zh:9f758447db3bf386516562abd6da1e54d22ddc207bda25961d2b5b049f32da0f", + "zh:a6259215612d4d6a281c671b2d5aa3a0a0b0a3ae92ed60b633998bb692e922d3", + "zh:ad7d78056beb44191911db9443bf5eec41a3d60e7b01def2a9e608d1c4288d27", + "zh:b697e7b0abef3000e1db482c897b82cd455621b488bb6c4cd3d270763d7b08ac", + "zh:db8e849eded8aebff780f89ab7e1339053d2f15c1c8f94103d70266a090527ad", + "zh:e5bdbb85fb148dd75877a7b94b595d4e8680e495c241db02c4b12b91e9d08953", + 
"zh:ee812c5fd77d3817fb688f720e5eb42d7ff04db67a125de48b05458c9f657483", ] } diff --git a/infrastructure/base/versions.tf b/infrastructure/base/versions.tf index eec286db..b8499bf4 100644 --- a/infrastructure/base/versions.tf +++ b/infrastructure/base/versions.tf @@ -15,7 +15,7 @@ terraform { version = "~> 3.5.1" } } - required_version = "1.5.7" + required_version = "1.6.4" } provider "google" { From ae9bdbadd0f71c6d3cc651a94f2759da20bbc188 Mon Sep 17 00:00:00 2001 From: Agnieszka Figiel Date: Wed, 22 Nov 2023 13:57:06 +0100 Subject: [PATCH 2/3] Terraform module and GH Actions workflow for the analysis Cloud Function --- .github/workflows/deploy.yml | 52 +++++-- cloud_functions/analysis/connect_tcp.py | 32 +++++ cloud_functions/analysis/main.py | 36 +++++ cloud_functions/analysis/requirements.txt | 3 + infrastructure/README.md | 95 +++++++++---- .../base/modules/cloudfunction/main.tf | 133 ++++++++++++++++++ .../base/modules/cloudfunction/outputs.tf | 15 ++ .../base/modules/cloudfunction/variables.tf | 104 ++++++++++++++ infrastructure/base/modules/env/main.tf | 41 +++++- infrastructure/base/modules/env/outputs.tf | 4 + .../base/modules/secret_value/main.tf | 4 + .../base/modules/secret_value/outputs.tf | 4 + infrastructure/base/outputs.tf | 8 ++ 13 files changed, 484 insertions(+), 47 deletions(-) create mode 100644 cloud_functions/analysis/connect_tcp.py create mode 100644 cloud_functions/analysis/main.py create mode 100644 cloud_functions/analysis/requirements.txt create mode 100644 infrastructure/base/modules/cloudfunction/main.tf create mode 100644 infrastructure/base/modules/cloudfunction/outputs.tf create mode 100644 infrastructure/base/modules/cloudfunction/variables.tf diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index a0b447e7..b40d3e6d 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,13 +1,3 @@ -# This workflow build and push a Docker container to Google Artifact Registry and deploy it on Cloud 
Run when a commit is pushed to the $default-branch branch -# -# Overview: -# -# 1. Authenticate to Google Cloud -# 2. Authenticate Docker to Artifact Registry -# 3. Build a docker container -# 4. Publish it to Google Artifact Registry -# 5. Deploy it to Cloud Run -# # The workflow uses GH Secrets managed by Terraform: # - GCP_PROJECT_ID # - GCP_REGION @@ -18,6 +8,7 @@ # - _CLIENT_SERVICE # - _CMS_REPOSITORY # - _CMS_SERVICE +# - _ANALYSIS_CF_NAME # # it also uses the following secrets not managed by Terraform: # - _CLIENT_ENV @@ -30,10 +21,11 @@ on: branches: - main - develop - - infrastructure/setup + paths: - 'frontend/**' - 'cms/**' + - 'cloud_functions/**' - '.github/workflows/*' env: @@ -139,7 +131,6 @@ jobs: run: echo "environment=$ENVIRONMENT" >> $GITHUB_OUTPUT id: extract_environment - #- name: Google Auth authentication via credentials json - name: Google Auth id: auth uses: 'google-github-actions/auth@v1' @@ -190,3 +181,40 @@ jobs: - name: Show Output run: echo ${{ steps.deploy.outputs.url }} + deploy_cloud_functions: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Extract branch name + shell: bash + run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + id: extract_branch + + - name: Extract environment name + env: + ENVIRONMENT: ${{ steps.extract_branch.outputs.branch == 'main' && 'PRODUCTION' || 'STAGING' }} + run: echo "environment=$ENVIRONMENT" >> $GITHUB_OUTPUT + id: extract_environment + + - name: Google Auth + id: auth + uses: 'google-github-actions/auth@v1' + with: + credentials_json: "${{ secrets[format('{0}_GCP_SA_KEY', steps.extract_environment.outputs.environment)] }}" + token_format: 'access_token' + - name: 'Set up Cloud SDK' + uses: 'google-github-actions/setup-gcloud@v1' + with: + version: '>= 363.0.0' + - name: 'Use gcloud CLI' + run: 'gcloud info' + - name: 'Deploy to gen2 cloud function' + env: + CLOUD_FUNCTION_NAME: ${{ secrets[format('{0}_ANALYSIS_CF_NAME', 
steps.extract_environment.outputs.environment)] }} + run: | + gcloud functions deploy ${{ env.CLOUD_FUNCTION_NAME }} \ + --gen2 \ + --region=${{ env.REGION }} \ + --source=./cloud_functions/analysis \ diff --git a/cloud_functions/analysis/connect_tcp.py b/cloud_functions/analysis/connect_tcp.py new file mode 100644 index 00000000..81463872 --- /dev/null +++ b/cloud_functions/analysis/connect_tcp.py @@ -0,0 +1,32 @@ +import os +import ssl +import sqlalchemy + +def connect_tcp_socket() -> sqlalchemy.engine.base.Engine: + """Initializes a TCP connection pool for a Cloud SQL instance of Postgres.""" + # Note: Saving credentials in environment variables is convenient, but not + # secure - consider a more secure solution such as + # Cloud Secret Manager (https://cloud.google.com/secret-manager) to help + # keep secrets safe. + db_host = os.environ[ + "DATABASE_HOST" + ] # e.g. '127.0.0.1' ('172.17.0.1' if deployed to GAE Flex) + db_user = os.environ["DATABASE_USERNAME"] # e.g. 'my-db-user' + db_pass = os.environ["DATABASE_PASSWORD"] # e.g. 'my-db-password' + db_name = os.environ["DATABASE_NAME"] # e.g. 'my-database' + db_port = 5432 # e.g. 5432 + + pool = sqlalchemy.create_engine( + # Equivalent URL: + # postgresql+pg8000://:@:/ + sqlalchemy.engine.url.URL.create( + drivername="postgresql+pg8000", + username=db_user, + password=db_pass, + host=db_host, + port=db_port, + database=db_name, + ), + # ... + ) + return pool \ No newline at end of file diff --git a/cloud_functions/analysis/main.py b/cloud_functions/analysis/main.py new file mode 100644 index 00000000..33ae6ac6 --- /dev/null +++ b/cloud_functions/analysis/main.py @@ -0,0 +1,36 @@ +import functions_framework +import sqlalchemy + +from connect_tcp import connect_tcp_socket + +db = connect_tcp_socket() + +@functions_framework.http +def index(request): + """HTTP Cloud Function. + Args: + request (flask.Request): The request object. 
+ + Returns: + The response text, or any set of values that can be turned into a + Response object using `make_response` + . + Note: + For more information on how Flask integrates with Cloud + Functions, see the `Writing HTTP functions` page. + + """ + return get_locations_stats(db) + +def get_locations_stats(db: sqlalchemy.engine.base.Engine) -> dict: + with db.connect() as conn: + stmt = sqlalchemy.text( + "SELECT COUNT(*) FROM locations WHERE type=:type" + ) + regions_count = conn.execute(stmt, parameters={"type": "region"}).scalar() + countries_count = conn.execute(stmt, parameters={"type": "country"}).scalar() + + return { + "regions_count": regions_count, + "countries_count": countries_count + } \ No newline at end of file diff --git a/cloud_functions/analysis/requirements.txt b/cloud_functions/analysis/requirements.txt new file mode 100644 index 00000000..92643c0f --- /dev/null +++ b/cloud_functions/analysis/requirements.txt @@ -0,0 +1,3 @@ +functions-framework +sqlalchemy +pg8000 \ No newline at end of file diff --git a/infrastructure/README.md b/infrastructure/README.md index 481ae935..24df6ee3 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -1,15 +1,14 @@ # Infrastructure -While the application can be deployed in any server configuration that supports the application's dependencies, this project includes a [Terraform](https://www.terraform.io/) project that you can use to easily and quickly deploy it using -[Google Cloud Platform](https://cloud.google.com/). +While the application can be deployed in any server configuration that supports the application's dependencies, this project includes: +- a [Terraform](https://www.terraform.io/) project that you can use to easily and quickly provision the resources and deploy the code using [Google Cloud Platform](https://cloud.google.com/), +- and a GH Actions workflow to deploy code updates. 
## Dependencies -Here is the list of technical dependencies for deploying the SkyTruth 30x30 Dashboard app using these infrastructure -resources. Note that these requirements are for this particular deployment strategy, and not dependencies of the SkyTruth 30x30 Dashboard application itself - which can be deployed to other infrastructures. +Here is the list of technical dependencies for deploying the SkyTruth 30x30 Dashboard app using these infrastructure resources. Note that these requirements are for this particular deployment strategy, and not dependencies of the SkyTruth 30x30 Dashboard application itself - which can be deployed to other infrastructures. -Before proceeding, be sure you are familiar with all of these tools, as these instructions -will skip over the basics, and assume you are conformable using all of them. +Before proceeding, be sure you are familiar with all of these tools, as these instructions will skip over the basics, and assume you are comfortable using all of them. - [Google Cloud Platform](https://cloud.google.com) - [Terraform](https://www.terraform.io/) @@ -19,14 +18,13 @@ will skip over the basics, and assume you are conformable using all of them. - DNS management - A purchased domain -## Structure +### Terraform project -This project has 2 main sections, each of which with a folder named after it. Each of these sections has a Terraform project, that logically depends on their predecessors. There is a 3rd component to this architecture, which is handled by Github Actions. +This project (in the infrastructure directory) has 2 main sections, each of which with a folder named after it. Each of these sections has a Terraform project, that logically depends on their predecessors. There is a 3rd component to this architecture, which is handled by Github Actions. #### Remote state -Creates a [GCP Storage Bucket](https://cloud.google.com/storage/docs/json_api/v1/buckets) -that will store the Terraform remote state. 
+Creates a [GCP Storage Bucket](https://cloud.google.com/storage/docs/json_api/v1/buckets), which will store the Terraform remote state. #### Base @@ -36,14 +34,16 @@ These resources include, but are not limited to: - Google Compute instance - bastion host to access the GCP infrastructure - Artifact Registry, for docker image storage -- Cloud Run, to host the live applications +- Cloud Run, to host the client application and the API/CMS +- Cloud Functions, for serving the analysis results - Cloud SQL, for relational data storage - Networking resources - Uptime monitoring - Error reporting +- Service accounts and permissions +- GH Secrets -To apply this project, you will need the following GCP permissions. These could probably be further fleshed out to a -more restrictive set of permissions/roles, but this combination is know to work: +To apply this project, you will need the following GCP permissions. These could probably be further fleshed out to a more restrictive set of permissions/roles, but this combination is known to work: - "Editor" role - "Secret Manager Admin" role @@ -53,37 +53,72 @@ more restrictive set of permissions/roles, but this combination is know to work: The output values include access data for some of the resources above. -Please note, there are some actions that need to be carried out manually - you'll get a promt from terraform with links to follow to complete the actions: +Please note, there are some actions that might need to be carried out manually - you'll get a prompt from terraform with links to follow to complete the actions, e.g.: - Compute Engine API needs to be enabled -#### Github Actions - -As part of this infrastructure, Github Actions are used to automatically build and push Docker images to [Artifact Registry](https://cloud.google.com/artifact-registry), and to deploy those images to CloudRun once they are pushed. 
Access by Github to GCP is configured through special authorization rules, automatically set up by the Terraform `base` project above. -These permissions are necessary for the service account that runs the deployment: -- "roles/iam.serviceAccountTokenCreator", -- "roles/iam.serviceAccountUser", -- "roles/run.developer", -- "roles/artifactregistry.reader", -- "roles/artifactregistry.writer" - -There are 2 CloudRun instances, one for the client application and one for the API. Github Secrets are used to provide environment secrets to these instances. Some of the secrets are managed by terraform when provisioning resources (e.g. database credentials for the API). To make it clear, the respective GH Secrets are suffixed "TF_MANAGED". - -## How to deploy +#### How to run Deploying the included Terraform project is done in steps: -- Terraform `apply` the `Remote State` project. -- Terraform `apply` the `Base` project. +- Terraform `apply` the `Remote State` project. This needs to be done once, before applying the base project. +- Terraform `apply` the `Base` project. This needs to be repeated after any changes in the base project. For both commands, please use `-var-file=vars/terraform.tfvars`` to provide the necessary terraform variables. -For the latter step, you will also need to set 2 environment variables: +For the second command, you will also need to set 2 environment variables: - GITHUB_TOKEN (your GH token) - GITHUB_OWNER (Vizzuality) to allow terraform to write to GH Secrets. 
Please note: when provisioning for the first time in a clean project, amend the `cloudrun` module by uncommenting the image setting to be used for first time deployment, which deploys a dummy "hello" image (because actual application images are going to be available in GAR only once the infrastructure is provisioned and the GH Actions deployment passed) +### Github Actions + +As part of this infrastructure, Github Actions are used to automatically apply code updates for the client application, API/CMS and the cloud functions. + +#### Building new code versions + +Deployment to the CloudRun instances is accomplished by building Docker images and pushing them to [Artifact Registry](https://cloud.google.com/artifact-registry). When building the images, environment secrets are injected from GH Secrets as follows: +- for the client application: + - the following secrets set by terraform in STAGING_CLIENT_ENV_TF_MANAGED (in the format of an .env file): + - NEXT_PUBLIC_URL + - NEXT_PUBLIC_API_URL + - NEXT_PUBLIC_ANALYSIS_CF_URL + - NEXT_PUBLIC_ENVIRONMENT + - LOG_LEVEL + - additional secrets set manually in STAGING_CLIENT_ENV (copy to be managed in LastPass) +- for the CMS/API application + - the following secrets set by terraform in STAGING_CMS_ENV_TF_MANAGED (in the format of an .env file): + - HOST + - PORT + - APP_KEYS + - API_TOKEN_SALT + - ADMIN_JWT_SECRET + - TRANSFER_TOKEN_SALT + - JWT_SECRET + - CMS_URL + - DATABASE_CLIENT + - DATABASE_HOST + - DATABASE_NAME + - DATABASE_USERNAME + - DATABASE_PASSWORD + - DATABASE_SSL + +Deployment to the cloud function is accomplished by pushing the source code. Secrets and env vars are set via terraform. + +The workflow is currently set up to deploy to the staging instance when merging to develop. + +#### Service account permissions + +Access by Github to GCP is configured through special authorization rules, automatically set up by the Terraform `base` project above. 
+These permissions are necessary for the service account that runs the deployment: +- "roles/iam.serviceAccountTokenCreator", +- "roles/iam.serviceAccountUser", +- "roles/run.developer", +- "roles/artifactregistry.reader", +- "roles/artifactregistry.writer", +- "roles/cloudfunctions.developer" + ## Maintenance ### Connecting to the Cloud SQL databases diff --git a/infrastructure/base/modules/cloudfunction/main.tf b/infrastructure/base/modules/cloudfunction/main.tf new file mode 100644 index 00000000..2d61c264 --- /dev/null +++ b/infrastructure/base/modules/cloudfunction/main.tf @@ -0,0 +1,133 @@ +resource "google_project_service" "cloud_functions_api" { + service = "cloudfunctions.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "cloud_run_api" { + service = "run.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "cloud_build_api" { + service = "cloudbuild.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "cloud_logging_api" { + service = "logging.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "artifact_registry_api" { + service = "artifactregistry.googleapis.com" + disable_on_destroy = false +} + +# +# Zip file +# +resource "random_id" "default" { + byte_length = 8 +} + +data "archive_file" "default" { + type = "zip" + output_path = "/tmp/function-${var.function_name}-${random_id.default.hex}.zip" + source_dir = var.source_dir +} + +# +# Bucket for source code +# +resource "google_storage_bucket" "bucket" { + name = "${var.function_name}-gcf-source" + location = var.bucket_location + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "${var.function_name}-${timestamp()}.zip" + bucket = google_storage_bucket.bucket.name + source = data.archive_file.default.output_path +} + +# +# Service account +# +resource "google_service_account" "service_account" { + account_id = 
"${var.function_name}-cf-sa" + display_name = "${var.function_name} Cloud Functions Service Account" +} + +resource "google_secret_manager_secret_iam_member" "secret_access" { + count = length(var.secrets) + + secret_id = var.secrets[count.index]["secret"] + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.service_account.email}" + + depends_on = [google_service_account.service_account] +} + +# +# Cloud function +# +resource "google_cloudfunctions2_function" "function" { + name = var.function_name + location = var.region + description = var.description + + build_config { + runtime = var.runtime + entry_point = var.entry_point + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + environment_variables = var.build_environment_variables + } + + service_config { + vpc_connector = var.vpc_connector_name + max_instance_count = var.max_instance_count + min_instance_count = var.min_instance_count + available_memory = var.available_memory + available_cpu = var.available_cpu + timeout_seconds = var.timeout_seconds + environment_variables = var.runtime_environment_variables + dynamic "secret_environment_variables" { + # for each secret in the list of secrets, create a secret_environment_variable + for_each = toset(var.secrets) + content { + key = secret_environment_variables.value["key"] + project_id = secret_environment_variables.value["project_id"] + secret = secret_environment_variables.value["secret"] + version = secret_environment_variables.value["version"] + } + } + service_account_email = google_service_account.service_account.email + } +} + +data "google_iam_policy" "noauth" { + binding { + role = "roles/run.invoker" + members = [ + "allUsers", + ] + } +} + +resource "google_cloud_run_service_iam_policy" "noauth" { + location = google_cloudfunctions2_function.function.location + project = google_cloudfunctions2_function.function.project + 
service = google_cloudfunctions2_function.function.service_config[0].service + + policy_data = data.google_iam_policy.noauth.policy_data + + depends_on = [ + google_cloudfunctions2_function.function, + ] +} \ No newline at end of file diff --git a/infrastructure/base/modules/cloudfunction/outputs.tf b/infrastructure/base/modules/cloudfunction/outputs.tf new file mode 100644 index 00000000..5e7eaefb --- /dev/null +++ b/infrastructure/base/modules/cloudfunction/outputs.tf @@ -0,0 +1,15 @@ +output "function_uri" { + value = google_cloudfunctions2_function.function.service_config[0].uri +} + +output "function_name" { + value = google_cloudfunctions2_function.function.name +} + +locals { + service_name_split = split("/", google_cloudfunctions2_function.function.service_config[0].service) +} + +output "function_service_name" { + value = element(local.service_name_split, length(local.service_name_split) - 1) +} diff --git a/infrastructure/base/modules/cloudfunction/variables.tf b/infrastructure/base/modules/cloudfunction/variables.tf new file mode 100644 index 00000000..90f9ff54 --- /dev/null +++ b/infrastructure/base/modules/cloudfunction/variables.tf @@ -0,0 +1,104 @@ +variable "region" { + type = string + description = "GCP region" +} + +variable "bucket_location" { + type = string + description = "(Required) The GCS location." + default = "US" +} + +variable "function_name" { + type = string + description = "(Required) A user-defined name of the function. Function names must be unique globally." +} + +variable "description" { + type = string + description = "(Optional) Description of the function." +} + +variable "runtime" { + type = string + description = "(Required) The runtime in which the function is going to run. Eg. 'nodejs16', 'python39', 'dotnet3', 'go116', 'java11', 'ruby30', 'php74', etc. Check the official doc for the up-to-date list." 
+} + +variable "available_memory" { + type = string + default = "256M" + description = "(Optional) The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes." +} + +variable "available_cpu" { + type = number + default = 1 + description = "(Optional) The number of CPUs used in a single container instance. Default value is calculated from available memory." +} + +variable "max_instance_count" { + type = number + default = 1 + description = "(Optional) The limit on the maximum number of function instances that may coexist at a given time." +} + +variable "min_instance_count" { + type = number + default = 0 + description = "(Optional) The limit on the minimum number of function instances that may coexist at a given time." +} + +variable "timeout_seconds" { + type = number + default = 120 + description = "(Optional) The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds." +} + +variable "entry_point" { + type = string + description = "(Optional) Name of the function that will be executed when the Google Cloud Function is triggered." +} + +#variable "zip_path" { +# type = string +# description = "Path to the zip file containing the function code" +#} + +variable "source_dir" { + type = string + description = "Path to source directory of the function." 
+} + +variable "vpc_connector_name" { + type = string + description = "Name of the VPC Access Connector" +} + +variable "build_environment_variables" { + type = map(string) + description = "Key-value pairs of env vars to make available to the container" + default = {} +} + +variable "runtime_environment_variables" { + type = map(string) + description = "Key-value pairs of env vars to make available to the container" + default = {} +} + +variable "secrets" { + # List of objects for the secret_environment_variables block + # The secret_environment_variables block supports: + # key - (Required) Name of the environment variable. + # project_id - (Required) Project identifier (preferrably project number but can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project assuming that the secret exists in the same project as of the function. + # secret - (Required) Name of the secret in secret manager (not the full resource name). + # version - (Required) Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables as any updates to the secret value is not reflected until new instances start. 
+ type = list(object({ + key = string + project_id = string + secret = string + version = string + })) + description = "List of secrets to make available to the container" + default = [] +} \ No newline at end of file diff --git a/infrastructure/base/modules/env/main.tf b/infrastructure/base/modules/env/main.tf index 9a909fc9..a36952b9 100644 --- a/infrastructure/base/modules/env/main.tf +++ b/infrastructure/base/modules/env/main.tf @@ -147,22 +147,35 @@ locals { ADMIN_JWT_SECRET = random_password.admin_jwt_secret.result TRANSFER_TOKEN_SALT = random_password.transfer_token_salt.result JWT_SECRET = random_password.jwt_secret.result - # CMS_URL = "${module.backend_cloudrun.cloudrun_service_url}/" CMS_URL = "https://${local.domain}/${var.backend_path_prefix}/" DATABASE_CLIENT = "postgres" DATABASE_HOST = module.database.database_host - DATABASE_NAME = module.database.database_name # var.database_name - DATABASE_USERNAME = module.database.database_user # var.database_user - DATABASE_PASSWORD = module.database.database_password # module.postgres_application_user_password.secret_name + DATABASE_NAME = module.database.database_name + DATABASE_USERNAME = module.database.database_user + DATABASE_PASSWORD = module.database.database_password DATABASE_SSL = false } client_env = { NEXT_PUBLIC_URL = "https://${local.domain}" NEXT_PUBLIC_API_URL = "https://${local.domain}/${var.backend_path_prefix}/api/" + NEXT_PUBLIC_ANALYSIS_CF_URL = module.analysis_cloud_function.function_uri NEXT_PUBLIC_ENVIRONMENT = "production" LOG_LEVEL = "info" } + analysis_cloud_function_env = { + DATABASE_CLIENT = "postgres" + DATABASE_HOST = module.database.database_host + DATABASE_NAME = module.database.database_name + DATABASE_USERNAME = module.database.database_user + DATABASE_SSL = false + } + analysis_cloud_function_secrets = [{ + key = "DATABASE_PASSWORD" + project_id = var.gcp_project_id + secret = module.postgres_application_user_password.secret_name + version = 
module.postgres_application_user_password.latest_version + }] } locals { @@ -174,6 +187,7 @@ locals { client_repository = "${upper(var.environment)}_CLIENT_REPOSITORY" cms_service = "${upper(var.environment)}_CMS_SERVICE" client_service = "${upper(var.environment)}_CLIENT_SERVICE" + analysis_cf_name = "${upper(var.environment)}_ANALYSIS_CF_NAME" } module "github_values" { @@ -188,6 +202,7 @@ module "github_values" { (local.client_repository) = module.frontend_gcr.repository_name (local.cms_service) = module.backend_cloudrun.name (local.client_service) = module.frontend_cloudrun.name + (local.analysis_cf_name) = module.analysis_cloud_function.function_name (local.cms_env_file) = join("\n", [for key, value in local.cms_env : "${key}=${value}"]) (local.client_env_file) = join("\n", [for key, value in local.client_env : "${key}=${value}"]) } @@ -219,7 +234,8 @@ variable "roles" { "roles/iam.serviceAccountUser", "roles/run.developer", "roles/artifactregistry.reader", - "roles/artifactregistry.writer" + "roles/artifactregistry.writer", + "roles/cloudfunctions.developer" ] } @@ -240,3 +256,18 @@ module "load_balancer" { dns_managed_zone_name = var.dns_zone_name backend_path_prefix = var.backend_path_prefix } + +module "analysis_cloud_function" { + source = "../cloudfunction" + region = var.gcp_region + vpc_connector_name = module.network.vpc_access_connector_name + function_name = "${var.project_name}-analysis" + description = "Analysis Cloud Function" + source_dir = "${path.root}/../../cloud_functions/analysis" + runtime = "python312" + entry_point = "index" + runtime_environment_variables = local.analysis_cloud_function_env + secrets = local.analysis_cloud_function_secrets + + depends_on = [module.postgres_application_user_password] +} diff --git a/infrastructure/base/modules/env/outputs.tf b/infrastructure/base/modules/env/outputs.tf index bb2dc253..1574d904 100644 --- a/infrastructure/base/modules/env/outputs.tf +++ b/infrastructure/base/modules/env/outputs.tf @@ -5,3 
+5,7 @@ output "site_url" { output "api_url" { value = "${local.domain}/backend/api" } + +output "analysis_cloud_function_url" { + value = module.analysis_cloud_function.function_uri +} diff --git a/infrastructure/base/modules/secret_value/main.tf b/infrastructure/base/modules/secret_value/main.tf index ade5936c..100d5890 100644 --- a/infrastructure/base/modules/secret_value/main.tf +++ b/infrastructure/base/modules/secret_value/main.tf @@ -29,3 +29,7 @@ resource "google_secret_manager_secret_version" "backend_app_secret" { secret_data = var.use_random_value ? random_password.secret_value[0].result : var.value } + +data "google_secret_manager_secret_version" "latest" { + secret = google_secret_manager_secret.secret.id +} diff --git a/infrastructure/base/modules/secret_value/outputs.tf b/infrastructure/base/modules/secret_value/outputs.tf index 7e9d4c55..c3e41760 100644 --- a/infrastructure/base/modules/secret_value/outputs.tf +++ b/infrastructure/base/modules/secret_value/outputs.tf @@ -9,3 +9,7 @@ output "secret_id" { output "secret_value" { value = google_secret_manager_secret_version.backend_app_secret.secret_data } + +output "latest_version" { + value = data.google_secret_manager_secret_version.latest.version +} \ No newline at end of file diff --git a/infrastructure/base/outputs.tf b/infrastructure/base/outputs.tf index 0576af78..1b8eb21f 100644 --- a/infrastructure/base/outputs.tf +++ b/infrastructure/base/outputs.tf @@ -6,6 +6,10 @@ output "staging_api_url" { value = module.staging.api_url } +output "staging_analysis_cloud_function_url" { + value = module.staging.analysis_cloud_function_url +} + output "dns_name_servers" { value = module.dns.dns_name_servers } @@ -17,3 +21,7 @@ output "dns_name_servers" { # output "production_api_url" { # value = module.production.api_url # } + +# output "production_analysis_cloud_function_url" { +# value = module.production.analysis_cloud_function_url +# } \ No newline at end of file From 
5cdb4bb1b428601f4dbdfa4b75f61ff0cefcb73e Mon Sep 17 00:00:00 2001 From: Agnieszka Figiel Date: Wed, 22 Nov 2023 14:03:02 +0100 Subject: [PATCH 3/3] Update README.md --- infrastructure/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/infrastructure/README.md b/infrastructure/README.md index 24df6ee3..5bb249ad 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -4,6 +4,8 @@ While the application can be deployed in any server configuration that supports - a [Terraform](https://www.terraform.io/) project that you can use to easily and quickly provision the resources and deploy the code using [Google Cloud Platform](https://cloud.google.com/), - and a GH Actions workflow to deploy code updates. +![GCP infrastructure - GH Actions drawio](https://github.com/Vizzuality/skytruth-30x30/assets/134055/c20e52d4-89f0-42e2-be25-e6b76a3a4fe6) + ## Dependencies Here is the list of technical dependencies for deploying the SkyTruth 30x30 Dashboard app using these infrastructure resources. Note that these requirements are for this particular deployment strategy, and not dependencies of the SkyTruth 30x30 Dashboard application itself - which can be deployed to other infrastructures.