diff --git a/.kitchen.yml b/.kitchen.yml index 30423c3a..aa5a3b78 100644 --- a/.kitchen.yml +++ b/.kitchen.yml @@ -147,3 +147,16 @@ suites: backend: gcp controls: - gcp + - name: "bq-log-alerting" + driver: + root_module_directory: test/fixtures/bq-log-alerting/ + verifier: + systems: + - name: local + backend: local + controls: + - gcloud + - name: inspec-gcp + backend: gcp + controls: + - gcp diff --git a/examples/bq-log-alerting/README.md b/examples/bq-log-alerting/README.md new file mode 100644 index 00000000..b265efb5 --- /dev/null +++ b/examples/bq-log-alerting/README.md @@ -0,0 +1,68 @@ +# Example: BigQuery Log Alerting + +This example deploys the BigQuery Log Alerting submodule in an existing project. + +## Prerequisites + +To run this example, you'll need: + +- An existing "logging" project +- A [Log export](https://github.com/terraform-google-modules/terraform-google-log-export) with a [BigQuery destination](https://github.com/terraform-google-modules/terraform-google-log-export/tree/master/modules/bigquery) in the logging project. The export filter should include at least: + - "logName: /logs/cloudaudit.googleapis.com%2Factivity" + - "logName: /logs/cloudaudit.googleapis.com%2Fdata_access" + - "logName: /logs/compute.googleapis.com%2Fvpc_flows" +- A Terraform Service Account with the [IAM Roles](../../../modules/bq-log-alerting/README.md) listed in the submodule documentation. +- To enable in the logging project the [APIs](../../../modules/bq-log-alerting/README.md) listed in the submodule documentation. +- To enable in the logging project [Google App Engine](https://cloud.google.com/appengine). +To enable it manually use: + +```shell +gcloud app create \ +--region= \ +--project= +``` + +**Note 1:** The selected Google App Engine region cannot be changed after creation and only project Owners (`role/owner`) can enable Google App Engine. + +**Note 2:** On deployment a Security Command Center Source called "BQ Log Alerts" will be created. 
If this source already exists due to the submodule having been deployed at least once before, you need to obtain the existing Source name to be provided in the Terraform variable **source_name**. +Run: + +```shell +gcloud scc sources describe \ +--source-display-name="BQ Log Alerts" \ +--format="value(name)" \ +--impersonate-service-account=<TERRAFORM_SERVICE_ACCOUNT_EMAIL> +``` + +The source name format is `organizations/<ORG_ID>/sources/<SOURCE_ID>`. + +The [terraform-example-foundation](https://github.com/terraform-google-modules/terraform-example-foundation) can be used as a reference for the creation of the logging project, the service account and the log export. + +## Instructions + +1. Run `terraform init` +1. Run `terraform plan`, provide the requested variable values, and review the output. +1. Run `terraform apply` + + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| logging\_project | The project to deploy the submodule | `string` | n/a | yes | +| org\_id | The organization ID for the associated services | `string` | n/a | yes | +| region | Region for BigQuery resources. | `string` | n/a | yes | +| source\_name | The Security Command Center Source name for the "BQ Log Alerts" Source if the source had been created before. 
The format is `organizations//sources/` | `string` | `""` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| bq\_views\_dataset\_id | The ID of the BigQuery Views dataset | +| cloud\_function\_service\_account\_email | The email of the service account created to be used by the cloud function | +| cloud\_scheduler\_job | The Cloud Scheduler job instance | +| cloud\_scheduler\_job\_name | The name of the Cloud Scheduler job created | +| pubsub\_topic\_name | PubSub topic name | +| source\_name | The Security Command Center Source name for the "BQ Log Alerts" Source | + + diff --git a/examples/bq-log-alerting/main.tf b/examples/bq-log-alerting/main.tf new file mode 100644 index 00000000..7f984bfd --- /dev/null +++ b/examples/bq-log-alerting/main.tf @@ -0,0 +1,31 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/***************************** + Provider configuration + ****************************/ +provider "google" { + version = "~> 3.30" +} + +module "bq-log-alerting" { + source = "../..//modules/bq-log-alerting" + logging_project = var.logging_project + region = var.region + org_id = var.org_id + source_name = var.source_name + dry_run = false +} diff --git a/examples/bq-log-alerting/outputs.tf b/examples/bq-log-alerting/outputs.tf new file mode 100644 index 00000000..a711a41c --- /dev/null +++ b/examples/bq-log-alerting/outputs.tf @@ -0,0 +1,45 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +output "cloud_scheduler_job_name" { + value = module.bq-log-alerting.cloud_scheduler_job_name + description = "The name of the Cloud Scheduler job created" +} + +output "pubsub_topic_name" { + value = module.bq-log-alerting.pubsub_topic_name + description = "PubSub topic name" +} + +output "cloud_scheduler_job" { + value = module.bq-log-alerting.cloud_scheduler_job + description = "The Cloud Scheduler job instance" +} + +output "source_name" { + value = module.bq-log-alerting.source_name + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source" +} + +output "cloud_function_service_account_email" { + value = module.bq-log-alerting.cloud_function_service_account_email + description = "The email of the service account created to be used by the cloud function" +} + +output "bq_views_dataset_id" { + value = module.bq-log-alerting.bq_views_dataset_id + description = "The ID of the BigQuery Views dataset" +} diff --git a/examples/bq-log-alerting/variables.tf b/examples/bq-log-alerting/variables.tf new file mode 100644 index 00000000..eed45161 --- /dev/null +++ b/examples/bq-log-alerting/variables.tf @@ -0,0 +1,36 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +variable "org_id" { + description = "The organization ID for the associated services" + type = string +} + +variable "region" { + description = "Region for BigQuery resources." 
+ type = string +} + +variable "source_name" { + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source if the source had been created before. The format is `organizations//sources/`" + type = string + default = "" +} + +variable "logging_project" { + description = "The project to deploy the submodule" + type = string +} diff --git a/examples/bq-log-alerting/versions.tf b/examples/bq-log-alerting/versions.tf new file mode 100644 index 00000000..1285cf41 --- /dev/null +++ b/examples/bq-log-alerting/versions.tf @@ -0,0 +1,19 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +terraform { + required_version = ">=0.12.6, <0.14" +} diff --git a/modules/bq-log-alerting/.gitignore b/modules/bq-log-alerting/.gitignore new file mode 100644 index 00000000..aba70783 --- /dev/null +++ b/modules/bq-log-alerting/.gitignore @@ -0,0 +1 @@ +logging/cloud_function.zip diff --git a/modules/bq-log-alerting/README.md b/modules/bq-log-alerting/README.md new file mode 100644 index 00000000..6893fd85 --- /dev/null +++ b/modules/bq-log-alerting/README.md @@ -0,0 +1,178 @@ +# Log Export: BigQuery Log Alerting + +This submodule allows you to configure a BigQuery Log Alerting tool on Google Cloud Platform. 
It uses the log export created in the root module to create findings in the [Security Command Center](https://cloud.google.com/security-command-center) under a source called "BQ Log Alerts" based on rules defined as views in BigQuery. + +This tool **does not** intend to provide an exhaustive and complete security solution, but rather helps you to have **insights** about events in your infrastructure and what can be monitored using Cloud Logging, BigQuery and Security Command Center. + +## Basic Architecture + +The overview of this tool is as follows: + +* [Log sinks](https://github.com/terraform-google-modules/terraform-google-log-export) sends all [Cloud Audit Logs](https://cloud.google.com/logging/docs/audit) and [VPC Flow Logs](https://cloud.google.com/vpc/docs/flow-logs) to [BigQuery](https://github.com/terraform-google-modules/terraform-google-log-export/tree/master/modules/bigquery) located in a centralized logging project. +* Custom views in BigQuery are created that look for specific activities in these logs, defined by a SQL query, e.g. looking for events that match `v1.compute.routes.insert` or `v1.compute.routes.delete`. +* On a regular interval (`job_schedule` variable , default 15 minutes), [Cloud Scheduler](https://cloud.google.com/scheduler/docs) writes a message containing a time window parameter (`time_window_quantity` and `time_window_unit` variables, default 20 minutes) to [Cloud Pub/Sub](https://cloud.google.com/pubsub). +* This 15 minute schedule with 20 minute window is used to ensure some overlap between runs of the function, to catch cases where events may occur just as the [Cloud Function](https://cloud.google.com/functions) run has kicked-off. +* The message posted in Cloud Pub/Sub acts as the trigger for the Cloud Function which reads from the views that exist (one for each use case) and writes any events it finds to Security Command Center. 
+These events are called "findings" in Security Command Center parlance and represent events that are actionable, e.g. you can close them after investigation. +* Any duplicate findings are ignored, as the unique ID for the finding (an MD5 hash calculated from the concatenation of the BigQuery view name, the eventTimestamp, the callerIp, the principalEmail and the resourceName) is generated describing a particular event, and is thus deterministic. + +This represents the overall flow of alerts in this tool. + +**Note:** If you want to change the Cloud Scheduler [cron job interval](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) (`job_schedule`) and the time window parameters (`time_window_quantity` and `time_window_unit`) make sure to have some **overlap between runs of the function** so that there is no gap where log entries could be ignored. + +### Cloud Logging and BigQuery + +Before using this submodule it is necessary to use the [root module](https://github.com/terraform-google-modules/terraform-google-log-export) to create a log export and the [BigQuery submodule](https://github.com/terraform-google-modules/terraform-google-log-export/tree/master/modules/bigquery) to create a destination for the logs. + +The log export filter must have at least the logs listed in the general requirements section of this README to be used by the Log Alerting tool. + +### Security Command Center + +Security Command Center is an organization-level tool that creates a single pane of glass interface for all security findings in your Google Cloud Platform projects. + +Custom findings, based on events, can be configured for a variety of sources and can be [exported](https://cloud.google.com/security-command-center/docs/how-to-notifications) to other tools or notification systems for follow-up, triage, and investigation. +For this project, we make use of a custom source for all findings. 
+ +To create this source we need to grant the organization level Security Command Center role "Security Center Sources Editor" (`roles/securitycenter.sourcesEditor`) to the Terraform service account. + +Findings can be filtered based on "category", which corresponds to the particular use case for the alert. +In order to create findings, we grant the BigQuery Log Alerting Cloud Function service account the "Security Center Findings Editor" role (`roles/securitycenter.findingsEditor`). + +**Note:** Security Command Center sources can only be created with a service account and +for this to work, the Security Command Center API needs to be enabled in the Terraform admin project. + +## Usage + +Basic usage of this submodule is as follows: + +```hcl +module "bq-log-alerting" { + source = "terraform-google-modules/log-export/google//modules/bq-log-alerting" + logging_project = "<LOGGING_PROJECT_ID>" + region = "<REGION>" + org_id = "<ORG_ID>" + dry_run = false +} +``` + +The [examples](../../examples) directory contains an example for deploying the BigQuery Log Alerting tool. + +**Note 1:** On deployment, a Security Command Center Source called "BQ Log Alerts" will be created. If this source already exists due to the tool having been deployed at least once before in the organization, obtain the existing Source name to be used in the Terraform variable **source_name**. Run: + +```shell +gcloud scc sources describe \ +--source-display-name="BQ Log Alerts" \ +--format="value(name)" \ +--impersonate-service-account=<TERRAFORM_SERVICE_ACCOUNT_EMAIL> +``` + +The **source_name** format is `organizations/<ORG_ID>/sources/<SOURCE_ID>`. + +**Note 2:** The submodule has a **dry_run** optional mode (`dry_run = true`). In this mode, instead of creating the finding in Security Command Center the submodule writes the finding to Cloud Logging. You can use the filter `resource.labels.function_name="generate-alerts" AND "DRY_RUN: scc finding:"` in the [Logs Explorer](https://console.cloud.google.com/logs/viewer) to find the logs created. 
+ +## Monitoring + +You can [monitor the execution of the Cloud Function](https://cloud.google.com/functions/docs/monitoring) using: + +* Google [Error Reporting](https://cloud.google.com/error-reporting/docs) and checking errors in the [Error Reporting dashboard](https://cloud.google.com/error-reporting/docs/viewing-errors) +* Google [Monitoring](https://cloud.google.com/monitoring/docs) adding a graph based on [Cloud Functions metrics](https://cloud.google.com/monitoring/api/metrics_gcp#gcp-cloudfunctions) for `function/execution_count` to your dashboard +* Google [Cloud Logging](https://cloud.google.com/logging/docs): + * Filtering and exploring logs in the [Log Explorer](https://cloud.google.com/logging/docs/view/logs-viewer-interface) with query `resource.labels.function_name="generate-alerts"` + * Creating a counter [User-defined metric](https://cloud.google.com/logging/docs/logs-based-metrics) to be used in a Cloud Monitoring dashboard with filter: `resource.labels.function_name="generate-alerts" AND severity>=ERROR` + +### Budget Alerts + +We recommend configuring a [billing budget](https://cloud.google.com/billing/docs/how-to/budgets) in the logging project to monitor and alert on the spending of the tool. + +## Requirements + +The following sections describe the requirements which must be met in +order to invoke this submodule. + +### General + +* You need an existing "logging" project. +* A [Log export](https://github.com/terraform-google-modules/terraform-google-log-export) with a [BigQuery destination](https://github.com/terraform-google-modules/terraform-google-log-export/tree/master/modules/bigquery) in the logging project. 
The export filter should include at least: + * "logName: /logs/cloudaudit.googleapis.com%2Factivity" + * "logName: /logs/cloudaudit.googleapis.com%2Fdata_access" + * "logName: /logs/compute.googleapis.com%2Fvpc_flows" +* It is necessary to use a service account to authenticate the Google Terraform provider to be able to create the Security Command Center "BQ Log Alerts" Source. +This is a restriction of the Security Command Center API +* [Google App Engine](https://cloud.google.com/appengine) must be enabled in the logging project. To enable it manually use: + +```shell +gcloud app create \ +--region= \ +--project= +``` + +**Note:** The selected region cannot be changed after creation and only project Owners (`role/owner`) can enable Google App Engine. If you are not an Owner of the project, but the service account is, you can add `--impersonate-service-account=` to the command like it was used when the Security Command Center source was created. + +### IAM Roles + +The service account which will be used to invoke this submodule must have the following IAM roles: + +* Project level + * BigQuery Data Owner: `roles/bigquery.dataOwner` + * Cloud Functions Developer: `roles/cloudfunctions.developer` + * Cloud Scheduler Admin: `roles/cloudscheduler.admin` + * Pub/Sub Admin: `roles/pubsub.admin` + * Service Account Admin: `roles/iam.serviceAccountAdmin` + * Service Account User: `roles/iam.serviceAccountUser` + * Storage Admin: `roles/storage.admin` +* Organization level + * Security Admin: `roles/iam.securityAdmin` + * Security Center Sources Editor: `roles/securitycenter.sourcesEditor` + +If you are deploying this submodule in the logging project of the Terraform Example Foundation using the Terraform service account created in the Foundation, it already has all the necessary permissions in the logging project. 
+ +### APIs + +The project against which this submodule will be invoked must have the +following APIs enabled: + +* App Engine Admin API: `appengine.googleapis.com` +* BigQuery API: `bigquery.googleapis.com` +* Cloud Build API: `cloudbuild.googleapis.com` +* Cloud Functions API: `cloudfunctions.googleapis.com` +* Cloud Logging API: `logging.googleapis.com` +* Cloud Pub/Sub API: `pubsub.googleapis.com` +* Cloud Resource Manager API: `cloudresourcemanager.googleapis.com` +* Cloud Scheduler API: `cloudscheduler.googleapis.com` +* Cloud Storage API: `storage-component.googleapis.com` +* Identity and Access Management (IAM) API: `iam.googleapis.com` +* Security Command Center API: `securitycenter.googleapis.com` + +### Software Dependencies + +* [Terraform][terraform-site] v0.12 +* [Terraform Provider for Google Cloud Platform][terraform-provider-gcp-site] v3.25.0 + + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| dry\_run | Enable dry\_run execution of the Cloud Function. If is true it will just print the object the would be converted as a finding | `bool` | `false` | no | +| function\_memory | The amount of memory in megabytes allotted for the Cloud function to use. | `number` | `"256"` | no | +| function\_timeout | The amount of time in seconds allotted for the execution of the function. | `number` | `"540"` | no | +| job\_schedule | The schedule on which the job will be executed in the unix-cron string format (https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules#defining_the_job_schedule). Defaults to 15 minutes. | `string` | `"*/15 * * * *"` | no | +| logging\_project | The project to deploy the tool. | `string` | n/a | yes | +| org\_id | The organization ID for the associated services | `string` | n/a | yes | +| region | Region for BigQuery resources. 
| `string` | n/a | yes | +| source\_name | The Security Command Center Source name for the "BQ Log Alerts" Source if the source had been created before. The format is `organizations//sources/` | `string` | `""` | no | +| time\_window\_quantity | The time window quantity used in the query in the view in BigQuery. | `string` | `"20"` | no | +| time\_window\_unit | The time window unit used in the query in the view in BigQuery. Valid values are 'MICROSECOND', 'MILLISECOND', 'SECOND', 'MINUTE', 'HOUR' | `string` | `"MINUTE"` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| bq\_views\_dataset\_id | The ID of the BigQuery Views dataset | +| cloud\_function\_service\_account\_email | The email of the service account created to be used by the Cloud Function | +| cloud\_scheduler\_job | The Cloud Scheduler job instance | +| cloud\_scheduler\_job\_name | The name of the Cloud Scheduler job created | +| pubsub\_topic\_name | Pub/Sub topic name | +| source\_name | The Security Command Center Source name for the "BQ Log Alerts" Source | + + diff --git a/modules/bq-log-alerting/logging/cloud_function/index.js b/modules/bq-log-alerting/logging/cloud_function/index.js new file mode 100644 index 00000000..73e44352 --- /dev/null +++ b/modules/bq-log-alerting/logging/cloud_function/index.js @@ -0,0 +1,153 @@ +/* +Copyright 2020 Google Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +const {BigQuery} = require('@google-cloud/bigquery'); +const {SecurityCenterClient} = require('@google-cloud/security-center') +const MD5 = require('crypto-js/md5'); + +const bigquery = new BigQuery(); +// These constants will need to be tuned to your preference +const project = process.env.LOGGING_PROJECT; +const dataset_name = 'views'; +const invocation_name = 'securityalert_invocation'; +const result_name = 'securityalert_result'; +const source_name = process.env.CSCC_SOURCE; +const dry_run = (process.env.DRY_RUN === 'true'); + +function createFindingObject (source_name, labels) { + const eventTime = new Date(labels.eventTimestamp.value).getTime(); + const sourceProperties = {}; + for (const key in labels) { + if (key !== 'eventTimestamp' && + key !== 'resourceName' && + key !== 'query') { + sourceProperties[key] = { stringValue: "" + labels[key] }; + } + } + return { + parent: "" + source_name, + findingId: MD5("" + labels.query + labels.eventTimestamp + labels.callerIp + labels.principalEmail + labels.resourceName), + finding: { + state: 'ACTIVE', + // Resource the finding is associated with. This is an + // example any resource identifier can be used. + resourceName: labels.resourceName, + // A free-form category. + category: "" + labels.query, + // The time associated with discovering the issue. 
+ eventTime: { + seconds: Math.floor(eventTime / 1000), + nanos: (eventTime % 1000) * 1e6, + }, + sourceProperties: sourceProperties + } + } +} + +async function createFinding(cscc_client, source_name, labels) { + try { + finding = createFindingObject(source_name, labels) + if (dry_run) { + console.log('DRY_RUN: scc finding: ', JSON.stringify(finding)); + } else { + const [newFinding] = await cscc_client.createFinding(finding); + } + return 1; + } catch (err) { + let errorMsg = "" + err; + if (!errorMsg.includes("6 ALREADY_EXISTS")) { + throw new Error(errorMsg); + } else { + console.log("createFinding: " + errorMsg); + } + } + return 0; +} + +async function createFindingsFromResult(cscc_client, source_name, tableName, result) { + let count = 0; + for (var i = 0; i < result.length; i++) { + const res = result[i]; + const labels = { query: tableName }; + for (var j = 0; j < res.length; j++) { + const row = res[j]; + for (var key in row) { + if (row.hasOwnProperty(key)) { + if (key !== 'receiveTimestamp') { + labels[key] = row[key]; + } else { + // receiveTimestamp is returned as a complex type. + labels[key] = row[key]['value']; + } + } + } + if (labels.hasOwnProperty('resourceName')) { + count += await createFinding(cscc_client, source_name, labels); + } + } + } + return count; +} + +exports.cronPubSub = async function (event, context, callback) { + const cscc_client = new SecurityCenterClient(); + const pubsubMessage = event; + const payload = JSON.parse(Buffer.from(pubsubMessage.data, 'base64').toString()||'{}'); + const payloadQuantity = payload ? payload.quantity : ''; + const attributeQuantity = pubsubMessage.attributes ? pubsubMessage.attributes.quantity : ''; + const quantity = parseInt(payloadQuantity ? payloadQuantity : (attributeQuantity ? attributeQuantity : 1)); + const payloadUnit = payload ? payload.unit : ''; + const attributeUnit = pubsubMessage.attributes ? pubsubMessage.attributes.unit : ''; + const unit = payloadUnit ? 
payloadUnit : (attributeUnit ? attributeUnit : 'HOUR'); + const createTime = new Date(); + + if (!Number.isInteger(quantity)) { + throw new Error(`Quantity ${quantity} is not an integer.`); + } + if (unit !== 'MICROSECOND' && unit !== 'MILLISECOND' && unit !== 'SECOND' && unit !== 'MINUTE' && unit !== 'HOUR'){ + throw new Error(`Unit ${unit} is not in valid list (see docs for BigQuery standard SQL)`); + } + + console.log('quantity: ' + quantity + ' unit: '+ unit); + + const dataset = bigquery.dataset(dataset_name); + const data = await dataset.getTables(); + + const table_ids = data[0].map(table => table.id); + let findingCount = 0; + for (var i = 0; i < table_ids.length; i++) { + const table_name = table_ids[i]; + var query_str = 'SELECT * FROM `' + `${project}.${dataset_name}.${table_name}`; + query_str += '` WHERE receiveTimestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL '; + query_str += `${quantity} ${unit}` + ");"; + console.log(query_str); + var config_obj = { query: query_str, useLegacySql: false }; + try { + const result = await bigquery.query(config_obj); + findingCount += await createFindingsFromResult(cscc_client, source_name, table_name, result); + } catch (err) { + console.error("" + err); + throw new Error("" + err); + } + } + if (findingCount == 0) { + console.log('No new findings'); + } else { + console.log(findingCount + ' new findings'); + } + console.log("Successfully wrote metrics for all tables."); + callback(); +}; diff --git a/modules/bq-log-alerting/logging/cloud_function/package.json b/modules/bq-log-alerting/logging/cloud_function/package.json new file mode 100644 index 00000000..91d95ac6 --- /dev/null +++ b/modules/bq-log-alerting/logging/cloud_function/package.json @@ -0,0 +1,7 @@ +{ + "dependencies": { + "@google-cloud/bigquery": ">=0.10.0", + "@google-cloud/security-center": ">=3.0.1", + "crypto-js": "^3.0.0" + } +} diff --git a/modules/bq-log-alerting/main.tf b/modules/bq-log-alerting/main.tf new file mode 100644 index 
00000000..68838ae4 --- /dev/null +++ b/modules/bq-log-alerting/main.tf @@ -0,0 +1,107 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#-----------------# +# Local variables # +#-----------------# +locals { + actual_source_name = var.source_name != "" ? var.source_name : google_scc_source.bq_log_alerts[0].name +} + +#--------------------------# +# Service account creation # +#--------------------------# +resource "random_string" "service_account" { + length = 6 + upper = "false" + number = "false" + special = "false" +} + +resource "google_service_account" "gcf_service_account" { + project = var.logging_project + account_id = "sa-bq-log-alerting-cf-${random_string.service_account.result}" + display_name = "BigQuery Log Alerting Cloud Function Service Account" +} + +#--------------------------------# +# Service account IAM membership # +#--------------------------------# +resource "google_project_iam_member" "gcf-big-query" { + project = var.logging_project + role = "roles/bigquery.admin" + member = "serviceAccount:${google_service_account.gcf_service_account.email}" +} + +resource "google_organization_iam_member" "gcf-security-findings" { + org_id = var.org_id + role = "roles/securitycenter.findingsEditor" + member = "serviceAccount:${google_service_account.gcf_service_account.email}" +} + +#-----------------------------------------# +# Security Command Center Source creation # 
+#-----------------------------------------# +resource "google_scc_source" "bq_log_alerts" { + count = var.source_name == "" ? 1 : 0 + display_name = "BQ Log Alerts" + organization = var.org_id + description = "Findings from BQ Alerting tool" +} + +#------------------------# +# Bigquery views dataset # +#------------------------# +resource "google_bigquery_dataset" "views_dataset" { + dataset_id = "views" + friendly_name = "Log Views" + description = "Log view dataset" + location = "US" + project = var.logging_project + + labels = { + env = "default" + } +} + +#-----------------------------# +# Scheduled function creation # +#-----------------------------# +module "bq-log-alerting" { + source = "terraform-google-modules/scheduled-function/google" + version = "1.5.1" + project_id = var.logging_project + job_name = "bq-alerts-event-trigger" + job_description = "publish to pubsub to trigger cloud function" + job_schedule = var.job_schedule + message_data = base64encode("{\"unit\":\"${var.time_window_unit}\",\"quantity\":\"${var.time_window_quantity}\"}") + function_description = "read from BQ view to generate alerts" + function_entry_point = "cronPubSub" + function_source_directory = "${path.module}/logging/cloud_function" + function_name = "generate-alerts" + function_runtime = "nodejs10" + function_service_account_email = google_service_account.gcf_service_account.email + function_timeout_s = var.function_timeout + function_available_memory_mb = var.function_memory + topic_name = "bq-alerts-function-trigger" + region = var.region + + function_environment_variables = { + CSCC_SOURCE = local.actual_source_name + LOGGING_PROJECT = var.logging_project + DRY_RUN = var.dry_run + } +} diff --git a/modules/bq-log-alerting/outputs.tf b/modules/bq-log-alerting/outputs.tf new file mode 100644 index 00000000..b5364147 --- /dev/null +++ b/modules/bq-log-alerting/outputs.tf @@ -0,0 +1,45 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +output "cloud_scheduler_job_name" { + value = module.bq-log-alerting.name + description = "The name of the Cloud Scheduler job created" +} + +output "pubsub_topic_name" { + value = module.bq-log-alerting.pubsub_topic_name + description = "Pub/Sub topic name" +} + +output "cloud_scheduler_job" { + value = module.bq-log-alerting.scheduler_job + description = "The Cloud Scheduler job instance" +} + +output "source_name" { + value = local.actual_source_name + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source" +} + +output "cloud_function_service_account_email" { + value = google_service_account.gcf_service_account.email + description = "The email of the service account created to be used by the Cloud Function" +} + +output "bq_views_dataset_id" { + value = google_bigquery_dataset.views_dataset.id + description = "The ID of the BigQuery Views dataset" +} diff --git a/modules/bq-log-alerting/use-cases/README.md b/modules/bq-log-alerting/use-cases/README.md new file mode 100644 index 00000000..23531ab0 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/README.md @@ -0,0 +1,146 @@ +# Use Cases + +After installing the BigQuery Log Alerting tool, you will need to add some use cases to see it working. +We provided a few examples in this submodule, with some queries that can be use to populate Security Command Center with findings. + +## Prerequisites + +1. BigQuery Log Alerting tool installed. +1. BigQuery log sinks created. 
+1. Tables created by the sinks logs exported ( e.g.: `cloudaudit_googleapis_com_activity_*`) +1. To have permission to create BigQuery views in the logging project. + +## General Usage + +In this folder you will find several SQL files, you must change the variables `${project}`and `${dataset}` in each file +to the real logging project ID and log sink dataset name created in the BigQuery log sinks deploy. +Follow the specific usage of **each use case** for more details. + +After this, you can follow the instruction of [How create Views in BigQuery](https://cloud.google.com/bigquery/docs/views#console) using the queries on the files. + +You **must** save the views on the `views` dataset for the tool to work. +You can use the filename as the view name, if you want. +The view name will be used for the **category** of the finding that will be created. + +## Use Case descriptions + +### Services not on Allow List + +- Query File: `non_allowlisted_services.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case aims to alert when a service outside of the accepted list of services is enabled for a project. This list is hardcoded in the query. The current list is: + - dns.googleapis.com + - iap.googleapis.com + - compute.googleapis.com + - file.googleapis.com + - stackdriver.googleapis.com +- Usage: If you want modify the list of services just change the line `dns.googleapis.com|iap.googleapis.com|compute.googleapis.com|file.googleapis.com|stackdriver.googleapis.com` adding or removing the services. 
Examples: + + - To add the service `translate.googleapis.com` just add `|translate.googleapis.com` in the line so the end result be like `dns.googleapis.com|iap.googleapis.com|compute.googleapis.com|file.googleapis.com|stackdriver.googleapis.com|translate.googleapis.com` + - To remove the service `file.googleapis.com` just remove `|file.googleapis.com` from the line so the end result be like `dns.googleapis.com|iap.googleapis.com|compute.googleapis.com|stackdriver.googleapis.com` + +- Testing: Just enable a service that is not present in the list like `translate.googleapis.com`. You can enable a service running the following command: + +```bash +export project_id= +gcloud services enable \ +translate.googleapis.com \ +--project ${project_id} +``` + +### IAM Policy granted on User Outside of Customer Approved Domain List + +- Query File: `iam_role_add.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case alerts on any IAM role being granted on a user with a domain outside of an approved list. The approved domain list is hardcoded within the view query like `%domain1.com%` and `%domain2.com%`. +- Usage: Change the values `%domain1.com%` and `%domain2.com%` with your respective domains like `%yourrealdomain1.com%` and `%yourrealdomain2.com%`. +- Testing: Grant a permission to a user outside your domains in GCP console or using gcloud commands like: + +```bash +gcloud projects add-iam-policy-binding \ +--member=user:@gmail.com \ +--role=roles/gameservices.viewer +``` + +### Alert on Changes to Logging + +- Query File: `logging_changes.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case aims to detect any modifications made to logging within a project. This includes creating or deleting log sinks, or deleting logs themselves. This does not include modifications to VPC flow logs. 
+- Usage: The query doesn't need changes besides the ones described in section [General Usage](./README.md#general-usage) . +- Testing: There are some ways to generate logs: + - Create a log sink + - Delete log sink + - Attempt to delete audit logs + - Delete logs + - Write entry to log file + - Create logs based metric + - Delete logs based metric + +### Alert on VPC Flow Logs being disabled + +- Query File: `disable_vpc_flow_logs.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case creates an alert when VPC flow logs for a particular subnet are disabled. It looks for modifications to the `gce_subnetwork` resource type where `enableFlowLogs = False`. +- Usage: The query doesn't need changes besides the ones described in section [General Usage](./README.md#general-usage). +- Testing: Go to GCP console, select a subnetwork and edit its **Flow logs** option to **off** + +### Add or Remove Routes + +- Query File: `add_remove_routes.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case creates an alert when a GCE Route is created or deleted. +- Usage: The query doesn't need changes besides the ones described in section [General Usage](./README.md#general-usage). +- Testing: Go to GCP console and create or delete a GCE Route. + +### VPC Flow logs with ingress from IP Addresses outside of expected private address ranges + +- Query File: `ingress_from_external_ip.sql` +- Query Table used: `compute_googleapis_com_vpc_flows_*` +- Description: This use case examines the VPC flow logs for all source projects and looks for ingress from IPs outside of known good IP ranges, such as on premise IPs or Google Cloud IPs. Those IP ranges are hardcoded in the query. 
+- Usage: To look for new IP ranges just add a new `OR` clause in the query like: `OR NET.IP_TRUNC(src_ip_parsed, NETMASK) = b"\xDD\xDD\xDD\xDD"` where `NETMASK` is the value of the netmask and `\xDD\xDD\xDD\xDD"` is the IP range in hexadecimal format. Make sure to substitute correctly the `DD` to hexadecimal values and to maintain the `\xDD` format.
+- Testing: First make sure that VPC Flow logs are enabled. If you already have a GCE instance, its communication to Google addresses will produce the logs.
+
+### Abnormal amount of data movement out of the cloud
+
+- Query File: `bytes_sent.sql`
+- Query Table used: `compute_googleapis_com_vpc_flows_*`
+- Description: This use case creates an alert when a VM sends an amount of data beyond a specific threshold to an unknown, or untrusted IP address. This is done by examining the bytes sent field of the VPC flow logs, summing over the amount of data sent between a pair of particular IPs. Those IP ranges are hardcoded in the query as well as the data threshold.
+- Usage: To look for a new range of IPs just add a new `OR` clause in the query like: `OR NET.IP_TRUNC(NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),NETMASK) = b"\xDD\xDD\xDD\xDD"` where `NETMASK` is the value of the netmask and `\xDD\xDD\xDD\xDD"` is the IP range in hexadecimal format. Make sure to substitute correctly the `DD` to hexadecimal values and to maintain the `\xDD` format. To change the data threshold just change the `WHERE bytes_sent > 1E9;` clause with a new value
+- Testing: First make sure that VPC Flow logs are enabled. If you already have a VM instance you can reduce the data threshold to see the logs.
+ +### Anomalous Privileged Terraform Service Account Usage + +- Query File: `anomalous_terraform_sa_usage.sql` +- Query Table used: `cloudaudit_googleapis_com_activity_*` +- Description: This use case creates an alert for anomalous usage of a privileged service account that is used by a CI/CD work flow to deploy the infrastructure using terraform. +Anomalous usage is the use of the terraform service account by someone that is not the CI/CD service account. See [Terraform Example Foundation](https://github.com/terraform-google-modules/terraform-example-foundation) for [Google Cloud Build](https://cloud.google.com/cloud-build) or Jenkins examples of CI/CD to deploy infrastructure. +- Usage: Change the values of `` and ``: + - ``: The email of the Service account that has been granted the permissions to deploy infrastructure . For example, if you are using [Terraform Google Bootstrap module](https://github.com/terraform-google-modules/terraform-google-bootstrap) the service account email is the `terraform_sa_email`. + - ``: + - If you are using Cloud build the CI/CD service account email is the Cloud Build service account `@cloudbuild.gserviceaccount.com`. + - If you are using Jenkins the CI/CD service account email is the service account created to be used by the Jenkins agent. +- Testing: Run a gcloud `list` or `describe` command on a resource under the monitoring of the sink created to export logs to BigQuery using [service account impersonation](https://cloud.google.com/sdk/gcloud/reference#--impersonate-service-account)`--impersonate-service-account=`. + +### Alert on Super Admin (Org./Owner) Login + +**NOTE:** This use case needs configurations on **organization level**. + +- Query File: `superadmin_login.sql` +- Query Table used: `cloudaudit_googleapis_com_data_access_*` +- Description: This use case creates an alert for a variety of login events including success, failure, suspicious login, and login verification required, for super administrator accounts. 
While the current implementation includes a list of email addresses to be monitored, it can be assumed that all logins represent an event, since only super admin accounts are verified through Cloud Identity.
+- Usage: Change the values of `<user1>@<your-domain>` and `<user2>@<your-domain>` with the users that you want to monitor.
+- Testing: First make sure that you enabled the [Admin audit log](https://support.google.com/a/answer/9320190?hl=en) to export logs to GCP. If you created the [Log Export](https://github.com/terraform-google-modules/terraform-google-log-export) at the organization level, you just need to login to generate an alert. If you create the Log Export at folder or project level then you will need to create a new log sink:
+
+Create a new sink on the organization to catch the `data_access` logs:
+
+```bash
+ export project_id=<project_id>
+ export organization_id=<organization_id>
+ export sink_name='sk-c-logging-admin-bq'
+ gcloud logging sinks create ${sink_name} bigquery.googleapis.com/projects/${project_id}/datasets/audit_logs \
+ --log-filter="logName: /logs/cloudaudit.googleapis.com%2Fdata_access" \
+ --organization=${organization_id}
+```
+
+where `project_id` is the project ID used to deploy the submodule.
diff --git a/modules/bq-log-alerting/use-cases/add_remove_routes.sql b/modules/bq-log-alerting/use-cases/add_remove_routes.sql
new file mode 100644
index 00000000..3ee1b20e
--- /dev/null
+++ b/modules/bq-log-alerting/use-cases/add_remove_routes.sql
@@ -0,0 +1,34 @@
+# Copyright 2020 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +SELECT + log_table.receiveTimestamp, + log_table.timestamp AS eventTimestamp, + log_table.protopayload_auditlog.requestMetadata.callerIp, + log_table.protopayload_auditlog.authenticationInfo.principalEmail, + log_table.protopayload_auditlog.serviceName, + log_table.resource.labels.project_id, + CONCAT('//', log_table.protopayload_auditlog.serviceName, '/projects/', log_table.resource.labels.project_id, '/global/routes/', log_table.resource.labels.route_id) as resourceName, + log_table.insertId +FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` AS log_table +WHERE + log_table.resource.type = 'gce_route' + AND log_table.operation.last = TRUE # Route insertion can be a long-running operation, so this prevents us from double-counting. + AND log_table.protopayload_auditlog.methodName IN ( + 'v1.compute.routes.insert', + 'beta.compute.routes.insert', + 'v1.compute.routes.delete', + 'beta.compute.routes.delete' + ); diff --git a/modules/bq-log-alerting/use-cases/anomalous_terraform_sa_usage.sql b/modules/bq-log-alerting/use-cases/anomalous_terraform_sa_usage.sql new file mode 100644 index 00000000..50ca5803 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/anomalous_terraform_sa_usage.sql @@ -0,0 +1,43 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +SELECT + insertId, + receiveTimestamp, + timestamp AS eventTimestamp, + protopayload_auditlog.requestMetadata.callerIp, + protopayload_auditlog.authenticationInfo.principalEmail, + protopayload_auditlog.resourceName, + protopayload_auditlog.methodName, + CASE ARRAY_LENGTH(protopayload_auditlog.authenticationInfo.serviceAccountDelegationInfo) + WHEN 0 THEN "No Impersonation" + ELSE + protopayload_auditlog.authenticationInfo.serviceAccountDelegationInfo[ +OFFSET + (0)].firstPartyPrincipal.principalEmail +END + AS originalPrincipalEmail +FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` +WHERE + protopayload_auditlog.authenticationInfo.principalEmail = "" + AND ((ARRAY_LENGTH(protopayload_auditlog.authenticationInfo.serviceAccountDelegationInfo) > 0 + AND "" NOT IN ( + SELECT + firstPartyPrincipal.principalEmail + FROM + UNNEST(protopayload_auditlog.authenticationInfo.serviceAccountDelegationInfo))) + OR ((ARRAY_LENGTH(protopayload_auditlog.authenticationInfo.serviceAccountDelegationInfo) = 0) + AND NOT operation.last = TRUE) + OR protopayload_auditlog.authenticationInfo.serviceAccountKeyName IS NOT NULL) diff --git a/modules/bq-log-alerting/use-cases/bytes_sent.sql b/modules/bq-log-alerting/use-cases/bytes_sent.sql new file mode 100644 index 00000000..74935c54 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/bytes_sent.sql @@ -0,0 +1,71 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+SELECT
+  CURRENT_TIMESTAMP() AS receiveTimestamp,
+  NULL AS principalEmail,
+  resourceName,
+  callerIp,
+  dest_ip AS destination_ip,
+  bytes_sent
+FROM
+  (
+    SELECT
+      CONCAT(
+        'projects/',
+        jsonPayload.src_vpc.project_id,
+        '/zones/',
+        jsonPayload.src_instance.zone,
+        '/vm/',
+        jsonPayload.src_instance.vm_name
+      ) AS resourceName,
+      jsonPayload.connection.src_ip AS callerIp,
+      jsonPayload.connection.dest_ip AS dest_ip,
+      SUM(CAST(jsonPayload.bytes_sent AS FLOAT64)) AS bytes_sent
+    FROM
+      `${project}.${dataset}.compute_googleapis_com_vpc_flows_*`
+    WHERE
+      jsonPayload.src_instance.vm_name IS NOT NULL
+      AND NOT (
+        NET.IP_TRUNC(
+          NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),
+          8
+        ) = b"\x0A\x00\x00\x00" # 10.0.0.0/8 private range.
+        OR NET.IP_TRUNC(
+          NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),
+          12
+        ) = b"\xAC\x10\x00\x00" # 172.16.0.0/12 private range.
+        OR NET.IP_TRUNC(
+          NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),
+          16
+        ) = b"\xC0\xA8\x00\x00" # 192.168.0.0/16 private range.
+        OR NET.IP_TRUNC(
+          NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),
+          22
+        ) = b"\x82\xD3\x00\x00" # 130.211.0.0/22 GLB range.
+        OR NET.IP_TRUNC(
+          NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip),
+          16
+        ) = b"\x23\xBF\x00\x00" # 35.191.0.0/16 GLB range.
+ ) + AND receiveTimestamp >= TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL -1 DAY) + GROUP BY + jsonPayload.src_instance.vm_name, + jsonPayload.connection.src_ip, + jsonPayload.connection.dest_ip, + jsonPayload.src_vpc.project_id, + jsonPayload.src_instance.zone + ) +WHERE + bytes_sent > 1E9; diff --git a/modules/bq-log-alerting/use-cases/disable_vpc_flow_logs.sql b/modules/bq-log-alerting/use-cases/disable_vpc_flow_logs.sql new file mode 100644 index 00000000..b1155bf8 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/disable_vpc_flow_logs.sql @@ -0,0 +1,40 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +SELECT + receiveTimestamp, + timestamp as eventTimestamp, + protopayload_auditlog.requestMetadata.callerIp, + protopayload_auditlog.authenticationInfo.principalEmail, + protopayload_auditlog.serviceName, + resource.labels.project_id, + resource.labels.location, + resource.labels.subnetwork_id, + resource.labels.subnetwork_name, + CONCAT('//', protopayload_auditlog.serviceName, '/projects/', resource.labels.project_id, '/regions/', resource.labels.location, '/subnetworks/', resource.labels.subnetwork_id) as resourceName, + insertId, +FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` +WHERE + resource.type = "gce_subnetwork" + AND ( + JSON_EXTRACT( + protopayload_auditlog.requestJson, + '$.enableFlowLogs' + ) = "false" + OR JSON_EXTRACT( + protopayload_auditlog.requestJson, + '$.logConfig.enable' + ) = "false" + ); diff --git a/modules/bq-log-alerting/use-cases/iam_role_add.sql b/modules/bq-log-alerting/use-cases/iam_role_add.sql new file mode 100644 index 00000000..8238fbd5 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/iam_role_add.sql @@ -0,0 +1,45 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +SELECT + receiveTimestamp, + timestamp AS eventTimestamp, + protopayload_auditlog.requestMetadata.callerIp, + protopayload_auditlog.authenticationInfo.principalEmail, + protopayload_auditlog.resourceName as resource_name, + protopayload_auditlog.serviceName, + CONCAT('//', protopayload_auditlog.serviceName, '/projects/', resource.labels.project_id) as resourceName, + bindings.role, + bindings.member, + insertId +FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` + CROSS JOIN UNNEST( + protopayload_auditlog.servicedata_v1_iam.policyDelta.bindingDeltas + ) AS bindings +WHERE + protopayload_auditlog.methodName = 'SetIamPolicy' + AND bindings.action = 'ADD' + AND JSON_EXTRACT( + TO_JSON_STRING( + protopayload_auditlog.servicedata_v1_iam.policyDelta + ), + '$.bindingDeltas[0].member' + ) NOT LIKE '%domain1.com%' + AND JSON_EXTRACT( + TO_JSON_STRING( + protopayload_auditlog.servicedata_v1_iam.policyDelta + ), + '$.bindingDeltas[0].member' + ) NOT LIKE '%domain2.com%'; diff --git a/modules/bq-log-alerting/use-cases/ingress_from_external_ip.sql b/modules/bq-log-alerting/use-cases/ingress_from_external_ip.sql new file mode 100644 index 00000000..f06a822e --- /dev/null +++ b/modules/bq-log-alerting/use-cases/ingress_from_external_ip.sql @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+
+WITH parsed_ips AS (
+  SELECT
+    insertId,
+    receiveTimestamp,
+    timestamp AS eventTimestamp,
+    NET.SAFE_IP_FROM_STRING(jsonPayload.connection.src_ip) AS src_ip_parsed,
+    NET.SAFE_IP_FROM_STRING(jsonPayload.connection.dest_ip) AS dest_ip_parsed,
+    jsonPayload.connection.src_ip,
+    jsonPayload.connection.src_port,
+    jsonPayload.connection.dest_ip,
+    jsonPayload.connection.dest_port,
+    jsonPayload.connection.protocol
+  FROM
+    `${project}.${dataset}.compute_googleapis_com_vpc_flows_*`
+)
+SELECT
+  receiveTimestamp,
+  eventTimestamp,
+  src_ip AS callerIp,
+  insertId,
+  # We need to keep the metric labels the same across queries.
+  NULL AS principalEmail,
+  dest_ip AS resourceName
+FROM
+  parsed_ips
+WHERE
+  BYTE_LENGTH(src_ip_parsed) = 16 # Is an IPv6 address.
+  OR NOT(
+    NET.IP_TRUNC(src_ip_parsed, 8) = b"\x0A\x00\x00\x00" # 10.0.0.0/8 private range.
+    OR NET.IP_TRUNC(src_ip_parsed, 12) = b"\xAC\x10\x00\x00" # 172.16.0.0/12 private range.
+    OR NET.IP_TRUNC(src_ip_parsed, 16) = b"\xC0\xA8\x00\x00" # 192.168.0.0/16 private range.
+    OR NET.IP_TRUNC(src_ip_parsed, 22) = b"\x82\xD3\x00\x00" # 130.211.0.0/22 GLB range.
+    OR NET.IP_TRUNC(src_ip_parsed, 16) = b"\x23\xBF\x00\x00" # 35.191.0.0/16 GLB range.
+  )
+LIMIT
+  50; # Currently implemented to limit the noisy results
diff --git a/modules/bq-log-alerting/use-cases/logging_changes.sql b/modules/bq-log-alerting/use-cases/logging_changes.sql
new file mode 100644
index 00000000..966c0128
--- /dev/null
+++ b/modules/bq-log-alerting/use-cases/logging_changes.sql
@@ -0,0 +1,25 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SELECT + receiveTimestamp, + timestamp AS eventTimestamp, + protopayload_auditlog.requestMetadata.callerIp, + protopayload_auditlog.authenticationInfo.principalEmail, + protopayload_auditlog.resourceName, + protopayload_auditlog.methodName +FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` +WHERE + protopayload_auditlog.serviceName = "logging.googleapis.com"; diff --git a/modules/bq-log-alerting/use-cases/non_allowlisted_services.sql b/modules/bq-log-alerting/use-cases/non_allowlisted_services.sql new file mode 100644 index 00000000..09871083 --- /dev/null +++ b/modules/bq-log-alerting/use-cases/non_allowlisted_services.sql @@ -0,0 +1,53 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +WITH log_table AS ( + SELECT + receiveTimestamp, + timestamp, + protopayload_auditlog.requestMetadata.callerIp AS callerIp, + protopayload_auditlog.authenticationInfo.principalEmail AS principalEmail, + logName, + log.insertId, + log.resource.labels.project_id AS projectId, + REGEXP_EXTRACT(authInfo.resource,'services/([^/]*)') AS serviceName + FROM + `${project}.${dataset}.cloudaudit_googleapis_com_activity_*` log, + UNNEST(protopayload_auditlog.authorizationInfo) AS authInfo + WHERE + protopayload_auditlog.methodName IN ( + "google.api.servicemanagement.v1.ServiceManager.ActivateServices", + "google.api.serviceusage.v1.ServiceUsage.EnableService", + "google.api.serviceusage.v1.ServiceUsage.BatchEnableServices" + ) # Filter out IPv6 addresses, was receiving a double result for each enable event + AND NET.SAFE_IP_FROM_STRING(protopayload_auditlog.requestMetadata.callerIp) + != b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01" +) + +SELECT + DISTINCT receiveTimestamp, + timestamp AS eventTimestamp, + callerIp, + CONCAT('//', serviceName, '/projects/', projectId) as resourceName, + principalEmail, + insertId, + serviceName +FROM + log_table +WHERE + serviceName IS NOT NULL + AND NOT REGEXP_CONTAINS( + serviceName, + 'dns.googleapis.com|iap.googleapis.com|compute.googleapis.com|file.googleapis.com|stackdriver.googleapis.com' + ); diff --git a/modules/bq-log-alerting/use-cases/superadmin_login.sql b/modules/bq-log-alerting/use-cases/superadmin_login.sql new file mode 100644 index 00000000..9d1420fd --- /dev/null +++ b/modules/bq-log-alerting/use-cases/superadmin_login.sql @@ -0,0 +1,36 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SELECT + timestamp AS eventTimestamp, + receiveTimestamp, + log_table.insertId, + log_table.protopayload_auditlog.serviceName, + log_table.protopayload_auditlog.requestmetadata.callerip AS callerIp, + log_table.protopayload_auditlog.authenticationinfo.principalemail AS principalEmail, + log_table.protopayload_auditlog.resourceName, + log_table.protopayload_auditlog.methodname as methodName +FROM `${project}.${dataset}.cloudaudit_googleapis_com_data_access_*` as log_table +WHERE protopayload_auditlog.authenticationinfo.principalemail IN + ( + '@', + '@' + ) +AND protopayload_auditlog.methodname IN + ( + 'google.login.LoginService.loginSuccess', + 'google.login.LoginService.loginVerification', + 'google.login.LoginService.loginFailure', + 'google.login.LoginService.loginSuspicious' + ); diff --git a/modules/bq-log-alerting/variables.tf b/modules/bq-log-alerting/variables.tf new file mode 100644 index 00000000..d0fe12df --- /dev/null +++ b/modules/bq-log-alerting/variables.tf @@ -0,0 +1,72 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +variable "org_id" { + description = "The organization ID for the associated services" + type = string +} + +variable "region" { + description = "Region for BigQuery resources." + type = string +} + +variable "source_name" { + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source if the source had been created before. The format is `organizations//sources/`" + type = string + default = "" +} + +variable "logging_project" { + description = "The project to deploy the tool." + type = string +} + +variable "job_schedule" { + description = "The schedule on which the job will be executed in the unix-cron string format (https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules#defining_the_job_schedule). Defaults to 15 minutes." + type = string + default = "*/15 * * * *" +} + +variable "time_window_unit" { + description = "The time window unit used in the query in the view in BigQuery. Valid values are 'MICROSECOND', 'MILLISECOND', 'SECOND', 'MINUTE', 'HOUR'" + type = string + default = "MINUTE" +} + +variable "time_window_quantity" { + description = "The time window quantity used in the query in the view in BigQuery." + type = string + default = "20" +} + +variable "dry_run" { + description = "Enable dry_run execution of the Cloud Function. If is true it will just print the object the would be converted as a finding" + type = bool + default = false +} + +variable "function_timeout" { + description = "The amount of time in seconds allotted for the execution of the function." + type = number + default = "540" +} + +variable "function_memory" { + description = "The amount of memory in megabytes allotted for the Cloud function to use." 
+ type = number + default = "256" +} diff --git a/modules/bq-log-alerting/versions.tf b/modules/bq-log-alerting/versions.tf new file mode 100644 index 00000000..1285cf41 --- /dev/null +++ b/modules/bq-log-alerting/versions.tf @@ -0,0 +1,19 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +terraform { + required_version = ">=0.12.6, <0.14" +} diff --git a/test/fixtures/bq-log-alerting/main.tf b/test/fixtures/bq-log-alerting/main.tf new file mode 100644 index 00000000..18670620 --- /dev/null +++ b/test/fixtures/bq-log-alerting/main.tf @@ -0,0 +1,29 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +module "bq-log-alerting" { + source = "../../../modules/bq-log-alerting" + org_id = var.parent_resource_organization + region = var.region + source_name = var.source_name + logging_project = var.project_id + job_schedule = var.job_schedule + time_window_unit = var.time_window_unit + time_window_quantity = var.time_window_quantity + dry_run = var.dry_run + function_timeout = var.function_timeout + function_memory = var.function_memory +} diff --git a/test/fixtures/bq-log-alerting/outputs.tf b/test/fixtures/bq-log-alerting/outputs.tf new file mode 100644 index 00000000..193b5034 --- /dev/null +++ b/test/fixtures/bq-log-alerting/outputs.tf @@ -0,0 +1,70 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +output "cloud_scheduler_job_name" { + value = module.bq-log-alerting.cloud_scheduler_job_name + description = "The name of the Cloud Scheduler job created" +} + +output "pubsub_topic_name" { + value = module.bq-log-alerting.pubsub_topic_name + description = "PubSub topic name" +} + +output "cloud_scheduler_job" { + value = module.bq-log-alerting.cloud_scheduler_job + description = "The Cloud Scheduler job instance" +} + +output "source_name" { + value = module.bq-log-alerting.source_name + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source" +} + +output "cf_service_account_email" { + value = module.bq-log-alerting.cloud_function_service_account_email + description = "The email of the service account created to be used by the cloud function" +} + +output "bq_views_dataset_id" { + value = module.bq-log-alerting.bq_views_dataset_id + description = "The ID of the BigQuery Views dataset" +} + +output "logging_project" { + value = var.project_id + description = "The project where the submodule is deployed" +} + +output "dry_run" { + value = var.dry_run + description = "Enable dry_run execution of the Cloud Function. If true, it will just print the object that would be converted to a finding" +} + +output "region" { + value = var.region + description = "Region for BigQuery resources." +} + +output "org_id" { + value = var.parent_resource_organization + description = "The organization ID for the associated services" +} + +output "job_schedule" { + value = var.job_schedule + description = "The schedule on which the job will be executed in the unix-cron string format (https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules#defining_the_job_schedule). Defaults to 15 minutes." 
+} diff --git a/test/fixtures/bq-log-alerting/variables.tf b/test/fixtures/bq-log-alerting/variables.tf new file mode 100644 index 00000000..558dba39 --- /dev/null +++ b/test/fixtures/bq-log-alerting/variables.tf @@ -0,0 +1,73 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +variable "parent_resource_organization" { + description = "The organization ID for the associated services" + type = string +} + +variable "region" { + description = "Region for BigQuery resources." + type = string + default = "us-central1" +} + +variable "source_name" { + description = "The Security Command Center Source name for the \"BQ Log Alerts\" Source if the source had been created before. The format is `organizations//sources/`" + type = string + default = "" +} + +variable "project_id" { + description = "The project to deploy the solution" + type = string +} + +variable "job_schedule" { + description = "The schedule on which the job will be executed in the unix-cron string format (https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules#defining_the_job_schedule). Defaults to 15 minutes." + type = string + default = "*/15 * * * *" +} + +variable "time_window_unit" { + description = "The time window unit used in the query in the view in BigQuery. 
 Valid values are 'MICROSECOND', 'MILLISECOND', 'SECOND', 'MINUTE', 'HOUR'" + type = string + default = "MINUTE" +} + +variable "time_window_quantity" { + description = "The time window quantity used in the query in the view in BigQuery." + type = string + default = "20" +} + +variable "dry_run" { + description = "Enable dry_run execution of the Cloud Function. If true, it will just print the object that would be converted to a finding" + type = bool + default = false +} + +variable "function_timeout" { + description = "The amount of time in seconds allotted for the execution of the function." + type = number + default = "540" +} + +variable "function_memory" { + description = "The amount of memory in megabytes allotted for the Cloud function to use." + type = number + default = "256" +} diff --git a/test/fixtures/bq-log-alerting/versions.tf b/test/fixtures/bq-log-alerting/versions.tf new file mode 100644 index 00000000..1285cf41 --- /dev/null +++ b/test/fixtures/bq-log-alerting/versions.tf @@ -0,0 +1,19 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +terraform { + required_version = ">=0.12.6, <0.14" +} diff --git a/test/integration/bq-log-alerting/controls/gcloud.rb b/test/integration/bq-log-alerting/controls/gcloud.rb new file mode 100644 index 00000000..1e060287 --- /dev/null +++ b/test/integration/bq-log-alerting/controls/gcloud.rb @@ -0,0 +1,84 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +cloud_scheduler_job_name = attribute('cloud_scheduler_job_name') +org_id = attribute('org_id') +logging_project = attribute('logging_project') +region = attribute('region') +pubsub_topic_name = attribute('pubsub_topic_name') +job_schedule = attribute('job_schedule') +source_name = attribute('source_name') + +job_name = 'bq-alerts-event-trigger' +complete_job_name = "projects/#{logging_project}/locations/#{region}/jobs/#{job_name}" + +topic_name = "projects/#{logging_project}/topics/#{pubsub_topic_name}" + +control 'gcloud' do + title 'Big Query Log Alerting - gcloud commands' + + describe command("gcloud alpha scc sources describe #{org_id} --source=#{source_name} --format json") do + its('exit_status') { should eq 0 } + its('stderr') { should eq '' } + let(:data) do + if subject.exit_status.zero? 
+ JSON.parse(subject.stdout) + else + {} + end + end + + it 'has correct name' do + expect(data).to include( + 'name' => source_name + ) + end + + it 'has correct displayName' do + expect(data).to include( + 'displayName' => 'BQ Log Alerts' + ) + end + end + + describe command("gcloud scheduler jobs describe #{job_name} --project=#{logging_project} --format json") do + its('exit_status') { should eq 0 } + its('stderr') { should eq '' } + let(:data) do + if subject.exit_status.zero? + JSON.parse(subject.stdout) + else + {} + end + end + + it "has correct name #{complete_job_name}" do + expect(data).to include( + 'name' => complete_job_name + ) + end + + it "has correct topicName #{topic_name}" do + expect(data['pubsubTarget']).to include( + 'topicName' => topic_name + ) + end + + it "has correct schedule #{job_schedule}" do + expect(data).to include( + 'schedule' => job_schedule + ) + end + end +end diff --git a/test/integration/bq-log-alerting/controls/gcp.rb b/test/integration/bq-log-alerting/controls/gcp.rb new file mode 100644 index 00000000..6cff3963 --- /dev/null +++ b/test/integration/bq-log-alerting/controls/gcp.rb @@ -0,0 +1,91 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +pubsub_topic_name = attribute('pubsub_topic_name') +source_name = attribute('source_name') +cf_service_account_email = attribute('cf_service_account_email') +logging_project = attribute('logging_project') +dry_run = attribute('dry_run') ? 
'true' : 'false' +region = attribute('region') +org_id = attribute('org_id') + +project_role = 'roles/bigquery.admin' +org_role = 'roles/securitycenter.findingsEditor' + +org_name = "organizations/#{org_id}" + +bigquery_dataset = 'views' + +cf_name = 'generate-alerts' + +control 'gcp' do + title 'Big Query Log Alerting' + + describe google_service_account( + project: logging_project, + name: cf_service_account_email + ) do + it { should exist } + end + + describe google_project_iam_binding( + project: logging_project, + role: project_role + ) do + it { should exist } + its('members') { should include "serviceAccount:#{cf_service_account_email}" } + end + + describe google_organization_iam_binding( + name: org_name, + role: org_role + ) do + it { should exist } + its('members') { should include "serviceAccount:#{cf_service_account_email}" } + end + + describe google_bigquery_dataset( + project: logging_project, + name: bigquery_dataset + ) do + it { should exist } + its('description') { should eq 'Log view dataset' } + end + + describe google_cloudfunctions_cloud_function( + project: logging_project, + location: region, + name: cf_name + ) do + it { should exist } + its('description') { should eq 'read from BQ view to generate alerts' } + its('timeout') { should eq '540s' } + its('available_memory_mb') { should eq 256 } + its('runtime') { should eq 'nodejs10' } + its('environment_variables') { + should include( + 'CSCC_SOURCE' => source_name, + 'LOGGING_PROJECT' => logging_project, + 'DRY_RUN' => dry_run + ) + } + end + + describe google_pubsub_topic( + project: logging_project, + name: pubsub_topic_name + ) do + it { should exist } + end +end diff --git a/test/integration/bq-log-alerting/inspec.yml b/test/integration/bq-log-alerting/inspec.yml new file mode 100644 index 00000000..22195d6f --- /dev/null +++ b/test/integration/bq-log-alerting/inspec.yml @@ -0,0 +1,33 @@ +name: bq_log_alert +depends: + - name: inspec-gcp + git: https://github.com/inspec/inspec-gcp.git 
+ tag: v1.8.0 +attributes: + - name: cloud_scheduler_job_name + required: true + type: string + - name: pubsub_topic_name + required: true + type: string + - name: source_name + required: true + type: string + - name: cf_service_account_email + required: true + type: string + - name: logging_project + required: true + type: string + - name: dry_run + required: true + type: boolean + - name: region + required: true + type: string + - name: org_id + required: true + type: string + - name: job_schedule + required: true + type: string diff --git a/test/setup/appengine.tf b/test/setup/appengine.tf new file mode 100644 index 00000000..14fdd505 --- /dev/null +++ b/test/setup/appengine.tf @@ -0,0 +1,20 @@ +/** + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +resource "google_app_engine_application" "app" { + project = module.project.project_id + location_id = "us-central" +} diff --git a/test/setup/iam.tf b/test/setup/iam.tf index 1f0ca4dd..ac894fbf 100644 --- a/test/setup/iam.tf +++ b/test/setup/iam.tf @@ -38,6 +38,15 @@ locals { # Required to create log sinks from the project level "roles/logging.configWriter", + + # Needed for the bq-log-alerting submodule to create/delete a cloud function + "roles/cloudfunctions.developer", + + # Needed for the bq-log-alerting submodule to grant service account roles + "roles/iam.serviceAccountUser", + + # Needed for the bq-log-alerting submodule to create/delete a cloud scheduler job + "roles/cloudscheduler.admin" ] log_export_billing_account_roles = [ @@ -51,6 +60,12 @@ locals { # Required to associate billing accounts to new projects "roles/billing.projectManager", + + # Required to create a Security Center Source + "roles/securitycenter.sourcesEditor", + + # Required to get/set IAM policies + "roles/resourcemanager.organizationAdmin", ] log_export_folder_roles = [ diff --git a/test/setup/main.tf b/test/setup/main.tf index 8ade6eb4..b9bcd68e 100644 --- a/test/setup/main.tf +++ b/test/setup/main.tf @@ -25,6 +25,11 @@ module "project" { billing_account = var.billing_account activate_apis = [ + "appengine.googleapis.com", + "cloudbuild.googleapis.com", + "cloudfunctions.googleapis.com", + "cloudscheduler.googleapis.com", + "securitycenter.googleapis.com", "cloudresourcemanager.googleapis.com", "oslogin.googleapis.com", "serviceusage.googleapis.com",