Add reusable action #10
# **what?**
# Run tests for <this package> against supported adapters
# **why?**
# To ensure that <this package> works as expected with all supported adapters
# **when?**
# On every PR, on every push to main, and when manually triggered
name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  run-tests:
    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@main
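    # Note: `@main` tracks the default branch of dbt-labs/dbt-package-testing;
    # pinning to a release tag or commit SHA instead would make runs reproducible.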
    with:
      # Postgres is the default since it runs directly in the container,
      # so no Postgres vars need to be passed in.
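      # If Postgres ever moved off the container, the equivalent inputs would
      # look like the sketch below (names are illustrative, not inputs the
      # reusable workflow is known to declare):
      # POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
      # POSTGRES_PORT: ${{ vars.POSTGRES_PORT }}
      # POSTGRES_SCHEMA: "integration_tests_postgres_${{ github.run_number }}"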
      # redshift
      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
      REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
      REDSHIFT_PORT: ${{ vars.REDSHIFT_PORT }}
      # bigquery
      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
      BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"
      # snowflake
      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
      SNOWFLAKE_SCHEMA: "integration_tests_snowflake_${{ github.run_number }}"
      # trino
      TRINO_METHOD: ${{ vars.TRINO_METHOD }}
      TRINO_USER: ${{ vars.TRINO_USER }}
      TRINO_HOST: ${{ vars.TRINO_HOST }}
      TRINO_PORT: ${{ vars.TRINO_PORT }}
      TRINO_CATALOG: ${{ vars.TRINO_CATALOG }}
      TRINO_SCHEMA: "integration_tests_trino_${{ github.run_number }}"
      TRINO_TIMEZONE: ${{ vars.TRINO_TIMEZONE }}
      # databricks
      DATABRICKS_SCHEMA: "integration_tests_databricks_${{ github.run_number }}"
      DATABRICKS_HOST: ${{ vars.DATABRICKS_HOST }}
      DATABRICKS_HTTP_PATH: ${{ vars.DATABRICKS_HTTP_PATH }}
      # spark
      SPARK_HOST: ${{ vars.SPARK_HOST }}
      SPARK_SCHEMA: "integration_tests_spark_${{ github.run_number }}"
      SPARK_USER: ${{ vars.SPARK_USER }}
      SPARK_METHOD: ${{ vars.SPARK_METHOD }}
      SPARK_PORT: ${{ vars.SPARK_PORT }}
      # fabric
      FABRIC_DRIVER: ${{ vars.FABRIC_DRIVER }}
      FABRIC_HOST: ${{ vars.FABRIC_HOST }}
      FABRIC_PORT: ${{ vars.FABRIC_PORT }}
      FABRIC_DATABASE: ${{ vars.FABRIC_DATABASE }}
      FABRIC_SCHEMA: "integration_tests_fabric_${{ github.run_number }}"
      FABRIC_AUTHENTICATION: ${{ vars.FABRIC_AUTHENTICATION }}
      FABRIC_TENANT: ${{ vars.FABRIC_TENANT }}
      FABRIC_CLIENT: ${{ vars.FABRIC_CLIENT }}
      # synapse
      SYNAPSE_DRIVER: ${{ vars.SYNAPSE_DRIVER }}
      SYNAPSE_HOST: ${{ vars.SYNAPSE_HOST }}
      SYNAPSE_PORT: ${{ vars.SYNAPSE_PORT }}
      SYNAPSE_DATABASE: ${{ vars.SYNAPSE_DATABASE }}
      SYNAPSE_SCHEMA: "integration_tests_synapse_${{ github.run_number }}"
      SYNAPSE_AUTHENTICATION: ${{ vars.SYNAPSE_AUTHENTICATION }}
      SYNAPSE_TENANT_ID: ${{ vars.SYNAPSE_TENANT_ID }}
      SYNAPSE_CLIENT_ID: ${{ vars.SYNAPSE_CLIENT_ID }}
      # athena
      ATHENA_S3_STAGING_DIR: ${{ vars.ATHENA_S3_STAGING_DIR }}
      ATHENA_S3_DATA_DIR: ${{ vars.ATHENA_S3_DATA_DIR }}
      ATHENA_S3_DATA_NAMING: ${{ vars.ATHENA_S3_DATA_NAMING }}
      ATHENA_REGION_NAME: ${{ vars.ATHENA_REGION_NAME }}
      ATHENA_SCHEMA: "integration_tests_athena_${{ github.run_number }}"
      ATHENA_DATABASE: ${{ vars.ATHENA_DATABASE }}
      # plus any other env vars needed for your supported adapter
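      # For example, wiring in a hypothetical DuckDB adapter might only need
      # two inputs (names are illustrative; the reusable workflow would have
      # to declare matching inputs before these could be passed):
      # DUCKDB_PATH: ${{ vars.DUCKDB_PATH }}
      # DUCKDB_SCHEMA: "integration_tests_duckdb_${{ github.run_number }}"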
    secrets:
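      # dbt scrubs the values of env vars prefixed with DBT_ENV_SECRET_ from
      # its logs, which is why most of these secrets carry that prefix.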
      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
      DBT_ENV_SECRET_TRINO_PASS: ${{ secrets.DBT_ENV_SECRET_TRINO_PASS }}
      DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DBT_ENV_SECRET_DATABRICKS_TOKEN }}
      DBT_ENV_SECRET_FABRIC_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_FABRIC_CLIENT_SECRET }}
      DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET }}
      DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID }}
      DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY }}