diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c9475b8..41cadad 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,8 +1,8 @@
 # **what?**
-# Run tests for package-testing against supported adapters
+# Run tests against supported adapters
 # **why?**
-# To ensure that dbt-package-testing works as expected with all supported adapters
+# To ensure that the package works as expected with all supported adapters
 
 # **when?**
 # On every PR, and every push to main and when manually triggered
 
@@ -10,135 +10,86 @@
 name: Package Integration Tests
 
 on:
-  # This repo does not have secrets set up so these triggers are disabled to prevent many failing tests on all PRs
-  # Packages would want to trigger these tests for pushes to main and pull_requests to ensure nothing breaks
-  # with any changes to the package
-  # push:
-  #   branches:
-  #     - main
-  # pull_request:
+  push:
+    branches:
+      - main
+  pull_request:
   workflow_dispatch:
-    inputs:
-      adapter:
-        description: The adapter to test against. Defaults to all supported adapters when blank.
-        type: string
-        required: false
-
-env:
-  PYTHON_VERSION: "3.11"
 
 jobs:
-  determine-supported-adapters:
-    runs-on: ubuntu-latest
-    outputs:
-      adapters: ${{ steps.supported-adapters.outputs.adapters }}
-    steps:
-      - name: "Checkout ${{ github.event.repository }}"
-        uses: actions/checkout@v4
-
-      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-
-      - name: "Install tox"
-        run: |
-          python -m pip install --upgrade pip
-          pip install tox
-
-      - name: "Get list of supported adapters or use input adapter only"
-        id: list-adapters
-        run: |
-          if [ -z "${{ inputs.adapter }}" ]; then
-            # github adds a pip freeze and a new line we need to strip out
-            source supported_adapters.env
-            echo $SUPPORTED_ADAPTERS
-            echo "test_adapters=$SUPPORTED_ADAPTERS" >> $GITHUB_OUTPUT
-          else
-            echo "test_adapters=${{ inputs.adapter }}" >> $GITHUB_OUTPUT
-          fi
-
-      - name: "Format adapter list for use as the matrix"
-        id: supported-adapters
-        run: |
-          # Convert to JSON array and output
-          supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
-          echo $supported_adapters
-          echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT
-
-      - name: "[ANNOTATION] ${{ github.event.repository.name }} - Testing ${{ steps.supported-adapters.outputs.adapters }}"
-        run: |
-          title="${{ github.event.repository.name }} - adapters to test"
-          message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
-          echo "::notice $title::$message"
-
-  run-tests:
-    runs-on: ubuntu-latest
-    needs: [determine-supported-adapters]
-    # you can install postgres as an additional service in the github container
-    services:
-      postgres:
-        image: postgres
-        env:
-          POSTGRES_USER: ${{ vars.POSTGRES_USER }}
-          POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASS }}
-          POSTGRES_DB: ${{ vars.POSTGRES_DATABASE }}
-          POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 5432:5432
-    strategy:
-      fail-fast: false
-      matrix:
-        adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}
-
-    steps:
-      - name: "Checkout ${{ github.event.repository }} "
-        uses: actions/checkout@v4
-
-      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-
-      - name: "Install ${{ matrix.adapter }}"
matrix.adapter }}" - run: | - python -m pip install --upgrade pip - pip install dbt-${{ matrix.adapter }} - - - name: "Install tox" - run: | - python -m pip install --upgrade pip - pip install tox - - - name: "Run integration tests with tox on ${{ matrix.adapter }}" - run: | - tox -e dbt_integration_${{ matrix.adapter }} - env: - POSTGRES_HOST: ${{ vars.POSTGRES_HOST }} - POSTGRES_USER: ${{ vars.POSTGRES_USER }} - DBT_ENV_SECRET_POSTGRES_PASS: ${{ secrets.POSTGRES_PASS }} - POSTGRES_PORT: 5432 - POSTGRES_DATABASE: ${{ vars.POSTGRES_DATABASE }} - POSTGRES_SCHEMA: "dbt_utils_integration_tests_postgres_${{ github.run_number }}" - SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }} - SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }} - DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_PASS }} - SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }} - SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }} - SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }} - SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake_${{ github.run_number }}" - REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }} - REDSHIFT_USER: ${{ vars.REDSHIFT_USER }} - DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASS }} - REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }} - REDSHIFT_SCHEMA: "dbt_utils_integration_tests_redshift_${{ github.run_number }}" - REDSHIFT_PORT: 5439 - BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }} - BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }} - BIGQUERY_SCHEMA: "dbt_utils_integration_tests_bigquery_${{ github.run_number }}" - # plus any other env vars needed for your supported adapter + run-tests: + uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@main + # Note: only include the inputs below for the adapters you are testing against. + # All inputs are optional and postgres has default values. + with: + # Postgres is defaulted since it runs right in the container. + # No need to pass postgres vars in. 
+      # redshift
+      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
+      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
+      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
+      REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
+      REDSHIFT_PORT: ${{ vars.REDSHIFT_PORT }}
+      # bigquery
+      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
+      BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"
+      # snowflake
+      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
+      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
+      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
+      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
+      SNOWFLAKE_SCHEMA: "integration_tests_snowflake_${{ github.run_number }}"
+      # trino
+      TRINO_METHOD: ${{ vars.TRINO_METHOD }}
+      TRINO_USER: ${{ vars.TRINO_USER }}
+      TRINO_HOST: ${{ vars.TRINO_HOST }}
+      TRINO_PORT: ${{ vars.TRINO_PORT }}
+      TRINO_CATALOG: ${{ vars.TRINO_CATALOG }}
+      TRINO_SCHEMA: "integration_tests_trino_${{ github.run_number }}"
+      TRINO_TIMEZONE: ${{ vars.TRINO_TIMEZONE }}
+      # databricks
+      DATABRICKS_SCHEMA: "integration_tests_databricks_${{ github.run_number }}"
+      DATABRICKS_HOST: ${{ vars.DATABRICKS_HOST }}
+      DATABRICKS_HTTP_PATH: ${{ vars.DATABRICKS_HTTP_PATH }}
+      # spark
+      SPARK_HOST: ${{ vars.SPARK_HOST }}
+      SPARK_SCHEMA: "integration_tests_spark_${{ github.run_number }}"
+      SPARK_USER: ${{ vars.SPARK_USER }}
+      SPARK_METHOD: ${{ vars.SPARK_METHOD }}
+      SPARK_PORT: ${{ vars.SPARK_PORT }}
+      # fabric
+      FABRIC_DRIVER: ${{ vars.FABRIC_DRIVER }}
+      FABRIC_HOST: ${{ vars.FABRIC_HOST }}
+      FABRIC_PORT: ${{ vars.FABRIC_PORT }}
+      FABRIC_DATABASE: ${{ vars.FABRIC_DATABASE }}
+      FABRIC_SCHEMA: "integration_tests_fabric_${{ github.run_number }}"
+      FABRIC_AUTHENTICATION: ${{ vars.FABRIC_AUTHENTICATION }}
+      FABRIC_TENANT: ${{ vars.FABRIC_TENANT }}
+      FABRIC_CLIENT: ${{ vars.FABRIC_CLIENT }}
+      # synapse
+      SYNAPSE_DRIVER: ${{ vars.SYNAPSE_DRIVER }}
+      SYNAPSE_HOST: ${{ vars.SYNAPSE_HOST }}
+      SYNAPSE_PORT: ${{ vars.SYNAPSE_PORT }}
+      SYNAPSE_DATABASE: ${{ vars.SYNAPSE_DATABASE }}
+      SYNAPSE_SCHEMA: "integration_tests_synapse_${{ github.run_number }}"
+      SYNAPSE_AUTHENTICATION: ${{ vars.SYNAPSE_AUTHENTICATION }}
+      SYNAPSE_TENANT_ID: ${{ vars.SYNAPSE_TENANT_ID }}
+      SYNAPSE_CLIENT_ID: ${{ vars.SYNAPSE_CLIENT_ID }}
+      # athena
+      ATHENA_S3_STAGING_DIR: ${{ vars.ATHENA_S3_STAGING_DIR }}
+      ATHENA_S3_DATA_DIR: ${{ vars.ATHENA_S3_DATA_DIR }}
+      ATHENA_S3_DATA_NAMING: ${{ vars.ATHENA_S3_DATA_NAMING }}
+      ATHENA_REGION_NAME: ${{ vars.ATHENA_REGION_NAME }}
+      ATHENA_SCHEMA: "integration_tests_athena_${{ github.run_number }}"
+      ATHENA_DATABASE: ${{ vars.ATHENA_DATABASE }}
+    secrets:
+      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
+      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
+      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
+      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
+      DBT_ENV_SECRET_TRINO_PASS: ${{ secrets.DBT_ENV_SECRET_TRINO_PASS }}
+      DBT_SECRET_ENV_DATABRICKS_TOKEN: ${{ secrets.DBT_SECRET_ENV_DATABRICKS_TOKEN }}
+      DBT_ENV_SECRET_FABRIC_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_FABRIC_CLIENT_SECRET }}
+      DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET }}
+      DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID }}
+      DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY }}
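
One note on the caller side before the reusable workflow itself: because every Postgres input in run_tox.yml has a default and the database runs in the job's own service container, a package that only tests against Postgres can call the workflow with no `with:` or `secrets:` block at all. A minimal caller sketch (the triggers mirror the ones above; whether to pin `@main` or a tag is the caller's choice):

name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  run-tests:
    # All inputs are optional; the Postgres defaults baked into
    # run_tox.yml are used, so nothing needs to be passed in.
    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@main
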
diff --git a/.github/workflows/run_tox.yml b/.github/workflows/run_tox.yml
new file mode 100644
index 0000000..9f6e820
--- /dev/null
+++ b/.github/workflows/run_tox.yml
@@ -0,0 +1,400 @@
+# **what?**
+# Run tests for packages against supported adapters
+#
+# **why?**
+# To ensure that packages work as expected with all supported adapters
+
+# **when?**
+# On workflow call
+# This is a workflow meant to be called by many different packages to run integration tests.
+# Any changes should be backwards compatible with all packages that call this workflow.
+
+
+name: Package Integration Tests
+
+on:
+  workflow_call:
+    inputs:
+      # postgres
+      # postgres vars are all defaulted so that they are not required to be passed
+      # in, since the postgres tests will run inside the container with a local
+      # instance of postgres.
+      POSTGRES_SCHEMA:
+        required: false
+        type: string
+        default: "integration_tests_postgres_${{ github.run_number }}"
+      POSTGRES_HOST:
+        required: false
+        type: string
+        default: "localhost"
+      POSTGRES_USER:
+        required: false
+        type: string
+        default: "root"
+      POSTGRES_PORT:
+        required: false
+        type: string
+        default: "5432"
+      POSTGRES_DATABASE:
+        required: false
+        type: string
+        default: "postgres_test"
+      # since this is for the db running inside the container, this is not actually a secret
+      # and does not need to be treated as such in the workflow.
+      DBT_ENV_SECRET_POSTGRES_PASS:
+        description: "The password that isn't actually secret"
+        type: string
+        default: "postgres_test"
+      # redshift
+      REDSHIFT_HOST:
+        required: false
+        type: string
+      REDSHIFT_USER:
+        required: false
+        type: string
+      REDSHIFT_DATABASE:
+        required: false
+        type: string
+      REDSHIFT_SCHEMA:
+        required: false
+        type: string
+      REDSHIFT_PORT:
+        required: false
+        type: string
+      # bigquery
+      BIGQUERY_PROJECT:
+        required: false
+        type: string
+      BIGQUERY_SCHEMA:
+        required: false
+        type: string
+      # snowflake
+      SNOWFLAKE_USER:
+        required: false
+        type: string
+      SNOWFLAKE_ROLE:
+        required: false
+        type: string
+      SNOWFLAKE_DATABASE:
+        required: false
+        type: string
+      SNOWFLAKE_WAREHOUSE:
+        required: false
+        type: string
+      SNOWFLAKE_SCHEMA:
+        required: false
+        type: string
+      # trino
+      TRINO_METHOD:
+        required: false
+        type: string
+      TRINO_USER:
+        required: false
+        type: string
+      TRINO_HOST:
+        required: false
+        type: string
+      TRINO_PORT:
+        required: false
+        type: string
+      TRINO_CATALOG:
+        required: false
+        type: string
+      TRINO_SCHEMA:
+        required: false
+        type: string
+      TRINO_TIMEZONE:
+        required: false
+        type: string
+      # databricks
+      DATABRICKS_SCHEMA:
+        required: false
+        type: string
+      DATABRICKS_HOST:
+        required: false
+        type: string
+      DATABRICKS_HTTP_PATH:
+        required: false
+        type: string
+      # spark
+      SPARK_HOST:
+        required: false
+        type: string
+      SPARK_SCHEMA:
+        required: false
+        type: string
+      SPARK_USER:
+        required: false
+        type: string
+      SPARK_METHOD:
+        required: false
+        type: string
+      SPARK_PORT:
+        required: false
+        type: string
+      # fabric
+      FABRIC_DRIVER:
+        required: false
+        type: string
+      FABRIC_HOST:
+        required: false
+        type: string
+      FABRIC_PORT:
+        required: false
+        type: string
+      FABRIC_DATABASE:
+        required: false
+        type: string
+      FABRIC_SCHEMA:
+        required: false
+        type: string
+      FABRIC_AUTHENTICATION:
+        required: false
+        type: string
+      FABRIC_TENANT:
+        required: false
+        type: string
+      FABRIC_CLIENT:
+        required: false
+        type: string
+      # synapse
+      SYNAPSE_DRIVER:
+        required: false
+        type: string
+      SYNAPSE_HOST:
+        required: false
+        type: string
+      SYNAPSE_PORT:
+        required: false
+        type: string
+      SYNAPSE_DATABASE:
+        required: false
+        type: string
+      SYNAPSE_SCHEMA:
+        required: false
+        type: string
+      SYNAPSE_AUTHENTICATION:
+        required: false
+        type: string
+      SYNAPSE_TENANT_ID:
+        required: false
+        type: string
+      SYNAPSE_CLIENT_ID:
+        required: false
+        type: string
+      # athena
+      ATHENA_S3_STAGING_DIR:
+        required: false
+        type: string
+      ATHENA_S3_DATA_DIR:
+        required: false
+        type: string
+      ATHENA_S3_DATA_NAMING:
+        required: false
+        type: string
+      ATHENA_REGION_NAME:
+        required: false
+        type: string
+      ATHENA_SCHEMA:
+        required: false
+        type: string
+      ATHENA_DATABASE:
+        required: false
+        type: string
+    secrets:
+      # postgres - None
+      # redshift
+      DBT_ENV_SECRET_REDSHIFT_PASS:
+        required: false
+      # bigquery
+      # This can't be prefixed with `DBT_ENV_SECRET` because it causes issues with GitHub
+      BIGQUERY_KEYFILE_JSON:
+        required: false
+      # snowflake
+      SNOWFLAKE_ACCOUNT:
+        required: false
+      DBT_ENV_SECRET_SNOWFLAKE_PASS:
+        required: false
+      # trino
+      DBT_ENV_SECRET_TRINO_PASS:
+        required: false
+      # databricks
+      DBT_SECRET_ENV_DATABRICKS_TOKEN:
+        required: false
+      # spark - None
+      # fabric
+      DBT_ENV_SECRET_FABRIC_CLIENT_SECRET:
+        required: false
+      # synapse
+      DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET:
+        required: false
+      # athena
+      DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID:
+        required: false
+      DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY:
+        required: false
+
+env:
+  PYTHON_VERSION: "3.11"
+
+jobs:
+  determine-supported-adapters:
+    runs-on: ubuntu-latest
+    outputs:
+      adapters: ${{ steps.supported-adapters.outputs.adapters }}
+    steps:
+      - name: "Checkout ${{ github.event.repository }}"
+        uses: actions/checkout@v4
+
+      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+
+      - name: "Install tox"
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: "Get list of supported adapters"
+        id: list-adapters
+        run: |
+          # github adds a pip freeze and a new line we need to strip out
+          source supported_adapters.env
+          echo $SUPPORTED_ADAPTERS
+          echo "test_adapters=$SUPPORTED_ADAPTERS" >> $GITHUB_OUTPUT
+
+      - name: "Format adapter list for use as the matrix"
+        id: supported-adapters
+        run: |
+          # Convert to JSON array and output
+          supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
+          echo $supported_adapters
+          echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT
+
+      - name: "[ANNOTATION] ${{ github.event.repository.name }} - Testing ${{ steps.supported-adapters.outputs.adapters }}"
+        run: |
+          title="${{ github.event.repository.name }} - adapters to test"
+          message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
+          echo "::notice $title::$message"
+
+  run-tests:
+    runs-on: ubuntu-latest
+    needs: [determine-supported-adapters]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_USER: ${{ inputs.POSTGRES_USER }}
+          POSTGRES_PASSWORD: ${{ inputs.DBT_ENV_SECRET_POSTGRES_PASS }}
+          POSTGRES_DB: ${{ inputs.POSTGRES_DATABASE }}
+          POSTGRES_HOST: ${{ inputs.POSTGRES_HOST }}
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+    strategy:
+      fail-fast: false
+      matrix:
+        adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}
+
+    steps:
+      - name: "Checkout ${{ github.event.repository }} "
+        uses: actions/checkout@v4
+
+      - name: "Set up Python ${{ env.PYTHON_VERSION }}"
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+
+      - name: "Install ${{ matrix.adapter }}"
+        run: |
+          python -m pip install --upgrade pip
+          pip install dbt-${{ matrix.adapter }}
+
+      - name: "Install tox"
tox" + run: | + python -m pip install --upgrade pip + pip install tox + + - name: "Run integration tests with tox on ${{ matrix.adapter }}" + run: | + tox -e dbt_integration_${{ matrix.adapter }} + env: + # postgres + POSTGRES_HOST: ${{ inputs.POSTGRES_HOST }} + POSTGRES_USER: ${{ inputs.POSTGRES_USER }} + DBT_ENV_SECRET_POSTGRES_PASS: ${{ inputs.DBT_ENV_SECRET_POSTGRES_PASS }} + POSTGRES_PORT: ${{ inputs.POSTGRES_PORT }} + POSTGRES_DATABASE: ${{ inputs.POSTGRES_DATABASE }} + POSTGRES_SCHEMA: ${{ inputs.POSTGRES_SCHEMA }} + # redshift + REDSHIFT_HOST: ${{ inputs.REDSHIFT_HOST }} + REDSHIFT_USER: ${{ inputs.REDSHIFT_USER }} + DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }} + REDSHIFT_DATABASE: ${{ inputs.REDSHIFT_DATABASE }} + REDSHIFT_SCHEMA: ${{ inputs.REDSHIFT_SCHEMA }} + REDSHIFT_PORT: ${{ inputs.REDSHIFT_PORT }} + # bigquery + BIGQUERY_PROJECT: ${{ inputs.BIGQUERY_PROJECT }} + BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }} + BIGQUERY_SCHEMA: ${{ inputs.BIGQUERY_SCHEMA }} + # snowflake + SNOWFLAKE_USER: ${{ inputs.SNOWFLAKE_USER }} + SNOWFLAKE_ROLE: ${{ inputs.SNOWFLAKE_ROLE }} + SNOWFLAKE_DATABASE: ${{ inputs.SNOWFLAKE_DATABASE }} + SNOWFLAKE_WAREHOUSE: ${{ inputs.SNOWFLAKE_WAREHOUSE }} + SNOWFLAKE_SCHEMA: ${{ inputs.SNOWFLAKE_SCHEMA }} + SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }} + DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }} + # trino + TRINO_METHOD: ${{ inputs.TRINO_METHOD }} + TRINO_USER: ${{ inputs.TRINO_USER }} + DBT_ENV_SECRET_TRINO_PASS: ${{ secrets.DBT_ENV_SECRET_TRINO_PASS }} + TRINO_HOST: ${{ inputs.TRINO_HOST }} + TRINO_PORT: ${{ inputs.TRINO_PORT }} + TRINO_CATALOG: ${{ inputs.TRINO_CATALOG }} + TRINO_SCHEMA: ${{ inputs.TRINO_SCHEMA }} + TRINO_TIMEZONE: ${{ inputs.TRINO_TIMEZONE }} + # databricks + DATABRICKS_SCHEMA: ${{ inputs.DATABRICKS_SCHEMA }} + DATABRICKS_HOST: ${{ inputs.DATABRICKS_HOST }} + DATABRICKS_HTTP_PATH: ${{ inputs.DATABRICKS_HTTP_PATH }} + DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DBT_ENV_SECRET_DATABRICKS_TOKEN }} + # spark + SPARK_HOST: ${{ inputs.SPARK_HOST }} + SPARK_SCHEMA: ${{ inputs.SPARK_SCHEMA }} + SPARK_USER: ${{ inputs.SPARK_USER }} + SPARK_METHOD: ${{ inputs.SPARK_METHOD }} + SPARK_PORT: ${{ inputs.SPARK_PORT }} + # fabric + FABRIC_DRIVER: ${{ inputs.FABRIC_DRIVER }} + FABRIC_HOST: ${{ inputs.FABRIC_HOST }} + FABRIC_PORT: ${{ inputs.FABRIC_PORT }} + FABRIC_DATABASE: ${{ inputs.FABRIC_DATABASE }} + FABRIC_SCHEMA: ${{ inputs.FABRIC_SCHEMA }} + FABRIC_AUTHENTICATION: ${{ inputs.FABRIC_AUTHENTICATION }} + FABRIC_TENANT: ${{ inputs.FABRIC_TENANT }} + FABRIC_CLIENT: ${{ inputs.FABRIC_CLIENT }} + DBT_ENV_SECRET_FABRIC_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_FABRIC_CLIENT_SECRET }} + # synapse + SYNAPSE_DRIVER: ${{ inputs.SYNAPSE_DRIVER }} + SYNAPSE_HOST: ${{ inputs.SYNAPSE_HOST }} + SYNAPSE_PORT: ${{ inputs.SYNAPSE_PORT }} + SYNAPSE_DATABASE: ${{ inputs.SYNAPSE_DATABASE }} + SYNAPSE_SCHEMA: ${{ inputs.SYNAPSE_SCHEMA }} + SYNAPSE_AUTHENTICATION: ${{ inputs.SYNAPSE_AUTHENTICATION }} + SYNAPSE_TENANT_ID: ${{ inputs.SYNAPSE_TENANT_ID }} + SYNAPSE_CLIENT_ID: ${{ inputs.SYNAPSE_CLIENT_ID }} + DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET }} + # athena + ATHENA_S3_STAGING_DIR: ${{ inputs.ATHENA_S3_STAGING_DIR }} + ATHENA_S3_DATA_DIR: ${{ inputs.ATHENA_S3_DATA_DIR }} + ATHENA_S3_DATA_NAMING: ${{ inputs.ATHENA_S3_DATA_NAMING }} + ATHENA_REGION_NAME: ${{ inputs.ATHENA_REGION_NAME }} + ATHENA_SCHEMA: ${{ inputs.ATHENA_SCHEMA }} + 
+          ATHENA_DATABASE: ${{ inputs.ATHENA_DATABASE }}
+          DBT_ENV_SECRET_ATHENA_ACCESS_KEY_ID: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID }}
+          DBT_ENV_SECRET_ATHENA_SECRET_ACCESS_KEY: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY }}
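
The reusable workflow makes two assumptions about the calling repository. First, the determine-supported-adapters job sources a supported_adapters.env file from the repo root and splits the SUPPORTED_ADAPTERS value on commas to build the matrix, so the caller needs a file shaped like this (the adapter list is illustrative):

SUPPORTED_ADAPTERS=postgres,redshift,snowflake,bigquery

Second, the test step invokes `tox -e dbt_integration_${{ matrix.adapter }}`, so the caller's tox.ini must define one environment with that exact name per supported adapter. A hypothetical Postgres-only sketch; the passenv patterns, deps, and dbt commands are assumptions about the package's own test layout, not something run_tox.yml prescribes:

[tox]
skipsdist = true
envlist = dbt_integration_postgres

[testenv:dbt_integration_postgres]
# Pass through the connection env vars the workflow exports above.
passenv =
    POSTGRES_*
    DBT_ENV_SECRET_POSTGRES_PASS
deps = dbt-postgres
commands =
    dbt deps --project-dir integration_tests
    dbt build --project-dir integration_tests --target postgres

The env name is the only hard contract with the workflow; the `--target postgres` flag assumes a matching target in the package's own profiles.yml.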