run_tox.yml
# **what?**
# Run tests for packages against supported adapters
#
# **why?**
# To ensure that packages work as expected with all supported adapters
#
# **when?**
# On workflow call
# This is a reusable workflow meant to be called by many different packages to run integration tests.
# Any changes should be backwards compatible with all packages that call this workflow.
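#
# For reference, a minimal sketch of how a package repository might call this
# reusable workflow from its own CI workflow. The repository path, ref, and
# variable names below are illustrative assumptions, not values defined here:
#
#   jobs:
#     run-integration-tests:
#       uses: <owner>/<this-repo>/.github/workflows/run_tox.yml@main
#       with:
#         REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
#         REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
#         REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
#         REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
#         REDSHIFT_PORT: "5439"
#       secrets:
#         DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
#
# A caller can instead use `secrets: inherit` to pass all of its secrets through.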
name: Package Integration Tests
on:
workflow_call:
inputs:
# postgres
# postgres vars are all defaulted so that they are not required to be passed
# in since the postgres tests will run inside the container with a local instance
# of postgres.
POSTGRES_SCHEMA:
required: false
type: string
default: "integration_tests_postgres_${{ github.run_number }}"
POSTGRES_HOST:
required: false
type: string
default: "localhost"
POSTGRES_USER:
required: false
type: string
default: "root"
POSTGRES_PORT:
required: false
type: string
default: "5432"
POSTGRES_DATABASE:
required: false
type: string
default: "postgres_test"
# since this is for the db running inside the container, this is not actually a secret
# and does not need to be treated as such in the workflow.
DBT_ENV_SECRET_POSTGRES_PASS:
description: "The password that isn't actually secret"
type: string
default: "postgres_test"
# redshift
REDSHIFT_HOST:
required: false
type: string
REDSHIFT_USER:
required: false
type: string
REDSHIFT_DATABASE:
required: false
type: string
REDSHIFT_SCHEMA:
required: false
type: string
REDSHIFT_PORT:
required: false
type: string
# bigquery
BIGQUERY_PROJECT:
required: false
type: string
BIGQUERY_SCHEMA:
required: false
type: string
# snowflake
SNOWFLAKE_USER:
required: false
type: string
SNOWFLAKE_ROLE:
required: false
type: string
SNOWFLAKE_DATABASE:
required: false
type: string
SNOWFLAKE_WAREHOUSE:
required: false
type: string
SNOWFLAKE_SCHEMA:
required: false
type: string
# trino
TRINO_METHOD:
required: false
type: string
TRINO_USER:
required: false
type: string
TRINO_HOST:
required: false
type: string
TRINO_PORT:
required: false
type: string
TRINO_CATALOG:
required: false
type: string
TRINO_SCHEMA:
required: false
type: string
TRINO_TIMEZONE:
required: false
type: string
# databricks
DATABRICKS_SCHEMA:
required: false
type: string
DATABRICKS_HOST:
required: false
type: string
DATABRICKS_HTTP_PATH:
required: false
type: string
# spark
SPARK_HOST:
required: false
type: string
SPARK_SCHEMA:
required: false
type: string
SPARK_USER:
required: false
type: string
SPARK_METHOD:
required: false
type: string
SPARK_PORT:
required: false
type: string
# fabric
FABRIC_DRIVER:
required: false
type: string
FABRIC_HOST:
required: false
type: string
FABRIC_PORT:
required: false
type: string
FABRIC_DATABASE:
required: false
type: string
FABRIC_SCHEMA:
required: false
type: string
FABRIC_AUTHENTICATION:
required: false
type: string
FABRIC_TENANT:
required: false
type: string
FABRIC_CLIENT:
required: false
type: string
# synapse
SYNAPSE_DRIVER:
required: false
type: string
SYNAPSE_HOST:
required: false
type: string
SYNAPSE_PORT:
required: false
type: string
SYNAPSE_DATABASE:
required: false
type: string
SYNAPSE_SCHEMA:
required: false
type: string
SYNAPSE_AUTHENTICATION:
required: false
type: string
SYNAPSE_TENANT_ID:
required: false
type: string
SYNAPSE_CLIENT_ID:
required: false
type: string
# athena
ATHENA_S3_STAGING_DIR:
required: false
type: string
ATHENA_S3_DATA_DIR:
required: false
type: string
ATHENA_S3_DATA_NAMING:
required: false
type: string
ATHENA_REGION_NAME:
required: false
type: string
ATHENA_SCHEMA:
required: false
type: string
ATHENA_DATABASE:
required: false
type: string
secrets:
# postgres - None
# redshift
DBT_ENV_SECRET_REDSHIFT_PASS:
required: false
# bigquery
# This can't be prefixed with `DBT_ENV_SECRET` because it causes issues with GitHub
BIGQUERY_KEYFILE_JSON:
required: false
# snowflake
SNOWFLAKE_ACCOUNT:
required: false
DBT_ENV_SECRET_SNOWFLAKE_PASS:
required: false
# trino
DBT_ENV_SECRET_TRINO_PASS:
required: false
# databricks
DBT_ENV_SECRET_DATABRICKS_TOKEN:
required: false
# spark - None
# fabric
DBT_ENV_SECRET_FABRIC_CLIENT_SECRET:
required: false
# synapse
DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET:
required: false
# athena
DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID:
required: false
DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY:
required: false
env:
PYTHON_VERSION: "3.11"
jobs:
determine-supported-adapters:
runs-on: ubuntu-latest
outputs:
adapters: ${{ steps.supported-adapters.outputs.adapters }}
steps:
- name: "Checkout ${{ github.event.repository }}"
uses: actions/checkout@v4
- name: "Set up Python ${{ env.PYTHON_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install tox"
run: |
python -m pip install --upgrade pip
pip install tox
- name: "Get list of supported adapters"
id: list-adapters
run: |
# GitHub adds a pip freeze and a newline that we need to strip out
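# supported_adapters.env is expected to define a single comma-separated list, for example (hypothetical):
#   SUPPORTED_ADAPTERS=postgres,redshift,snowflake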
source supported_adapters.env
echo $SUPPORTED_ADAPTERS
echo "test_adapters=$SUPPORTED_ADAPTERS" >> $GITHUB_OUTPUT
- name: "Format adapter list for use as the matrix"
id: supported-adapters
run: |
# Convert to JSON array and output
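# e.g. "postgres,redshift,snowflake" becomes ["postgres","redshift","snowflake"]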
supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
echo $supported_adapters
echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT
- name: "[ANNOTATION] ${{ github.event.repository.name }} - Testing ${{ steps.supported-adapters.outputs.adapters }}"
run: |
title="${{ github.event.repository.name }} - adapters to test"
message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
echo "::notice $title::$message"
run-tests:
runs-on: ubuntu-latest
needs: [determine-supported-adapters]
services:
postgres:
image: postgres
env:
POSTGRES_USER: ${{ inputs.POSTGRES_USER }}
POSTGRES_PASSWORD: ${{ inputs.DBT_ENV_SECRET_POSTGRES_PASS }}
POSTGRES_DB: ${{ inputs.POSTGRES_DATABASE }}
POSTGRES_HOST: ${{ inputs.POSTGRES_HOST }}
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
strategy:
fail-fast: false
matrix:
adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}
steps:
- name: "Checkout ${{ github.event.repository }} "
uses: actions/checkout@v4
- name: "Set up Python ${{ env.PYTHON_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install ${{ matrix.adapter }}"
run: |
python -m pip install --upgrade pip
pip install dbt-${{ matrix.adapter }}
- name: "Install tox"
run: |
python -m pip install --upgrade pip
pip install tox
- name: "Run integration tests with tox on ${{ matrix.adapter }}"
run: |
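# each calling package's tox.ini is expected to define one environment per supported adapter
# following this naming convention, e.g. [testenv:dbt_integration_postgres]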
tox -e dbt_integration_${{ matrix.adapter }}
env:
# postgres
POSTGRES_HOST: ${{ inputs.POSTGRES_HOST }}
POSTGRES_USER: ${{ inputs.POSTGRES_USER }}
DBT_ENV_SECRET_POSTGRES_PASS: ${{ inputs.DBT_ENV_SECRET_POSTGRES_PASS }}
POSTGRES_PORT: ${{ inputs.POSTGRES_PORT }}
POSTGRES_DATABASE: ${{ inputs.POSTGRES_DATABASE }}
POSTGRES_SCHEMA: ${{ inputs.POSTGRES_SCHEMA }}
# redshift
REDSHIFT_HOST: ${{ inputs.REDSHIFT_HOST }}
REDSHIFT_USER: ${{ inputs.REDSHIFT_USER }}
DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
REDSHIFT_DATABASE: ${{ inputs.REDSHIFT_DATABASE }}
REDSHIFT_SCHEMA: ${{ inputs.REDSHIFT_SCHEMA }}
REDSHIFT_PORT: ${{ inputs.REDSHIFT_PORT }}
# bigquery
BIGQUERY_PROJECT: ${{ inputs.BIGQUERY_PROJECT }}
BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
BIGQUERY_SCHEMA: ${{ inputs.BIGQUERY_SCHEMA }}
# snowflake
SNOWFLAKE_USER: ${{ inputs.SNOWFLAKE_USER }}
SNOWFLAKE_ROLE: ${{ inputs.SNOWFLAKE_ROLE }}
SNOWFLAKE_DATABASE: ${{ inputs.SNOWFLAKE_DATABASE }}
SNOWFLAKE_WAREHOUSE: ${{ inputs.SNOWFLAKE_WAREHOUSE }}
SNOWFLAKE_SCHEMA: ${{ inputs.SNOWFLAKE_SCHEMA }}
SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
# trino
TRINO_METHOD: ${{ inputs.TRINO_METHOD }}
TRINO_USER: ${{ inputs.TRINO_USER }}
DBT_ENV_SECRET_TRINO_PASS: ${{ secrets.DBT_ENV_SECRET_TRINO_PASS }}
TRINO_HOST: ${{ inputs.TRINO_HOST }}
TRINO_PORT: ${{ inputs.TRINO_PORT }}
TRINO_CATALOG: ${{ inputs.TRINO_CATALOG }}
TRINO_SCHEMA: ${{ inputs.TRINO_SCHEMA }}
TRINO_TIMEZONE: ${{ inputs.TRINO_TIMEZONE }}
# databricks
DATABRICKS_SCHEMA: ${{ inputs.DATABRICKS_SCHEMA }}
DATABRICKS_HOST: ${{ inputs.DATABRICKS_HOST }}
DATABRICKS_HTTP_PATH: ${{ inputs.DATABRICKS_HTTP_PATH }}
DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DBT_ENV_SECRET_DATABRICKS_TOKEN }}
# spark
SPARK_HOST: ${{ inputs.SPARK_HOST }}
SPARK_SCHEMA: ${{ inputs.SPARK_SCHEMA }}
SPARK_USER: ${{ inputs.SPARK_USER }}
SPARK_METHOD: ${{ inputs.SPARK_METHOD }}
SPARK_PORT: ${{ inputs.SPARK_PORT }}
# fabric
FABRIC_DRIVER: ${{ inputs.FABRIC_DRIVER }}
FABRIC_HOST: ${{ inputs.FABRIC_HOST }}
FABRIC_PORT: ${{ inputs.FABRIC_PORT }}
FABRIC_DATABASE: ${{ inputs.FABRIC_DATABASE }}
FABRIC_SCHEMA: ${{ inputs.FABRIC_SCHEMA }}
FABRIC_AUTHENTICATION: ${{ inputs.FABRIC_AUTHENTICATION }}
FABRIC_TENANT: ${{ inputs.FABRIC_TENANT }}
FABRIC_CLIENT: ${{ inputs.FABRIC_CLIENT }}
DBT_ENV_SECRET_FABRIC_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_FABRIC_CLIENT_SECRET }}
# synapse
SYNAPSE_DRIVER: ${{ inputs.SYNAPSE_DRIVER }}
SYNAPSE_HOST: ${{ inputs.SYNAPSE_HOST }}
SYNAPSE_PORT: ${{ inputs.SYNAPSE_PORT }}
SYNAPSE_DATABASE: ${{ inputs.SYNAPSE_DATABASE }}
SYNAPSE_SCHEMA: ${{ inputs.SYNAPSE_SCHEMA }}
SYNAPSE_AUTHENTICATION: ${{ inputs.SYNAPSE_AUTHENTICATION }}
SYNAPSE_TENANT_ID: ${{ inputs.SYNAPSE_TENANT_ID }}
SYNAPSE_CLIENT_ID: ${{ inputs.SYNAPSE_CLIENT_ID }}
DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET: ${{ secrets.DBT_ENV_SECRET_SYNAPSE_CLIENT_SECRET }}
# athena
ATHENA_S3_STAGING_DIR: ${{ inputs.ATHENA_S3_STAGING_DIR }}
ATHENA_S3_DATA_DIR: ${{ inputs.ATHENA_S3_DATA_DIR }}
ATHENA_S3_DATA_NAMING: ${{ inputs.ATHENA_S3_DATA_NAMING }}
ATHENA_REGION_NAME: ${{ inputs.ATHENA_REGION_NAME }}
ATHENA_SCHEMA: ${{ inputs.ATHENA_SCHEMA }}
ATHENA_DATABASE: ${{ inputs.ATHENA_DATABASE }}
DBT_ENV_SECRET_ATHENA_ACCESS_KEY_ID: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_ACCESS_KEY_ID }}
DBT_ENV_SECRET_ATHENA_SECRET_ACCESS_KEY: ${{ secrets.DBT_ENV_SECRET_ATHENA_AWS_SECRET_ACCESS_KEY }}