fix MicrobatchExecutionDebug message #13370
Workflow file for this run
# This Action makes a dbt run to sample json structured logs
# and checks that they conform to the currently documented schema.
#
# If this action fails it either means we have unintentionally deviated
# from our documented structured logging schema, or we need to bump the
# version of our structured logging and add new documentation to
# communicate these changes.
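#
# To reproduce the sampling locally (a rough sketch, not part of this workflow):
# run the integration suite with JSON logging enabled, e.g.
#   DBT_LOG_FORMAT=json tox -e integration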
name: Structured Logging Schema Check

on:
  push:
    branches:
      - "main"
      - "*.latest"
      - "releases/*"
  pull_request:
  workflow_dispatch:

permissions: read-all

# top-level adjustments can be made here
env:
  # number of parallel processes to spawn for python testing
  PYTHON_INTEGRATION_TEST_WORKERS: 5
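  # this value sizes the split-groups matrix generated below and is reused as the
  # "--splits" argument passed to pytest via PYTEST_ADDOPTS in the test-schema job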
jobs:
  integration-metadata:
    name: integration test metadata generation
    runs-on: ubuntu-latest
    outputs:
      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
    steps:
      - name: generate split-groups
        id: generate-split-groups
        run: |
MATRIX_JSON="[" | |
for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do | |
MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}") | |
done | |
MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}" | |
MATRIX_JSON+="]" | |
echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT | |
  # run the integration tests with structured (json) logging enabled
  test-schema:
    name: Test Log Schema
    runs-on: ubuntu-20.04
    timeout-minutes: 30
    needs:
      - integration-metadata
    strategy:
      fail-fast: false
      matrix:
        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
    env:
      # turns warnings into errors
      RUSTFLAGS: "-D warnings"
      # points tests to the log directory
      LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
      # tells integration tests to output logs in json format
      DBT_LOG_FORMAT: "json"
      # tells the eventmgr to convert logging events into bytes
      DBT_TEST_BINARY_SERIALIZATION: "true"
      # Additional test users
      DBT_TEST_USER_1: dbt_test_user_1
      DBT_TEST_USER_2: dbt_test_user_2
      DBT_TEST_USER_3: dbt_test_user_3
    steps:
      - name: checkout dev
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"

      - name: Install python dependencies
        run: |
          pip install --user --upgrade pip
          pip --version
          pip install tox
          tox --version

      - name: Set up postgres
        uses: ./.github/actions/setup-postgres-linux

      - name: ls
        run: ls
      # integration tests generate a ton of logs in different files. the next step will find them all.
      # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
      - name: Run integration tests
        uses: nick-fields/retry@v3
        with:
          timeout_minutes: 30
          max_attempts: 3
          command: tox -e integration -- -nauto
        env:
          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
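          # the format() call expands to e.g. "--splits 5 --group 3", so each matrix job runs one slice of the suite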
  test-schema-report:
    name: Log Schema Test Suite
    runs-on: ubuntu-latest
    needs: test-schema
    steps:
      - name: "[Notification] Log test suite passes"
        run: |
          echo "::notice::Log test suite passes"