STDCM v2 #5521
Workflow file for this run
name: build
on:
  pull_request:
  workflow_dispatch:
  merge_group:
    types: [checks_requested]
  push:
    branches:
      - dev
      - staging
      - prod

jobs:
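  # The build job bakes the Docker images defined in docker/docker-bake.hcl and
  # pushes them to ghcr.io. Every other job that needs an image pulls it through
  # the stable_tags / stable_version outputs exposed here.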
  build:
    runs-on: ubuntu-latest
    permissions:
      packages: write
    outputs:
      stable_tags: ${{ steps.bake-metadata.outputs.stable_tags }}
      stable_version: ${{ steps.bake-metadata.outputs.stable_version }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Make bake metadata
        id: bake-metadata
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: |
          set -eo pipefail
          echo ::group::Github context
          python3 -m json.tool <<< "${GITHUB_CONTEXT}"
          echo ::endgroup::
          echo ::group::Bake metadata
          .github/scripts/bake-metadata.py | tee bake-metadata.json
          echo ::endgroup::
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: v0.12.0
          driver-opts: image=moby/buildkit:v0.12.3
          buildkitd-flags: --debug
      - name: Login to ghcr.io
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: '$' # special user for authenticating as a gh actions worker
          password: ${{ secrets.GITHUB_TOKEN }}
      # This build step occasionally fails for transient reasons, so we retry it
      # up to 3 times. A retry only happens when the build log contains one of
      # the whitelisted messages:
      #
      # - `failed to solve: failed to compute cache key`
      #   full message: `failed to solve: failed to compute cache key: failed to get state for index 0 on XXXXXX`
      #
      # - `httpReadSeeker: failed open: failed to authorize: no active session`
      #   full message: `ERROR: failed to solve: DeadlineExceeded: failed to compute cache key: failed to copy: httpReadSeeker: failed open: failed to authorize: no active session for XXXXX: context deadline exceeded`
      - name: Build and push
        run: |
          set -eo pipefail
          TRANSIENT_FAILURES=(
            "failed to solve: failed to compute cache key"
            "httpReadSeeker: failed open: failed to authorize: no active session"
          )
          BAKEFILE="--file=docker/docker-bake.hcl"
          METADATA="--file=bake-metadata.json"
          for i in $(seq 1 3); do
            echo "::group::Try $i"
            if docker buildx bake $BAKEFILE $METADATA --push 2>&1 | tee docker-build.log; then
              echo "::endgroup::"
              exit 0
            fi
            echo "::endgroup::"
            for failure_msg in "${TRANSIENT_FAILURES[@]}"; do
              if grep -q "$failure_msg" docker-build.log; then
                echo "Transient failure detected, retrying..."
                continue 2
              fi
            done
            echo "Build failed for non-transient cause, exiting."
            exit 1
          done
          echo "All retries failed, exiting."
          exit 1
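
  # Regenerates the test infrastructures with railjson_generator and fails if
  # the committed files under tests/data/infras are out of date.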
  check_generated_railjson_sync:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "poetry"
      - name: Install railjson_generator dependencies
        run: |
          poetry -C python/railjson_generator install
      - name: Generate railjson
        run: |
          mkdir /tmp/generated_infras
          poetry -C python/railjson_generator run python -m railjson_generator /tmp/generated_infras tests/infra-scripts/*.py
      - name: Ensure generated infrastructures are up to date
        run: diff -r -u tests/data/infras /tmp/generated_infras
  check_railjson_generator:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "poetry"
      - name: Install railjson_generator dependencies
        run: |
          cd python/railjson_generator
          poetry install
      - name: Flake8
        run: |
          cd python/railjson_generator
          poetry run pflake8 --config ./pyproject.toml --output-file flake8.xml --format junit-xml
      - name: Publish flake8 report
        uses: mikepenz/action-junit-report@v4
        if: failure()
        with:
          report_paths: flake8.xml
      - name: Black
        run: |
          cd python/railjson_generator
          poetry run black . --check
      - name: Isort
        run: |
          cd python/railjson_generator
          poetry run isort . --check
      - name: Pytype
        run: |
          cd python/railjson_generator
          poetry run pytype -j auto
      - name: Pytest
        run: |
          cd python/railjson_generator
          poetry run pytest --cov --cov-report=xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: railjson_generator
          directory: ./python/railjson_generator
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: false
          verbose: true
          files: coverage.xml
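
  # Checks the commit titles of the pull request against the repository's
  # conventions using .github/scripts/check-commit-titles.sh. Skipped when
  # there is no base ref (i.e. outside pull_request runs).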
  check_commits:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags
      - name: Check commit names
        run: |
          # We don't have a base ref to check against if we aren't in a
          # pull_request workflow.
          BASE=${{ github.base_ref }}
          if [[ -z "$BASE" ]]; then
            exit 0
          fi
          commit_titles() {
            git log --format=%s origin/"$BASE"..HEAD --skip=1
          }
          commit_titles | TERM=xterm-color .github/scripts/check-commit-titles.sh
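
  # Uses ripgrep to ensure every file ends with exactly one final newline
  # (SVG files are exempt from the missing-newline check).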
  final_newline_lint:
    runs-on: ubuntu-latest
    name: Check final newline
    steps:
      - name: Install ripgrep
        run: sudo apt-get install -y ripgrep
      - name: Checkout
        uses: actions/checkout@v4
      - name: Check final newline is present
        run: |
          # search for files missing a final newline
          if rg -Ul '[^\n]\z' -g '!*.svg' .; then
            echo "Found missing final newline on listed file(s)"
            exit 1
          fi
          # search for files with multiple final newlines
          if rg -Ul '\n\n\z' .; then
            echo "Found multiple final newlines on listed file(s)"
            exit 1
          fi
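
  # Lint-only checks (flake8, black, isort, pytype) for the integration test
  # code under tests/; the tests themselves run in the integration_tests job.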
  check_integration_tests:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "poetry"
      - name: Install dependencies
        run: |
          cd tests
          poetry install
      - name: Flake8
        run: |
          cd tests
          poetry run pflake8 --config ./pyproject.toml
      - name: Black
        run: |
          cd tests
          poetry run black . --check
      - name: Isort
        run: |
          cd tests
          poetry run isort . --check
      - name: Pytype
        run: |
          cd tests
          poetry run pytype -j auto
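
  # Lint checks (flake8, black, isort, pytype) for the osrd_schemas Python
  # package.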
  check_osrd_schema:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "poetry"
      - name: Install dependencies
        run: |
          cd python/osrd_schemas
          poetry install
      - name: Flake8
        run: |
          cd python/osrd_schemas
          poetry run pflake8 --config ./pyproject.toml --output-file flake8.xml --format junit-xml
      - name: Publish flake8 report
        uses: mikepenz/action-junit-report@v4
        if: failure()
        with:
          report_paths: flake8.xml
      - name: Black
        run: |
          cd python/osrd_schemas
          poetry run black . --check
      - name: Isort
        run: |
          cd python/osrd_schemas
          poetry run isort . --check
      - name: Pytype
        run: |
          cd python/osrd_schemas
          poetry run pytype -j auto
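
  # Checks that every TOML file in the repository is formatted with taplo.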
  check_toml:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install taplo
        uses: baptiste0928/cargo-install@v3
        with:
          crate: taplo-cli
          locked: true
      - name: Check TOML format
        run: taplo fmt --check --diff
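
  # Regenerates infra_schema.json from osrd_schemas and fails if it differs
  # from the copy committed under front/src/reducers/osrdconf/.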
  check_infra_schema_sync:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
          cache: "poetry"
      - name: Check infra_schema.json sync
        run: |
          cd python/osrd_schemas
          poetry install --no-interaction --no-root
          poetry run python -m osrd_schemas.infra_editor > current_infra_schema.json
          diff current_infra_schema.json ../../front/src/reducers/osrdconf/infra_schema.json
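
  # Re-runs `yarn generate-types` inside the front-build image (with the
  # editoast and gateway sources mounted) and fails if any file under front/
  # changes as a result.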
  check_front_rtk_sync:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Generate rtk bindings
        run: >
          docker run --rm --net=host -v $PWD/output:/app/tests/unit
          -v $PWD/editoast:/editoast
          -v $PWD/gateway:/gateway
          -v $PWD/front/src/common/api:/app/src/common/api
          ${{ fromJSON(needs.build.outputs.stable_tags).front-build }}
          yarn generate-types
      - name: Check for unexpected changes
        run: |
          git diff --exit-code front
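
  # Runs the Gradle checks for core inside the core-build image, then publishes
  # JaCoCo coverage, JUnit results and SpotBugs reports.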
  check_core:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Execute tests within container
        run: |
          docker run --name core-test \
            -v $PWD/core/build:/output ${{ fromJSON(needs.build.outputs.stable_tags).core-build }} \
            /bin/bash -c 'gradle -Pspotbugs_report_xml --continue check; status=$?; cp -r build/* /output/; exit $status'
          exit $(docker wait core-test)
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: core
          directory: ./core/build/reports/jacoco/testCodeCoverageReport
          files: testCodeCoverageReport.xml
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: false
          verbose: true
      - name: Report JUnit failures
        uses: mikepenz/action-junit-report@v4
        if: failure()
        with:
          report_paths: "./core/build/test-results/test/TEST-*.xml"
          require_tests: true
      - name: Report spotbugs lints
        if: failure()
        uses: jwgmeligmeyling/spotbugs-github-action@v1.2
        with:
          path: "./output/reports/spotbugs/*.xml"
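
  # Runs the editoast test suite inside the editoast-test image against
  # throwaway postgres and redis service containers, uploads grcov coverage,
  # and checks the front-end translation files for missing keys.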
  check_editoast_tests:
    runs-on: ubuntu-latest
    needs:
      - build
    services:
      postgres:
        image: postgis/postgis
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
        ports:
          - 5432:5432
        # needed because the postgres container does not provide a healthcheck
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      redis:
        image: redis
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 5s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Check for i18n missing keys
        run: |
          docker run --name=front-i18n-checker --net=host \
            ${{ fromJSON(needs.build.outputs.stable_tags).front-build }} \
            yarn i18n-checker
          exit $(docker wait front-i18n-checker)
      - name: Execute tests within container
        run: |
          docker run --rm --net=host \
            -v $PWD/docker/init_db.sql:/init_db.sql \
            postgis/postgis:latest \
            psql postgresql://postgres:password@localhost:5432 -f /init_db.sql
          docker run --name=editoast-test --net=host -v $PWD/output:/output \
            -e DATABASE_URL="postgres://osrd:password@localhost:5432/osrd" \
            ${{ fromJSON(needs.build.outputs.stable_tags).editoast-test }} \
            /bin/bash -c 'diesel migration run --locked-schema && cargo test --workspace --verbose -- --test-threads=4 && grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore "/*" -o /output/lcov.info'
          exit $(docker wait editoast-test)
      - name: Upload coverage
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: editoast
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./output/lcov.info
  check_editoast_lints:
    # Lints run in a separate job: the documentation check alone takes about
    # 1m30 to complete, and since the editoast tests already take a while to
    # run, we don't want this on the hot path.
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Documentation check
        run: |
          docker run --name=editoast-doc --net=host -v $PWD/output:/output \
            -e RUSTDOCFLAGS="-D warnings" \
            ${{ fromJSON(needs.build.outputs.stable_tags).editoast-test }} \
            cargo doc --manifest-path ./Cargo.toml --no-deps
          exit $(docker wait editoast-doc)
      - name: Format check
        run: |
          docker run --name=editoast-format --net=host -v $PWD/output:/output \
            ${{ fromJSON(needs.build.outputs.stable_tags).editoast-test }} \
            cargo fmt --check
          exit $(docker wait editoast-format)
      - name: Clippy check
        run: |
          docker run --name=editoast-clippy --net=host -v $PWD/output:/output \
            ${{ fromJSON(needs.build.outputs.stable_tags).editoast-test }} \
            cargo clippy --all-features --all-targets -- -D warnings
          exit $(docker wait editoast-clippy)
  check_editoast_openapi:
    # for the same reason as check_editoast_lints, we run this in a separate job
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Generate OpenAPI
        run: |
          docker run --name=editoast-test --net=host -v $PWD/output:/output \
            ${{ fromJSON(needs.build.outputs.stable_tags).editoast }} \
            /bin/bash -c 'editoast openapi > /output/openapi.yaml'
      - name: Check for unexpected changes
        run: |
          diff $PWD/editoast/openapi.yaml $PWD/output/openapi.yaml
      - name: Check for i18n API errors
        run: |
          docker run --name=front-i18n-api-error --net=host -v $PWD/output/openapi.yaml:/editoast/openapi.yaml \
            ${{ fromJSON(needs.build.outputs.stable_tags).front-build }} \
            yarn i18n-api-errors
          exit $(docker wait front-i18n-api-error)
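
  # Runs the gateway tests, documentation build and formatting check inside the
  # gateway-test image, with grcov coverage uploaded to Codecov.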
  check_gateway:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Execute tests within container
        run: |
          docker run --name=gateway-test --net=host -v $PWD/output:/output \
            ${{ fromJSON(needs.build.outputs.stable_tags).gateway-test }} \
            /bin/bash -c 'cargo test --verbose && grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore "/*" -o /output/lcov.info'
          exit $(docker wait gateway-test)
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: gateway
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./output/lcov.info
      - name: Documentation check
        run: |
          docker run --name=gateway-doc --net=host -v $PWD/output:/output \
            -e RUSTDOCFLAGS="-D warnings" \
            ${{ fromJSON(needs.build.outputs.stable_tags).gateway-test }} \
            cargo doc --manifest-path ./Cargo.toml --no-deps
          exit $(docker wait gateway-doc)
      - name: Format check
        run: |
          docker run --name=gateway-format --net=host -v $PWD/output:/output \
            ${{ fromJSON(needs.build.outputs.stable_tags).gateway-test }} \
            cargo fmt --check
          exit $(docker wait gateway-format)
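
  # Runs the front-end unit tests with coverage inside the front-build image.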
  check_front:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Execute tests within container
        run: |
          docker run --name=front-test --net=host -v $PWD/output:/app/tests/unit \
            ${{ fromJSON(needs.build.outputs.stable_tags).front-build }} \
            yarn test-coverage
          exit $(docker wait front-test)
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: front
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./output/coverage/clover.xml
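
  # Brings the application up with docker compose using the freshly built
  # images, runs the non-e2e pytest suite, then runs the Playwright end-to-end
  # tests; test videos and container logs are uploaded as artifacts.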
  integration_tests:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      # TODO: check if we can deduplicate the base steps from integration_tests_quality
      # https://www.jameskerr.blog/posts/sharing-steps-in-github-action-workflows/
      # (one possible approach is sketched below)
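      # A hedged sketch of that idea, not wired up here: the shared poetry/Python
      # setup could live in a local composite action (the path and name below are
      # hypothetical), e.g. .github/actions/setup-poetry/action.yml:
      #
      #   name: setup-poetry
      #   description: Install poetry and a cached Python toolchain
      #   runs:
      #     using: composite
      #     steps:
      #       - run: pipx install poetry
      #         shell: bash
      #       - uses: actions/setup-python@v5
      #         with:
      #           python-version: "3.9"
      #           cache: poetry
      #
      # Each job could then replace its three setup steps with:
      #   - uses: ./.github/actions/setup-poetry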
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.9"
          cache: "poetry"
      - name: Install dependencies
        run: |
          cd tests
          poetry install
      - name: Start up the test infrastructure
        id: start_playwright_worker
        run: |
          set -e
          export OSRD_FRONT_MODE=nginx
          export TAG='${{ needs.build.outputs.stable_version }}'
          services='editoast core front gateway'
          docker compose pull $services
          docker compose up --no-build -d $services jaeger
        env:
          DOCKER_BUILDKIT: 1
          COMPOSE_DOCKER_CLI_BUILD: 1
      - name: Run pytest
        run: |
          poetry -C tests run pytest -m "not e2e" --cov --cov-report=xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          name: codecov
          flags: tests
          fail_ci_if_error: false
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml
      - name: Run Playwright tests
        run: |
          docker run --init --name=playwright-test --net=host \
            -e CI=true \
            -v $PWD/front/test-results:/app/test-results \
            ${{ fromJSON(needs.build.outputs.stable_tags).front-tests }} \
            /bin/sh -c "npx playwright install --with-deps && yarn e2e-tests"
          exit $(docker wait playwright-test)
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: integration-videos
          path: front/test-results/
          retention-days: 30
      - name: Save container logs
        run: docker compose logs > container-logs
        if: always()
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: integration-container-logs
          path: container-logs
          retention-days: 30