diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml
index f0d203a..fb37ddb 100644
--- a/.github/workflows/python-tests.yml
+++ b/.github/workflows/python-tests.yml
@@ -21,52 +21,142 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
+      # Load dependencies from GitHub runner cache for quicker install
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
+          cache: pip
+          cache-dependency-path: requirements.txt
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install -r requirements.txt
 
-      - name: Install pytest and pytest-cov
+      # Lint the Python files & upload the result statistics
+      - name: Run pylint analysis
+        id: pylint
+        continue-on-error: true
+        run: |
+          # pipefail so pylint's exit status is not masked by the pipe to tee;
+          # without it this step always succeeds and the pylint gate below can never fire.
+          set -o pipefail
+          mkdir -p tests/python/pylint/reports
+          # Use python -m pylint and tee to ensure output is captured and visible in logs
+          PYTHONPATH=$(pwd) python -m pylint --rcfile tests/python/.pylintrc infrastructure samples setup shared tests 2>&1 | tee tests/python/pylint/reports/latest.txt
+
+      - name: Upload pylint reports
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pylint-reports-${{ matrix.python-version }}
+          path: tests/python/pylint/reports/
+
+      # Static code analysis through simple compilation to ensure code is syntactically sound
+      - name: Verify bytecode compilation
         run: |
-          pip install pytest pytest-cov
+          python -m compileall infrastructure samples setup shared tests
 
       # Run tests with continue-on-error so that coverage and PR comments are always published.
       # The final step will explicitly fail the job if any test failed, ensuring PRs cannot be merged with failing tests.
       - name: Run pytest with coverage and generate JUnit XML
+        id: pytest
         run: |
-          PYTHONPATH=$(pwd) COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} pytest --cov=shared/python --cov-config=tests/python/.coveragerc --cov-report=html:tests/python/htmlcov-${{ matrix.python-version }} --junitxml=tests/python/junit-${{ matrix.python-version }}.xml tests/python/
+          PYTHONPATH=$(pwd) COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} pytest --cov=shared/python --cov-config=tests/python/.coveragerc --cov-report=html:tests/python/htmlcov-${{ matrix.python-version }} --cov-report=term-missing --junitxml=tests/python/junit-${{ matrix.python-version }}.xml tests/python/
         continue-on-error: true
 
       - name: Upload coverage HTML report
+        if: always()
         uses: actions/upload-artifact@v4
         with:
           name: coverage-html-${{ matrix.python-version }}
           path: tests/python/htmlcov-${{ matrix.python-version }}/
 
       - name: Upload JUnit test results
+        if: always()
         uses: actions/upload-artifact@v4
+        continue-on-error: true
         with:
           name: junit-results-${{ matrix.python-version }}
           path: tests/python/junit-${{ matrix.python-version }}.xml
 
-      - name: Publish Unit Test Results to PR
+      # Extract all linting and coverage results in preparation for publish
+      - name: Extract and Summarize Metrics
+        if: always()
+        id: metrics
+        run: |
+          # Pylint Score
+          TEXT_REPORT="tests/python/pylint/reports/latest.txt"
+          if [ -s "$TEXT_REPORT" ]; then
+            PYLINT_SCORE=$(grep -Eo 'Your code has been rated at [0-9.]+/10' "$TEXT_REPORT" | grep -Eo '[0-9.]+/10' | head -n 1)
+            if [ -n "$PYLINT_SCORE" ]; then
+              echo "pylint_score=$PYLINT_SCORE" >> "$GITHUB_OUTPUT"
+            else
+              echo "pylint_score=N/A" >> "$GITHUB_OUTPUT"
+            fi
+          else
+            echo "pylint_score=N/A" >> "$GITHUB_OUTPUT"
+          fi
+
+          # Coverage Percentage
+          if [ -f "tests/python/.coverage-${{ matrix.python-version }}" ]; then
+            TOTAL_COV=$(PYTHONPATH=$(pwd) COVERAGE_FILE=tests/python/.coverage-${{ matrix.python-version }} python -m coverage report | grep TOTAL | awk '{print $NF}')
+            echo "coverage=$TOTAL_COV" >> "$GITHUB_OUTPUT"
+          else
+            echo "coverage=N/A" >> "$GITHUB_OUTPUT"
+          fi
+
+      # Publish general statistics for linting, test success, and code coverage as well as detailed tests results
+      - name: Publish Consolidated Results to PR
+        if: always() && github.event_name == 'pull_request'
+        uses: marocchino/sticky-pull-request-comment@v2
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          header: python-results-${{ matrix.python-version }}
+          message: |
+            ## 🐍 Python ${{ matrix.python-version }} Results
+
+            | Metric | Status | Value |
+            | :--- | :---: | :--- |
+            | **Pylint Score** | ${{ steps.pylint.outcome == 'success' && '✅' || '⚠️' }} | `${{ steps.metrics.outputs.pylint_score }}` |
+            | **Unit Tests** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | `${{ steps.pytest.outcome }}` |
+            | **Code Coverage** | 📊 | `${{ steps.metrics.outputs.coverage }}` |
+
+            [Full Workflow Logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
+
+      - name: Generate Job Summary
+        if: always()
+        run: |
+          # Backticks must be escaped here: inside double quotes an unescaped
+          # backtick is shell command substitution and would try to execute the score.
+          echo "## 🐍 Python ${{ matrix.python-version }} Execution Summary" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "| Category | Status | Detail |" >> $GITHUB_STEP_SUMMARY
+          echo "| :--- | :---: | :--- |" >> $GITHUB_STEP_SUMMARY
+          echo "| **Pylint** | ${{ steps.pylint.outcome == 'success' && '✅' || '⚠️' }} | Score: \`${{ steps.metrics.outputs.pylint_score }}\` |" >> $GITHUB_STEP_SUMMARY
+          echo "| **Pytest** | ${{ steps.pytest.outcome == 'success' && '✅' || '❌' }} | Outcome: \`${{ steps.pytest.outcome }}\` |" >> $GITHUB_STEP_SUMMARY
+          echo "| **Coverage** | 📊 | Total: \`${{ steps.metrics.outputs.coverage }}\` |" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "---" >> $GITHUB_STEP_SUMMARY
+
+      - name: Publish Unit Test Results
         if: always()
         uses: EnricoMi/publish-unit-test-result-action@v2
         with:
           files: tests/python/junit-${{ matrix.python-version }}.xml
-          comment_title: Python ${{ matrix.python-version }} Test Results
+          comment_title: Python ${{ matrix.python-version }} Detailed Test Results
 
       # Explicitly fail the job if any test failed (so PRs cannot be merged with failing tests).
       # This runs after all reporting steps, meaning coverage and PR comments are always published.
       - name: Fail if tests failed
-        if: always()
+        if: steps.pytest.outcome == 'failure'
         run: |
-          if grep -q 'failures="[1-9]' tests/python/junit-${{ matrix.python-version }}.xml; then
-            echo "::error ::Unit tests failed. See above for details."
-            exit 1
-          fi
+          echo "::error ::Unit tests failed. See above for details."
+          exit 1
+
+      - name: Fail if pylint failed
+        if: steps.pylint.outcome == 'failure'
+        run: |
+          echo "::error ::Pylint violations detected. See PR comment or artifacts for details."
+          exit 1