Skip to content

Adding issues: write on labeler permissions #161

Adding issues: write on labeler permissions

Adding issues: write on labeler permissions #161

Workflow file for this run

name: Tests

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]
  # Allows this workflow to be invoked from other workflows as a reusable
  # workflow, optionally scoped to a single component.
  workflow_call:
    inputs:
      component:
        description: 'Component to test (api, portal, or all)'
        required: false
        type: string
        default: 'all'

jobs:
# Detect which components changed (only for push/PR, not workflow_call)
changes:
runs-on: ubuntu-latest
outputs:
api: ${{ steps.set-outputs.outputs.api }}
portal: ${{ steps.set-outputs.outputs.portal }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Detect changes (push/PR)
uses: dorny/paths-filter@v3
id: changes
if: github.event_name != 'workflow_call'
with:
filters: |
api:
- 'api/**'
portal:
- 'portal/**'
- name: Set outputs
id: set-outputs
run: |
# For workflow_call, use the input component
if [ "${{ github.event_name }}" == "workflow_call" ]; then
if [ "${{ inputs.component }}" == "api" ]; then
echo "api=true" >> $GITHUB_OUTPUT
echo "portal=false" >> $GITHUB_OUTPUT
elif [ "${{ inputs.component }}" == "portal" ]; then
echo "api=false" >> $GITHUB_OUTPUT
echo "portal=true" >> $GITHUB_OUTPUT
else
echo "api=true" >> $GITHUB_OUTPUT
echo "portal=true" >> $GITHUB_OUTPUT
fi
else
# For push/PR, use path-filter results
echo "api=${{ steps.changes.outputs.api }}" >> $GITHUB_OUTPUT
echo "portal=${{ steps.changes.outputs.portal }}" >> $GITHUB_OUTPUT
fi
# Backend Unit Tests
unit-tests:
runs-on: ubuntu-latest
name: Backend Unit Tests
needs: changes
if: needs.changes.outputs.api == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
cache-dependency-glob: "api/uv.lock"
- name: Set up Python
working-directory: ./api
run: uv python install 3.13
- name: Install dependencies
working-directory: ./api
run: uv sync --frozen
- name: Run linter (ruff)
working-directory: ./api
run: |
uv tool run ruff check app/ --output-format=github
uv tool run ruff format app/ --check
- name: Run unit tests with coverage
working-directory: ./api
env:
PYTHONPATH: ${{ github.workspace }}/api
SECRET_KEY: test-secret-key-for-github-actions-minimum-32-characters
ACCESS_TOKEN_EXPIRE_MINUTES: 30
REFRESH_TOKEN_EXPIRE_DAYS: 7
run: |
uv run pytest --cov=app --cov-report=term-missing --cov-report=xml --cov-report=html tests/unit -v
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
if: github.event_name != 'workflow_call'
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./api/coverage.xml
flags: backend-unit
name: backend-unit-coverage
fail_ci_if_error: false
- name: Upload unit tests coverage reports
uses: actions/upload-artifact@v6
with:
name: backend-unit-coverage-report
path: api/htmlcov/
retention-days: 30
# Backend Integration Tests
integration-tests:
runs-on: ubuntu-latest
name: Backend Integration Tests
needs: changes
if: needs.changes.outputs.api == 'true'
services:
postgres:
image: postgres:15
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: tron_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
cache-dependency-glob: "api/uv.lock"
- name: Set up Python
working-directory: ./api
run: uv python install 3.13
- name: Install dependencies
working-directory: ./api
run: uv sync --frozen
- name: Run integration tests with coverage
working-directory: ./api
env:
PYTHONPATH: ${{ github.workspace }}/api
DB_HOST: localhost
DB_USER: postgres
DB_PASSWORD: postgres
DB_NAME: tron_test
DB_PORT: 5432
SECRET_KEY: test-secret-key-for-github-actions-minimum-32-characters
ACCESS_TOKEN_EXPIRE_MINUTES: 30
REFRESH_TOKEN_EXPIRE_DAYS: 7
run: |
uv run pytest --cov=app --cov-report=term-missing --cov-report=xml --cov-report=html tests/integration -v
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
if: github.event_name != 'workflow_call'
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./api/coverage.xml
flags: backend-integration
name: backend-integration-coverage
fail_ci_if_error: false
- name: Upload integration tests coverage reports
uses: actions/upload-artifact@v6
with:
name: backend-integration-coverage-report
path: api/htmlcov/
retention-days: 30
# Frontend Tests
frontend-tests:
runs-on: ubuntu-latest
name: Frontend Tests
needs: changes
if: needs.changes.outputs.portal == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v6
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: portal/package-lock.json
- name: Install dependencies
working-directory: ./portal
run: npm ci
- name: Run linter
working-directory: ./portal
run: npm run lint
- name: Type check
working-directory: ./portal
run: npx tsc --noEmit
- name: Run tests with coverage
working-directory: ./portal
run: npm run test:coverage
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
if: github.event_name != 'workflow_call'
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./portal/coverage
flags: frontend
name: frontend-coverage
fail_ci_if_error: false
- name: Upload coverage reports
uses: actions/upload-artifact@v6
with:
name: frontend-coverage-report
path: portal/coverage/
retention-days: 30
# Security scanning
security-scan:
runs-on: ubuntu-latest
name: Security Scan
needs: changes
if: needs.changes.outputs.api == 'true' || needs.changes.outputs.portal == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Run Trivy vulnerability scanner (filesystem)
uses: aquasecurity/trivy-action@master
with:
scan-type: 'fs'
scan-ref: '.'
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
- name: Upload Trivy scan results
uses: github/codeql-action/upload-sarif@v3
if: always()
continue-on-error: true
with:
sarif_file: 'trivy-results.sarif'
# Aggregator job - all tests must pass
test:
runs-on: ubuntu-latest
name: test
needs: [unit-tests, integration-tests, frontend-tests, security-scan]
if: always()
steps:
- name: Check test results
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || \
[ "${{ needs.integration-tests.result }}" == "failure" ] || \
[ "${{ needs.frontend-tests.result }}" == "failure" ] || \
[ "${{ needs.security-scan.result }}" == "failure" ]; then
echo "❌ Some tests failed"
exit 1
fi
echo "✅ All tests passed successfully"