test: Action Repair (#900) #373
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Regression Tests

on:
  push:
    branches: [main]
    paths:
      - 'src/**'
      - 'tests/**'
      - 'pyproject.toml'
      - 'Makefile'
  # NOTE(review): pull_request_target runs with repository secrets available.
  # The job-level `if` guards below restrict auto-runs to same-repo PRs;
  # fork PRs must be triggered manually via workflow_dispatch.
  pull_request_target:
    branches: [main]
    paths:
      - 'src/**'
      - 'tests/**'
      - 'pyproject.toml'
      - 'Makefile'
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to test (e.g., 123). Leave empty if using ref instead.'
        required: false
        type: string
      ref:
        description: 'Branch, tag, or commit SHA to test (e.g., feature-branch or abc123). Leave empty if using PR number.'
        required: false
        type: string
  schedule:
    # Daily at 16:00 UTC.
    - cron: '0 16 * * *'

jobs:
| test1: | |
| runs-on: ubuntu-latest | |
| # Only auto-run for internal PRs (same repo) or push/workflow_dispatch/schedule | |
| # External PRs (from forks) require manual trigger via workflow_dispatch | |
| if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || (github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name == github.repository) | |
| strategy: | |
| matrix: | |
| python-version: ["3.11"] | |
| fail-fast: false | |
| steps: | |
| - name: Get PR SHA (if PR number provided) | |
| if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_number != '' | |
| id: get_pr_sha | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| const prNumber = context.payload.inputs.pr_number; | |
| const { data: pr } = await github.rest.pulls.get({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| pull_number: parseInt(prNumber) | |
| }); | |
| console.log(`PR #${prNumber} head SHA: ${pr.head.sha}`); | |
| core.setOutput('sha', pr.head.sha); | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| # For pull_request_target, checkout the PR branch | |
| # For workflow_dispatch, checkout the PR SHA (if PR number provided), or specified ref, or default branch | |
| # For push, checkout the pushed branch | |
| with: | |
| ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || (github.event_name == 'workflow_dispatch' && (github.event.inputs.pr_number != '' && steps.get_pr_sha.outputs.sha || github.event.inputs.ref || github.ref)) || github.ref }} | |
| fetch-depth: 0 | |
| - name: Set up Python ${{ matrix.python-version }} | |
| uses: actions/setup-python@v4 | |
| with: | |
| python-version: ${{ matrix.python-version }} | |
| cache: 'pip' | |
| - name: Upgrade pip | |
| run: | | |
| python -m pip install --upgrade pip | |
| pip install --upgrade setuptools wheel | |
| - name: Cache pip dependencies | |
| uses: actions/cache@v3 | |
| with: | |
| path: ~/.cache/pip | |
| key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }} | |
| restore-keys: | | |
| ${{ runner.os }}-pip- | |
| - name: Install dependencies | |
| run: | | |
| pip install -e ".[dev,test]" | |
| - name: Deploy SeekDB (OceanBase) | |
| run: | | |
| # Remove existing container if it exists | |
| sudo docker rm -f seekdb 2>/dev/null || true | |
| # Start SeekDB container | |
| sudo docker run -d -p 10001:2881 -e MEMORY_LIMIT=6G -e LOG_DISK_SIZE=6G -e DATAFILE_SIZE=6G -e DATAFILE_NEXT=6G -e DATAFILE_MAXSIZE=100G --name seekdb oceanbase/seekdb | |
| # Wait for database to be ready | |
| echo "Waiting for SeekDB to be ready..." | |
| timeout=60 | |
| elapsed=0 | |
| while [ $elapsed -lt $timeout ]; do | |
| if sudo docker exec seekdb mysql -uroot -e "SELECT 1" > /dev/null 2>&1; then | |
| echo "✓ SeekDB is ready!" | |
| break | |
| fi | |
| echo "Waiting for SeekDB... ($elapsed/$timeout seconds)" | |
| sleep 2 | |
| elapsed=$((elapsed + 2)) | |
| done | |
| if [ $elapsed -ge $timeout ]; then | |
| echo "⚠ Warning: SeekDB may not be fully ready, but continuing..." | |
| fi | |
| # Show container status | |
| echo "SeekDB container status:" | |
| sudo docker ps | grep seekdb || true | |
| echo "Recent logs:" | |
| sudo docker logs --tail 20 seekdb || true | |
| mysql -uroot -h127.0.0.1 -P10001 -e "CREATE DATABASE IF NOT EXISTS powermem;" || echo "⚠ Warning: Failed to create database, but continuing..." | |
| mysql -uroot -h127.0.0.1 -P10001 -e "SHOW DATABASES LIKE 'powermem';" || echo "⚠ Warning: Could not verify database, but continuing..." | |
| - name: Set env | |
| env: | |
| QWEN_API_KEY: "${{ secrets.QWEN_API_KEY }}" | |
| SILICONFLOW_CN_API_KEY: "${{ secrets.SILICONFLOW_CN_API_KEY }}" | |
| SILICONFLOW_COM_API_KEY: "${{ secrets.SILICONFLOW_COM_API_KEY }}" | |
| run: | | |
| echo "QWEN_API_KEY length: ${#QWEN_API_KEY}" | |
| echo "SILICONFLOW_CN_API_KEY length: ${#SILICONFLOW_CN_API_KEY}" | |
| echo "SILICONFLOW_COM_API_KEY length: ${#SILICONFLOW_COM_API_KEY}" | |
| cp .env.example .env | |
| # Update existing environment variables | |
| sed -i 's|^OCEANBASE_HOST=.*|OCEANBASE_HOST=127.0.0.1|' .env | |
| sed -i 's|^OCEANBASE_PORT=.*|OCEANBASE_PORT=10001|' .env | |
| sed -i 's|^OCEANBASE_USER=.*|OCEANBASE_USER=root|' .env | |
| sed -i 's|^OCEANBASE_PASSWORD=.*|OCEANBASE_PASSWORD=|' .env | |
| sed -i 's|^OCEANBASE_DATABASE=.*|OCEANBASE_DATABASE=powermem|' .env | |
| sed -i 's|^OCEANBASE_COLLECTION=.*|OCEANBASE_COLLECTION=memories|' .env | |
| sed -i 's|^DATABASE_PROVIDER=.*|DATABASE_PROVIDER=oceanbase|' .env | |
| sed -i 's|^LLM_PROVIDER=.*|LLM_PROVIDER=siliconflow|' .env | |
| sed -i 's|^LLM_MODEL=.*|LLM_MODEL=THUDM/GLM-4-9B-0414|' .env | |
| sed -i 's|^SILICONFLOW_LLM_BASE_URL=.*|SILICONFLOW_LLM_BASE_URL=https://api.siliconflow.cn/v1|' .env | |
| sed -i 's|^GRAPH_STORE_PORT=.*|GRAPH_STORE_PORT=10001|' .env | |
| sed -i 's|^GRAPH_STORE_PASSWORD=.*|GRAPH_STORE_PASSWORD=|' .env | |
| sed -i "s|^LLM_API_KEY=.*|LLM_API_KEY=${SILICONFLOW_CN_API_KEY}|" .env | |
| sed -i "s|^EMBEDDING_API_KEY=.*|EMBEDDING_API_KEY=${QWEN_API_KEY}|" .env | |
| sed -i "s|^POWERMEM_SERVER_API_KEYS=.*|POWERMEM_SERVER_API_KEYS=key1,key2,key3|" .env | |
| - name: Run regression tests | |
| id: run_tests | |
| env: | |
| QWEN_API_KEY: "${{ secrets.QWEN_API_KEY }}" | |
| SILICONFLOW_CN_API_KEY: "${{ secrets.SILICONFLOW_CN_API_KEY }}" | |
| SILICONFLOW_COM_API_KEY: "${{ secrets.SILICONFLOW_COM_API_KEY }}" | |
| run: | | |
| mkdir -p report | |
| pytest tests/regression/test_scenario*.py -vs --junitxml=report/test1.xml | |
| - name: Upload JUnit report | |
| if: always() && (steps.run_tests.outcome == 'success' || steps.run_tests.outcome == 'failure') | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: junit-test1 | |
| path: report/ | |
| - name: Cleanup Docker | |
| if: always() | |
| run: | | |
| echo "清理 Docker 容器..." | |
| sudo docker rm -f seekdb 2>/dev/null || true | |
| sudo docker ps | |
| echo "✓ 清理完成" | |
| test2: | |
| runs-on: ubuntu-latest | |
| # Only auto-run for internal PRs (same repo) or push/workflow_dispatch/schedule | |
| # External PRs (from forks) require manual trigger via workflow_dispatch | |
| if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || (github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name == github.repository) | |
| strategy: | |
| matrix: | |
| python-version: ["3.11"] | |
| fail-fast: false | |
| steps: | |
| - name: Get PR SHA (if PR number provided) | |
| if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_number != '' | |
| id: get_pr_sha | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| const prNumber = context.payload.inputs.pr_number; | |
| const { data: pr } = await github.rest.pulls.get({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| pull_number: parseInt(prNumber) | |
| }); | |
| console.log(`PR #${prNumber} head SHA: ${pr.head.sha}`); | |
| core.setOutput('sha', pr.head.sha); | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| # For pull_request_target, checkout the PR branch | |
| # For workflow_dispatch, checkout the PR SHA (if PR number provided), or specified ref, or default branch | |
| # For push, checkout the pushed branch | |
| with: | |
| ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || (github.event_name == 'workflow_dispatch' && (github.event.inputs.pr_number != '' && steps.get_pr_sha.outputs.sha || github.event.inputs.ref || github.ref)) || github.ref }} | |
| fetch-depth: 0 | |
| - name: Set up Python ${{ matrix.python-version }} | |
| uses: actions/setup-python@v4 | |
| with: | |
| python-version: ${{ matrix.python-version }} | |
| cache: 'pip' | |
| - name: Upgrade pip | |
| run: | | |
| python -m pip install --upgrade pip | |
| pip install --upgrade setuptools wheel | |
| - name: Cache pip dependencies | |
| uses: actions/cache@v3 | |
| with: | |
| path: ~/.cache/pip | |
| key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }} | |
| restore-keys: | | |
| ${{ runner.os }}-pip- | |
| - name: Install dependencies | |
| run: | | |
| pip install -e ".[dev,test]" | |
| - name: Deploy SeekDB (OceanBase) | |
| run: | | |
| # Remove existing container if it exists | |
| sudo docker rm -f seekdb 2>/dev/null || true | |
| # Start SeekDB container | |
| sudo docker run -d -p 10001:2881 -e MEMORY_LIMIT=6G -e LOG_DISK_SIZE=6G -e DATAFILE_SIZE=6G -e DATAFILE_NEXT=6G -e DATAFILE_MAXSIZE=100G --name seekdb oceanbase/seekdb | |
| # Wait for database to be ready | |
| echo "Waiting for SeekDB to be ready..." | |
| timeout=60 | |
| elapsed=0 | |
| while [ $elapsed -lt $timeout ]; do | |
| if sudo docker exec seekdb mysql -uroot -e "SELECT 1" > /dev/null 2>&1; then | |
| echo "✓ SeekDB is ready!" | |
| break | |
| fi | |
| echo "Waiting for SeekDB... ($elapsed/$timeout seconds)" | |
| sleep 2 | |
| elapsed=$((elapsed + 2)) | |
| done | |
| if [ $elapsed -ge $timeout ]; then | |
| echo "⚠ Warning: SeekDB may not be fully ready, but continuing..." | |
| fi | |
| # Show container status | |
| echo "SeekDB container status:" | |
| sudo docker ps | grep seekdb || true | |
| echo "Recent logs:" | |
| sudo docker logs --tail 20 seekdb || true | |
| mysql -uroot -h127.0.0.1 -P10001 -e "CREATE DATABASE IF NOT EXISTS powermem;" || echo "⚠ Warning: Failed to create database, but continuing..." | |
| mysql -uroot -h127.0.0.1 -P10001 -e "SHOW DATABASES LIKE 'powermem';" || echo "⚠ Warning: Could not verify database, but continuing..." | |
| - name: Set env | |
| env: | |
| QWEN_API_KEY: "${{ secrets.QWEN_API_KEY }}" | |
| SILICONFLOW_CN_API_KEY: "${{ secrets.SILICONFLOW_CN_API_KEY }}" | |
| SILICONFLOW_COM_API_KEY: "${{ secrets.SILICONFLOW_COM_API_KEY }}" | |
| run: | | |
| echo "QWEN_API_KEY length: ${#QWEN_API_KEY}" | |
| echo "SILICONFLOW_CN_API_KEY length: ${#SILICONFLOW_CN_API_KEY}" | |
| echo "SILICONFLOW_COM_API_KEY length: ${#SILICONFLOW_COM_API_KEY}" | |
| cp .env.example .env | |
| # Update existing environment variables | |
| sed -i 's|^OCEANBASE_HOST=.*|OCEANBASE_HOST=127.0.0.1|' .env | |
| sed -i 's|^OCEANBASE_PORT=.*|OCEANBASE_PORT=10001|' .env | |
| sed -i 's|^OCEANBASE_USER=.*|OCEANBASE_USER=root|' .env | |
| sed -i 's|^OCEANBASE_PASSWORD=.*|OCEANBASE_PASSWORD=|' .env | |
| sed -i 's|^OCEANBASE_DATABASE=.*|OCEANBASE_DATABASE=powermem|' .env | |
| sed -i 's|^OCEANBASE_COLLECTION=.*|OCEANBASE_COLLECTION=memories|' .env | |
| sed -i 's|^DATABASE_PROVIDER=.*|DATABASE_PROVIDER=oceanbase|' .env | |
| sed -i 's|^LLM_PROVIDER=.*|LLM_PROVIDER=siliconflow|' .env | |
| sed -i 's|^LLM_MODEL=.*|LLM_MODEL=THUDM/GLM-4-9B-0414|' .env | |
| sed -i 's|^SILICONFLOW_LLM_BASE_URL=.*|SILICONFLOW_LLM_BASE_URL=https://api.siliconflow.cn/v1|' .env | |
| sed -i 's|^GRAPH_STORE_PORT=.*|GRAPH_STORE_PORT=10001|' .env | |
| sed -i 's|^GRAPH_STORE_PASSWORD=.*|GRAPH_STORE_PASSWORD=|' .env | |
| sed -i "s|^LLM_API_KEY=.*|LLM_API_KEY=${SILICONFLOW_CN_API_KEY}|" .env | |
| sed -i "s|^EMBEDDING_API_KEY=.*|EMBEDDING_API_KEY=${QWEN_API_KEY}|" .env | |
| sed -i "s|^POWERMEM_SERVER_API_KEYS=.*|POWERMEM_SERVER_API_KEYS=key1,key2,key3|" .env | |
| - name: Run regression tests | |
| id: run_tests | |
| env: | |
| QWEN_API_KEY: "${{ secrets.QWEN_API_KEY }}" | |
| SILICONFLOW_CN_API_KEY: "${{ secrets.SILICONFLOW_CN_API_KEY }}" | |
| SILICONFLOW_COM_API_KEY: "${{ secrets.SILICONFLOW_COM_API_KEY }}" | |
| run: | | |
| mkdir -p report | |
| pytest tests/regression/ --ignore-glob='*test_scenario*.py' -vs --junitxml=report/test2.xml | |
| - name: Upload JUnit report | |
| if: always() && (steps.run_tests.outcome == 'success' || steps.run_tests.outcome == 'failure') | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: junit-test2 | |
| path: report/ | |
| - name: Cleanup Docker | |
| if: always() | |
| run: | | |
| echo "清理 Docker 容器..." | |
| sudo docker rm -f seekdb 2>/dev/null || true | |
| sudo docker ps | |
| echo "✓ 清理完成" | |
| report: | |
| runs-on: ubuntu-latest | |
| needs: [test1, test2] | |
| if: always() && (needs.test1.result == 'success' || needs.test1.result == 'failure') && (needs.test2.result == 'success' || needs.test2.result == 'failure') | |
| steps: | |
| - name: Download JUnit artifacts | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: junit-* | |
| - name: Collect JUnit reports | |
| run: | | |
| mkdir -p all-reports | |
| # download-artifact 解压后可能是 junit-test1/report/test1.xml 或 junit-test1/test1.xml,用 find 兼容两种结构 | |
| find junit-test1 junit-test2 -maxdepth 3 -name '*.xml' -exec cp {} all-reports/ \; | |
| ls -la all-reports/ | |
| - name: Publish Test Report Summary | |
| uses: mikepenz/action-junit-report@v4 | |
| if: always() | |
| with: | |
| report_paths: 'all-reports/*.xml' | |
| require_tests: false | |
| job_name: Regression Tests | |
| check_name: Test Report | |