Build time improvements #1307
Workflow file for this run
name: Build with analysis tools
on:
  workflow_dispatch:
    inputs:
      run_all_benchmarks:
        type: boolean

  schedule: # Schedule the job to run at 12 a.m. daily
    - cron: '0 0 * * *'

  pull_request:
    paths-ignore:
      - "**/*.md"
jobs:
  cibw_docker_image:
    uses: ./.github/workflows/cibw_docker_image.yml
    permissions: {packages: write}
    with:
      cibuildwheel_ver: "2.12.1"
      force_update: false

  code_coverage:
    needs: [cibw_docker_image]
    runs-on: "ubuntu-latest"
    container:
      image: ${{needs.cibw_docker_image.outputs.tag}}
    services:
      mongodb:
        image: mongo:4.4
        ports:
          - 27017:27017
    env:
      VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
      VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
      VCPKG_MAN_NUGET_USER: ${{secrets.VCPKG_MAN_NUGET_USER}} # For forks to download pre-compiled dependencies from the Man repo
      VCPKG_MAN_NUGET_TOKEN: ${{secrets.VCPKG_MAN_NUGET_TOKEN}}
      ARCTIC_CMAKE_PRESET: linux-debug
    steps:
      - uses: actions/[email protected]
        with:
          submodules: recursive

      - name: Get number of CPU cores
        uses: SimenB/[email protected]
        id: cpu-cores

      - name: Extra envs
        run: |
          . build_tooling/vcpkg_caching.sh # Linux follower needs another call in CIBW
          echo -e "VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES
          VCPKG_ROOT=$PLATFORM_VCPKG_ROOT" | tee -a $GITHUB_ENV
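          # CpuCount.cmake is expected to print something like "-- CMAKE_BUILD_PARALLEL_LEVEL=<n>";
          # sed strips CMake's "-- " prefix so the assignment can be appended straight to GITHUB_ENV.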
          cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
          echo "ARCTICDB_CODE_COVERAGE_BUILD=1" | tee -a $GITHUB_ENV
        env:
          CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}

      - name: Prepare C++ compilation env
        run: . build_tooling/prep_cpp_build.sh

      - name: CMake compile
        # We are pinning the version to 10.6 because >= 10.7 uses node20, which is not supported in the container
        uses: lukka/[email protected]
        with:
          cmakeListsTxtPath: ${{github.workspace}}/cpp/CMakeLists.txt
          configurePreset: ${{env.ARCTIC_CMAKE_PRESET}}
          buildPreset: ${{env.ARCTIC_CMAKE_PRESET}}
        env:
          ARCTICDB_DEBUG_FIND_PYTHON: ${{vars.ARCTICDB_DEBUG_FIND_PYTHON}}
          python_impl_name: 'cp311'

      - name: Run C++ Tests
        shell: bash -l {0}
        run: |
          cd cpp/out/linux-debug-build/
          ls arcticdb
          make -j ${{ steps.cpu-cores.outputs.count }} arcticdb_rapidcheck_tests
          make -j ${{ steps.cpu-cores.outputs.count }} test_unit_arcticdb
          ctest

      # We are changing the python here because we want to use the default python to build (it is a devel version)
      # and this python for the rest of the testing
      - name: Select Python (Linux)
        run: echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH

      - name: Install local dependencies with pip
        shell: bash
        run: |
          python -m pip install --upgrade pip
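          # ARCTIC_CMAKE_PRESET=skip presumably tells the build backend to skip the native CMake build,
          # reusing the artifacts compiled above and installing only the Python layer.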
          ARCTIC_CMAKE_PRESET=skip pip install -ve .[Testing]

      # - name: Test with pytest
      #   uses: ./.github/actions/run_local_pytest
      #   with:
      #     build_type: debug
      #     threads: 1
      #     fast_tests_only: 0
      #     other_params: '-m coverage run '

      - name: Get python Coverage report
        shell: bash -l {0}
        run: |
          cd python
          python -m coverage report -m | tee output.txt
          python -m coverage html
          zip -r python_cov.zip htmlcov/
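          # Pull the percentage out of coverage's TOTAL row and drop the trailing '%'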
echo "PYTHON_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV | |
- name: Run Gcovr manually post-pytest | |
shell: bash -l {0} | |
run: | | |
cd cpp/out/linux-debug-build/ | |
python -m pip install gcovr | |
mkdir coverage | |
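          # Exclude vcpkg-installed, proto, third_party and test-helper sources from the C++ report;
          # the TOTAL percentage is parsed out of the text output below.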
          python -m gcovr --txt --html-details coverage/index.html -e vcpkg_installed/ -e proto/ -e ../../third_party -e ../../arcticdb/util/test/ -r ../.. --exclude-throw-branches --exclude-unreachable-branches -u --exclude-function-lines | tee output.txt
          zip -r coverage.zip coverage/
          echo "CPP_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV

      - name: Upload Coverage
        uses: actions/[email protected]
        with:
          name: cpp-coverage-artifact
          path: cpp/out/linux-debug-build/coverage.zip

      - name: Upload Python Coverage
        uses: actions/[email protected]
        with:
          name: python-coverage-artifact
          path: python/python_cov.zip

      - name: Restore cached CPP Coverage Percentage from the previous run
        id: cache-cov-restore
        uses: actions/cache/[email protected]
        with:
          path: prev_coverage.txt
          key: coverage
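
      # If a previous run saved its percentage under the "coverage" key, compare against it;
      # otherwise the comparison below is skipped.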
      - name: Get and compare coverage if cache was restored
        run: |
          # if cache was restored, compare coverage
          if [ -f prev_coverage.txt ]; then
            PREV_COVERAGE=$(cat prev_coverage.txt | cut -d' ' -f2)
            echo "Previous coverage: $PREV_COVERAGE"
            CURR_COVERAGE=${{env.CPP_COV_PERCENT}}
            echo "CPP_COV_PREV_PERCENT=$PREV_COVERAGE" >> $GITHUB_ENV
            echo "Current coverage: $CURR_COVERAGE"
            if [ $CURR_COVERAGE -gt $PREV_COVERAGE ]; then
              echo "Coverage increased"
            elif [ $CURR_COVERAGE -lt $PREV_COVERAGE ]; then
              echo "Coverage decreased"
            else
              echo "Coverage unchanged"
            fi
          fi

      - name: Save CPP Coverage Percentage to file
        run: |
          # Written to the same path the restore step expects, so the next run picks it up as the previous value
          echo "Coverage: ${{ env.CPP_COV_PERCENT }}" > prev_coverage.txt

      - name: Save the current CPP Coverage Percentage to the cache
        id: cache-cov-save
        uses: actions/cache/[email protected]
        with:
          path: prev_coverage.txt
          key: coverage

      - name: Check percentage and send Slack notification
        if: ${{ env.CPP_COV_PREV_PERCENT && env.CPP_COV_PERCENT && env.CPP_COV_PERCENT < env.CPP_COV_PREV_PERCENT }}
        uses: slackapi/[email protected]
        with:
          # For posting a rich message using Block Kit
          payload: |
            {
              "text": "The CPP Code Coverage has been reduced",
              "blocks": [
                {
                  "type": "section",
                  "text": {
                    "type": "mrkdwn",
                    "text": "The CPP Code Coverage from the current run (${{ env.CPP_COV_PERCENT }}%) is lower than the previous one (${{ env.CPP_COV_PREV_PERCENT }}%)."
                  }
                }
              ]
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.ARCTICDB_DEV_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

  start_ec2_runner:
    uses: ./.github/workflows/ec2_runner_jobs.yml
    secrets: inherit
    with:
      job_type: start
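
  # start_ec2_runner / stop-ec2-runner bring up and tear down the self-hosted EC2 runner
  # that benchmark_linux_medium runs on (see runs-on below).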
  benchmark_linux_medium:
    timeout-minutes: 1200
    needs: [start_ec2_runner, cibw_docker_image]
    runs-on: ${{ needs.start_ec2_runner.outputs.label }}
    container:
      image: ${{needs.cibw_docker_image.outputs.tag}}
    permissions:
      contents: write # Technically not necessary since the ARCTICDB_TEST_PAT token override exists
    env:
      # this is potentially overflowing the cache, so should be looked into after we address issue #1057
      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} # Setting this env var enables the caching
      VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
      VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
      CMAKE_C_COMPILER_LAUNCHER: sccache
      CMAKE_CXX_COMPILER_LAUNCHER: sccache
    steps:
      - uses: actions/[email protected]
        with:
          fetch-depth: 0
          token: ${{ secrets.ARCTICDB_TEST_PAT }}
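          # Full history plus a PAT with push access: the benchmark and publish steps below
          # fetch tags, pull, and push ASV results back to the repository.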

      - name: Set persistent storage variables
        uses: ./.github/actions/set_persistent_storage_env_vars
        with:
          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"

      - name: Configure sccache
        uses: mozilla-actions/[email protected]
        with:
          version: "v0.4.0"

      # We are changing the python here because we want to use the default python to build (it is a devel version)
      # and this python for the rest of the testing
      - name: Select Python (Linux)
        run: |
          ls /opt/python
          echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH

      - name: Install ASV
        shell: bash
        run: |
          git config --global --add safe.directory .
          python -m pip install --upgrade pip
          pip install asv virtualenv
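          # Record this machine's hardware profile non-interactively under a fixed name,
          # so results across runs are grouped under "ArcticDB-Runner-Medium"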
          python -m asv machine -v --yes --machine ArcticDB-Runner-Medium

      - name: Benchmark all commits
        if: inputs.run_all_benchmarks == true
        shell: bash -el {0}
        run: |
          python -m asv run -v --show-stderr HASHFILE:python/hashes_to_benchmark.txt

      - name: Benchmark only latest commit
        if: github.event_name != 'pull_request' && inputs.run_all_benchmarks == false
        shell: bash -el {0}
        run: |
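          # HEAD^! limits the run to the latest commit only; its hash is then appended to
          # hashes_to_benchmark.txt so later full runs (run_all_benchmarks) can include it.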
          python -m asv run -v --show-stderr HEAD^!
          git log HEAD^..HEAD --oneline -n1 --decorate=no | awk '{print $1;}' >> python/hashes_to_benchmark.txt

      - name: Benchmark against master
        if: github.event_name == 'pull_request' && inputs.run_all_benchmarks == false
        shell: bash -el {0}
        run: |
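          # Compare HEAD against origin/master, flagging benchmarks whose results change by more than the 1.15 factor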
          python -m asv continuous -v --show-stderr origin/master HEAD -f 1.15

      - name: Publish results
        if: github.event_name != 'pull_request' || github.ref == 'refs/heads/master'
        shell: bash -el {0}
        run: |
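          # Regenerate the static ASV site; results and hashes are committed back only for non-PR runs,
          # and the gh-pages site is rewritten only on master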
          git config --global user.name "${GITHUB_ACTOR}"
          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
          git fetch --tags --all
          git pull
          python -m asv publish -v
          if ${{ github.event_name != 'pull_request' }} ; then
            git add python/.asv/results/*
            git add python/hashes_to_benchmark.txt
            git commit -m "Update ASV Results and hashes to test"
            git push origin HEAD
          fi
          if ${{ github.ref == 'refs/heads/master' }} ; then
            python -m asv gh-pages -v --rewrite
          fi

  stop-ec2-runner:
    needs: [start_ec2_runner, benchmark_linux_medium]
    if: ${{ always() }}
    uses: ./.github/workflows/ec2_runner_jobs.yml
    secrets: inherit
    with:
      job_type: stop
      label: ${{ needs.start_ec2_runner.outputs.label }}
      ec2-instance-id: ${{ needs.start_ec2_runner.outputs.ec2-instance-id }}