Add sccache stats to CI output.
alliepiper committed May 18, 2024
1 parent 084d02f commit cf2d77a
Showing 8 changed files with 156 additions and 61 deletions.
2 changes: 1 addition & 1 deletion .github/actions/workflow-build/build-workflow.py
@@ -239,7 +239,7 @@ def generate_dispatch_job_image(matrix_job, job_type):


def generate_dispatch_job_command(matrix_job, job_type):
script_path = "ci/windows" if is_windows(matrix_job) else "ci"
script_path = "./ci/windows" if is_windows(matrix_job) else "./ci"
script_ext = ".ps1" if is_windows(matrix_job) else ".sh"
script_job_type = job_type
script_project = matrix_job['project']
7 changes: 4 additions & 3 deletions .github/actions/workflow-results/action.yml
@@ -24,12 +24,12 @@ runs:
name: workflow
path: workflow/

- name: Download job success artifacts
- name: Download job artifacts
continue-on-error: true # This may fail if no jobs succeed. The checks below will catch this.
uses: actions/download-artifact@v3
with:
name: dispatch-job-success
path: dispatch-job-success/
name: jobs
path: jobs

- name: Fetch workflow job info
if: ${{ inputs.github_token != ''}}
@@ -75,6 +75,7 @@ runs:
continue-on-error: true
shell: bash --noprofile --norc -euo pipefail {0}
run: |
find jobs
echo "Generating job summary..."
python3 "${GITHUB_ACTION_PATH}/prepare-execution-summary.py" workflow/workflow.json results/job_times.json
67 changes: 49 additions & 18 deletions .github/actions/workflow-results/prepare-execution-summary.py
@@ -2,15 +2,16 @@


import argparse
import functools
import json
import os
import re
import sys


def job_succeeded(job):
# The job was successful if the artifact file 'dispatch-job-success/dispatch-job-success-<job_id>' exists:
return os.path.exists(f'dispatch-job-success/{job["id"]}')
# The job was successful if the success file exists:
return os.path.exists(f'jobs/{job["id"]}/success')


def natural_sort_key(key):
@@ -42,17 +43,21 @@ def extract_jobs(workflow):
return jobs


def create_summary_entry(include_times):
summary = {'passed': 0, 'failed': 0}

if include_times:
summary['job_time'] = 0
summary['step_time'] = 0

return summary
@functools.lru_cache(maxsize=None)
def get_sccache_stats(job_id):
sccache_file = f'jobs/{job_id}/sccache_stats.json'
if os.path.exists(sccache_file):
with open(sccache_file) as f:
return json.load(f)
return None


def update_summary_entry(entry, job, job_times=None):
if 'passed' not in entry:
entry['passed'] = 0
if 'failed' not in entry:
entry['failed'] = 0

if job_succeeded(job):
entry['passed'] += 1
else:
@@ -63,15 +68,34 @@ def update_summary_entry(entry, job, job_times=None):
job_time = time_info["job_seconds"]
command_time = time_info["command_seconds"]

if not 'job_time' in entry:
entry['job_time'] = 0
if not 'step_time' in entry:
entry['step_time'] = 0

entry['job_time'] += job_time
entry['step_time'] += command_time

sccache_stats = get_sccache_stats(job["id"])
if sccache_stats:
sccache_stats = sccache_stats['stats']
requests = sccache_stats.get('compile_requests', 0)
hits = 0
if 'cache_hits' in sccache_stats:
cache_hits = sccache_stats['cache_hits']
for lang, lang_hits in cache_hits.items():
hits += lang_hits
if 'sccache' not in entry:
entry['sccache'] = {'requests': requests, 'hits': hits}
else:
entry['sccache']['requests'] += requests
entry['sccache']['hits'] += hits

return entry
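
For reference, the aggregation above relies on only a few fields of sccache's JSON stats output; the full schema is not shown in this diff, so the snippet below is an illustrative sketch that uses only the keys update_summary_entry() reads ('stats', 'compile_requests', and the per-language 'cache_hits' map), with made-up numbers.

# Illustrative sketch only: key names mirror what update_summary_entry() reads;
# the values and any other fields of sccache_stats.json are assumed.
example = {
    "stats": {
        "compile_requests": 120,
        "cache_hits": {"C": 40, "CUDA": 55},  # per-language hit counts
    }
}
stats = example["stats"]
requests = stats.get("compile_requests", 0)
hits = sum(stats.get("cache_hits", {}).values())
print({"sccache": {"requests": requests, "hits": hits}})
# -> {'sccache': {'requests': 120, 'hits': 95}}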


def build_summary(jobs, job_times=None):
summary = create_summary_entry(job_times)
summary['projects'] = {}
summary = {'projects': {}}
projects = summary['projects']

for job in jobs:
@@ -81,8 +105,7 @@ def build_summary(jobs, job_times=None):

project = matrix_job["project"]
if not project in projects:
projects[project] = create_summary_entry(job_times)
projects[project]['tags'] = {}
projects[project] = {'tags': {}}
tags = projects[project]['tags']

update_summary_entry(projects[project], job, job_times)
@@ -92,16 +115,15 @@ def build_summary(jobs, job_times=None):
continue

if not tag in tags:
tags[tag] = create_summary_entry(job_times)
tags[tag]['values'] = {}
tags[tag] = {'values': {}}
values = tags[tag]['values']

update_summary_entry(tags[tag], job, job_times)

value = str(matrix_job[tag])

if not value in values:
values[value] = create_summary_entry(job_times)
values[value] = {}
update_summary_entry(values[value], job, job_times)

# Natural sort the value strings within each tag:
@@ -162,12 +184,21 @@ def get_summary_stats(summary):

stats = f'{fail_string:<21}'

if (summary['job_time']):
if 'job_time' in summary and total > 0 and summary['job_time'] > 0:
job_time = summary['job_time']
total_job_duration = format_seconds(job_time)
avg_job_duration = format_seconds(job_time / total)
stats += f' | Total Time: {total_job_duration:>7} | Avg Time: {avg_job_duration:>6}'

if 'sccache' in summary:
sccache = summary['sccache']
requests = sccache["requests"]
hits = sccache["hits"]
hit_percent = int(100 * hits / requests) if requests > 0 else 0
hit_ratio = f'{hits}/{requests}'
hit_string = f'Hits: {hit_percent:>3}% ({hit_ratio})'
stats += f' | {hit_string:<24}' # allow space for hits and requests to each be 4 digits

return stats
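
With hypothetical counts, the new sccache column added above renders like this (a standalone sketch mirroring the f-strings in get_summary_stats()):

# Hypothetical numbers; mirrors the hit-rate formatting added above.
requests, hits = 120, 95
hit_percent = int(100 * hits / requests) if requests > 0 else 0
hit_ratio = f'{hits}/{requests}'
hit_string = f'Hits: {hit_percent:>3}% ({hit_ratio})'
print(f' | {hit_string:<24}')  # -> " | Hits:  79% (95/120)" padded to 24 characters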


6 changes: 3 additions & 3 deletions .github/actions/workflow-results/verify-job-success.py
@@ -13,13 +13,13 @@ def main():

job_id_map = json.load(args.job_id_map)

# For each job id, verify that the file 'dispatch-job-success/<job_id>' exists
# For each job id, verify that the success artifact exists
success = True
for job_id, job_name in job_id_map.items():
success_file = f'dispatch-job-success/{job_id}'
success_file = f'jobs/{job_id}/success'
print(f'Verifying job with id "{job_id}": "{job_name}"')
if not os.path.exists(success_file):
print(f'Failed: Artifact "dispatch-job-success/{job_id}" not found')
print(f'Failed: Artifact "{success_file}" not found')
success = False

if not success:
115 changes: 84 additions & 31 deletions .github/workflows/workflow-dispatch-job.yml
@@ -63,10 +63,11 @@ jobs:
with:
path: ${{github.event.repository.name}}
persist-credentials: false
- name: Move files to coder user home directory
- name: Link files to coder user home directory
run: |
cp -R ${{github.event.repository.name}} /home/coder/
chown -R coder:coder /home/coder/
ln -s "$(pwd)/${{github.event.repository.name}}" /home/coder/${{github.event.repository.name}}
chown -R coder:coder ${{github.event.repository.name}}
chown -R coder:coder /home/coder/${{github.event.repository.name}}
- name: Add NVCC problem matcher
run: |
echo "::add-matcher::${{github.event.repository.name}}/.github/problem-matchers/problem-matcher.json"
@@ -109,17 +110,38 @@ jobs:
echo " - Continuous Integration (CI) Overview: https://github.com/NVIDIA/cccl/blob/main/ci-overview.md"
exit $exit_code
fi
- name: Mark job as successful
- name: Prepare job artifacts
id: done
run: |
echo "SUCCESS=true" | tee -a ${GITHUB_OUTPUT}
mkdir dispatch-job-success
touch dispatch-job-success/${{inputs.id}}
- name: Upload dispatch-job-success
echo "SUCCESS=true" | tee -a "${GITHUB_OUTPUT}"
result_dir="jobs/${{inputs.id}}"
mkdir -p "$result_dir"
touch "$result_dir/success"
# Finds a matching file in the repo directory and copies it to the results directory.
find_and_copy() {
filename="$1"
filepath="$(find ${{github.event.repository.name}} -name "${filename}" -print -quit)"
if [[ -z "$filepath" ]]; then
echo "${filename} does not exist in repo directory."
return 1
fi
cp -v "$filepath" "$result_dir"
}
find_and_copy "sccache_stats.json" || true # Ignore failures
echo "job artifacts:"
find "$result_dir"
- name: Upload job artifacts
uses: actions/upload-artifact@v3
with:
name: dispatch-job-success
path: dispatch-job-success/${{inputs.id}}
name: jobs
path: jobs


windows:
name: ${{inputs.name}}
@@ -147,31 +169,62 @@
role-to-assume: arn:aws:iam::279114543810:role/gha-oidc-NVIDIA
aws-region: us-east-2
role-duration-seconds: 43200 # 12 hours
- name: Checkout repo
uses: actions/checkout@v3
with:
path: ${{github.event.repository.name}}
persist-credentials: false
- name: Fetch ${{ inputs.image }}
run: docker pull ${{ inputs.image }}
- name: Prepare paths for docker
id: paths
run: |
echo "HOST_REPO=${{ github.workspace }}\${{ github.event.repository.name }}".Replace('\', '/') | Out-File -FilePath $env:GITHUB_OUTPUT -Append
echo "MOUNT_REPO=C:/${{ github.event.repository.name }}" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
cat $env:GITHUB_OUTPUT
shell: powershell
- name: Run command # Do not change this step's name, it is checked in parse-job-times.py
run: >-
docker run ${{ inputs.image }} powershell -c "[System.Environment]::SetEnvironmentVariable('AWS_ACCESS_KEY_ID','${{env.AWS_ACCESS_KEY_ID}}')
[System.Environment]::SetEnvironmentVariable('AWS_SECRET_ACCESS_KEY','${{env.AWS_SECRET_ACCESS_KEY}}')
[System.Environment]::SetEnvironmentVariable('AWS_SESSION_TOKEN','${{env.AWS_SESSION_TOKEN }}')
[System.Environment]::SetEnvironmentVariable('SCCACHE_BUCKET','${{env.SCCACHE_BUCKET}}')
[System.Environment]::SetEnvironmentVariable('SCCACHE_REGION','${{env.SCCACHE_REGION}}')
[System.Environment]::SetEnvironmentVariable('SCCACHE_IDLE_TIMEOUT','${{env.SCCACHE_IDLE_TIMEOUT}}')
[System.Environment]::SetEnvironmentVariable('SCCACHE_S3_USE_SSL','${{env.SCCACHE_S3_USE_SSL}}')
[System.Environment]::SetEnvironmentVariable('SCCACHE_S3_NO_CREDENTIALS','${{env.SCCACHE_S3_NO_CREDENTIALS}}')
git clone https://github.com/${{github.repository}}.git;
cd ${{github.event.repository.name}};
git fetch --all;
git checkout ${{github.ref_name}};
${{inputs.command}}"
- name: Mark job as successful
run: |
docker run \
--mount type=bind,source="${{steps.paths.outputs.HOST_REPO}}",target="${{steps.paths.outputs.MOUNT_REPO}}" \
--workdir "${{steps.paths.outputs.MOUNT_REPO}}" \
${{ inputs.image }} \
powershell -c "
[System.Environment]::SetEnvironmentVariable('AWS_ACCESS_KEY_ID','${{env.AWS_ACCESS_KEY_ID}}');
[System.Environment]::SetEnvironmentVariable('AWS_SECRET_ACCESS_KEY','${{env.AWS_SECRET_ACCESS_KEY}}');
[System.Environment]::SetEnvironmentVariable('AWS_SESSION_TOKEN','${{env.AWS_SESSION_TOKEN }}');
[System.Environment]::SetEnvironmentVariable('SCCACHE_BUCKET','${{env.SCCACHE_BUCKET}}');
[System.Environment]::SetEnvironmentVariable('SCCACHE_REGION','${{env.SCCACHE_REGION}}');
[System.Environment]::SetEnvironmentVariable('SCCACHE_IDLE_TIMEOUT','${{env.SCCACHE_IDLE_TIMEOUT}}');
[System.Environment]::SetEnvironmentVariable('SCCACHE_S3_USE_SSL','${{env.SCCACHE_S3_USE_SSL}}');
[System.Environment]::SetEnvironmentVariable('SCCACHE_S3_NO_CREDENTIALS','${{env.SCCACHE_S3_NO_CREDENTIALS}}');
git config --global --add safe.directory C:/cccl;
${{inputs.command}}"
- name: Prepare job artifacts
id: done
run: |
echo "SUCCESS=true" | tee -a ${GITHUB_OUTPUT}
mkdir dispatch-job-success
touch dispatch-job-success/${{inputs.id}}
- name: Upload dispatch-job-success
echo "SUCCESS=true" | tee -a "${GITHUB_OUTPUT}"
result_dir="jobs/${{inputs.id}}"
mkdir -p "$result_dir"
touch "$result_dir/success"
# Finds a matching file in the repo directory and copies it to the results directory.
find_and_copy() {
filename="$1"
filepath="$(find ${{github.event.repository.name}} -name "${filename}" -print -quit)"
if [[ -z "$filepath" ]]; then
echo "${filename} does not exist in repo directory."
return 1
fi
cp -v "$filepath" "$result_dir"
}
find_and_copy "sccache_stats.json" || true # Ignore failures
- name: Upload job artifacts
uses: actions/upload-artifact@v3
with:
name: dispatch-job-success
path: dispatch-job-success/${{inputs.id}}
name: jobs
path: jobs
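
Both dispatch job variants now upload a per-job directory instead of a bare success marker: jobs/<job-id>/success, plus jobs/<job-id>/sccache_stats.json when the build produced one. The find_and_copy shell helper above locates the first matching file in the checked-out repo and copies it into that directory; a rough Python equivalent, for illustration only (the workflow itself runs the bash version, and the paths here are placeholders), would be:

# Illustration only; the CI uses the bash find_and_copy helper shown above.
import pathlib
import shutil

def find_and_copy(filename: str, repo_dir: str, result_dir: str) -> bool:
    """Copy the first file named `filename` found under repo_dir into result_dir."""
    match = next(pathlib.Path(repo_dir).rglob(filename), None)
    if match is None:
        print(f"{filename} does not exist in repo directory.")
        return False
    shutil.copy(match, result_dir)
    return True

# e.g. find_and_copy("sccache_stats.json", "cccl", "jobs/123")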
10 changes: 7 additions & 3 deletions ci/build_common.sh
@@ -180,13 +180,18 @@ function build_preset() {
local red="1;31"
local GROUP_NAME="🏗️ Build ${BUILD_NAME}"

local preset_dir="${BUILD_DIR}/${PRESET}"
local sccache_json="${preset_dir}/sccache_stats.json"

source "./sccache_stats.sh" "start"

pushd .. > /dev/null
run_command "$GROUP_NAME" cmake --build --preset=$PRESET -v
status=$?
popd > /dev/null

sccache --show-adv-stats --stats-format=json > "${sccache_json}"

minimal_sccache_stats=$(source "./sccache_stats.sh" "end")

# Only print detailed stats in actions workflow
@@ -224,9 +229,8 @@ function test_preset()

local GROUP_NAME="🚀 Test ${BUILD_NAME}"

ctest_log_dir="${BUILD_DIR}/log/ctest"
ctest_log="${ctest_log_dir}/${PRESET}"
mkdir -p "${ctest_log_dir}"
local preset_dir="${BUILD_DIR}/${PRESET}"
local ctest_log="${preset_dir}/ctest.log"

pushd .. > /dev/null
run_command "$GROUP_NAME" ctest --output-log "${ctest_log}" --preset=$PRESET
3 changes: 1 addition & 2 deletions ci/matrix.yaml
@@ -49,8 +49,7 @@ workflows:
# - {jobs: ['build'], project['thrust'], std: 17, ctk: *ctk_curr, cxx: [*gcc12, *llvm16]}
#
override:
- {jobs: ['build'], project: ['thrust', 'cub'], std: 'all', cxx: [*gcc12, *llvm16, *msvc2022]}
- {jobs: ['infra'], project: 'cccl', cxx: [*gcc12, *llvm16]}
- {jobs: ['build'], project: ['thrust'], std: [14, 17], cxx: [*gcc12]}

pull_request:
# default_projects: nvcc
7 changes: 7 additions & 0 deletions ci/windows/build_common.psm1
@@ -33,6 +33,9 @@ If(!(test-path -PathType container "../build")) {
# The most recent build will always be symlinked to cccl/build/latest
New-Item -ItemType Directory -Path "$BUILD_DIR" -Force

# Convert to an absolute path:
$BUILD_DIR = (Get-Item -Path "$BUILD_DIR").FullName

# Prepare environment for CMake:
$env:CMAKE_BUILD_PARALLEL_LEVEL = $PARALLEL_LEVEL
$env:CTEST_PARALLEL_LEVEL = 1
@@ -107,6 +110,10 @@ function build_preset {
cmake --build --preset $PRESET -v
$test_result = $LastExitCode

$preset_dir = "${BUILD_DIR}/${PRESET}"
$sccache_json = "${preset_dir}/sccache_stats.json"
sccache --show-adv-stats --stats-format=json > "${sccache_json}"

sccache_stats('Stop')

echo "$step complete"