Commit: Create timing artifacts.

alliepiper committed May 16, 2024
1 parent 502b169 commit 796fa29

Showing 6 changed files with 307 additions and 57 deletions.
47 changes: 43 additions & 4 deletions .github/actions/workflow-results/action.yml
@@ -31,13 +31,52 @@ runs:
           name: dispatch-job-success
           path: dispatch-job-success/
 
+    - name: Fetch workflow job info
+      if: ${{ inputs.github_token != ''}}
+      continue-on-error: true
+      uses: actions/github-script@v4
+      with:
+        github-token: ${{ inputs.github_token }}
+        script: |
+          const fs = require('fs');
+          const owner = context.repo.owner;
+          const repo = context.repo.repo;
+          const runId = context.runId;
+          github.paginate(
+            'GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs',
+            {
+              owner: owner,
+              repo: repo,
+              run_id: runId
+            }
+          )
+          .then(jobs => {
+            console.log('::group::Jobs JSON');
+            console.log(JSON.stringify(jobs, null, 2));
+            console.log('::endgroup::');
+            fs.mkdirSync("results", { recursive: true });
+            fs.writeFileSync('results/jobs.json', JSON.stringify(jobs, null, 2));
+            console.log(`Fetched ${jobs.length} jobs and saved to results/jobs.json`);
+          })
+          .catch(error => {
+            console.error(error);
+          });
+
+    - name: Parse job times
+      continue-on-error: true
+      shell: bash --noprofile --norc -euo pipefail {0}
+      run: |
+        echo "Parsing job times..."
+        python3 "${GITHUB_ACTION_PATH}/parse-job-times.py" workflow/workflow.json results/jobs.json
+
     - name: Prepare execution summary
       id: job-summary
       continue-on-error: true
       shell: bash --noprofile --norc -euo pipefail {0}
       run: |
         echo "Generating job summary..."
-        python3 "${GITHUB_ACTION_PATH}/prepare-execution-summary.py" workflow/workflow.json
+        python3 "${GITHUB_ACTION_PATH}/prepare-execution-summary.py" workflow/workflow.json results/job_times.json
 
     - name: Prepare final summary
       id: final-summary
@@ -70,8 +109,8 @@ runs:
         github-token: ${{ inputs.github_token }}
         script: |
           const pr_number = process.env.PR_NUMBER;
-          const owner = 'NVIDIA';
-          const repo = 'cccl';
+          const owner = context.repo.owner;
+          const repo = context.repo.repo;
           // Decode URL-encoded string for proper display in comments
           const commentBody = decodeURIComponent(process.env.COMMENT_BODY);
           console.log('::group::Commenting on PR #' + pr_number + ' with the following message:')
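
For context: github.paginate follows the API's pagination links and returns the job objects as one flat array, which the new step above dumps verbatim into results/jobs.json. A minimal sketch of one record, limited to the fields that parse-job-times.py reads below (field names follow the GitHub REST "list jobs for a workflow run" response; the job name and timestamps are illustrative, not from a real run):

    # One element of results/jobs.json (illustrative values, abbreviated):
    job = {
        "name": "linux / s.1 / build: gcc 12",   # hypothetical dispatch job name
        "started_at": "2024-05-09T06:52:20Z",
        "completed_at": "2024-05-09T07:10:02Z",
        "steps": [
            {
                "name": "Run command",           # the step whose duration gets reported
                "started_at": "2024-05-09T06:53:00Z",
                "completed_at": "2024-05-09T07:09:30Z",
            },
        ],
    }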
12 changes: 8 additions & 4 deletions .github/actions/workflow-results/final-summary.py
@@ -10,6 +10,7 @@ def read_file(filepath):
     with open(filepath, 'r') as f:
         return f.read().rstrip("\n ")
 
+
 def print_file_if_present(filepath):
     if os.path.exists(filepath):
         print(read_file(filepath) + "\n\n")
@@ -25,7 +26,7 @@ def print_summary_file(filepath, heading_level):
 def main():
     # List of all projects detected in 'execution/projects/{project}_summary.json':
     projects = []
-    project_file_regex="(.*)_summary.json"
+    project_file_regex = "(.*)_summary.json"
     for filename in os.listdir("execution/projects"):
         match = re.match(project_file_regex, filename)
         if match:
@@ -35,8 +36,12 @@ def main():

print("<ul>")
for project in projects:
print("<li>")
print_summary_file(f"execution/projects/{project}_summary.json", 3)
print("<li>")
print_summary_file(f"execution/projects/{project}_summary.json", 3)
print("<li>")
print(f'<details><summary>Legend</summary>\n')
print(read_file('execution/legend.txt') + "\n")
print('</details>\n')
print("</ul>\n")

print_summary_file("workflow/runner_summary.json", 2)
@@ -45,6 +50,5 @@ def main():
print("</details>")



if __name__ == '__main__':
main()
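
The reworked loop above changes the emitted markup: each project still gets an open <li> followed by its summary, and the legend now rides along as a final <li> inside the same <ul>. Roughly this structure (sketch only; the per-project content comes from print_summary_file and the legend text from execution/legend.txt):

    <ul>
    <li>
    ...summary for project A...
    <li>
    ...summary for project B...
    <li>
    <details><summary>Legend</summary>
    ...contents of execution/legend.txt...
    </details>
    </ul>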
120 changes: 120 additions & 0 deletions .github/actions/workflow-results/parse-job-times.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+
+import argparse
+import datetime
+import json
+import os
+import sys
+
+
+def get_jobs_json(jobs_file):
+    # Return the contents of jobs.json
+    with open(jobs_file) as f:
+        result = json.load(f)
+
+    return result
+
+
+def get_workflow_json(workflow_file):
+    # Return the contents of ~/cccl/.local/tmp/workflow.json
+    with open(workflow_file) as f:
+        return json.load(f)
+
+
+def write_json(filepath, json_object):
+    with open(filepath, 'w') as f:
+        json.dump(json_object, f, indent=4)
+
+
+def generate_job_id_map(workflow):
+    '''Map full job name to job id'''
+    job_id_map = {}
+    for group_name, group_json in workflow.items():
+        standalone = group_json['standalone'] if 'standalone' in group_json else []
+        for job in standalone:
+            name = f"{group_name} / s.{job['id']} / {job['name']}"
+            job_id_map[name] = job['id']
+        two_stage = group_json['two_stage'] if 'two_stage' in group_json else []
+        for pc in two_stage:
+            producers = pc['producers']
+            for job in producers:
+                name = f"{group_name} / t.{pc['id']} / p.{job['id']} / {job['name']}"
+                job_id_map[name] = job['id']
+            consumers = pc['consumers']
+            for job in consumers:
+                name = f"{group_name} / t.{pc['id']} / c.{job['id']} / {job['name']}"
+                job_id_map[name] = job['id']
+
+    return job_id_map
+
+
+def main():
+    # Accept two command line arguments: <workflow.json> <jobs.json>
+    parser = argparse.ArgumentParser(description='Parse job times')
+    parser.add_argument('workflow', type=str, help='Path to workflow.json')
+    parser.add_argument('jobs', type=str, help='Path to jobs.json')
+    args = parser.parse_args()
+
+    jobs = get_jobs_json(args.jobs)
+    workflow = get_workflow_json(args.workflow)
+
+    # Converts full github job names into job ids:
+    job_id_map = generate_job_id_map(workflow)
+
+    # Map of id -> { <job stats> }
+    result = {}
+
+    for job in jobs:
+        name = job['name']
+
+        # Build workflow, verify devcontainers, etc:
+        if name not in job_id_map:
+            print(f"Skipping {name}")
+            continue
+
+        id = job_id_map[name]
+
+        # Job times are 2024-05-09T06:52:20Z
+        started_at = job['started_at']
+        started_time = datetime.datetime.strptime(started_at, "%Y-%m-%dT%H:%M:%SZ")
+
+        completed_at = job['completed_at']
+        completed_time = datetime.datetime.strptime(completed_at, "%Y-%m-%dT%H:%M:%SZ")
+
+        job_seconds = (completed_time - started_time).total_seconds()
+        job_duration = str(datetime.timedelta(seconds=job_seconds))
+
+        result[id] = {}
+        result[id]['name'] = name
+        result[id]['started_at'] = started_at
+        result[id]['completed_at'] = completed_at
+        result[id]['job_duration'] = job_duration
+        result[id]['job_seconds'] = job_seconds
+
+        # Find the "Run command" step and record its duration:
+        command_seconds = 0
+        for step in job['steps']:
+            if step['name'] == "Run command":
+                step_started_at = step['started_at']
+                step_started_time = datetime.datetime.strptime(step_started_at, "%Y-%m-%dT%H:%M:%SZ")
+                step_completed_at = step['completed_at']
+                step_completed_time = datetime.datetime.strptime(step_completed_at, "%Y-%m-%dT%H:%M:%SZ")
+                command_seconds = (step_completed_time - step_started_time).total_seconds()
+                break
+
+        command_duration = str(datetime.timedelta(seconds=command_seconds))
+
+        result[id]['command_seconds'] = command_seconds
+        result[id]['command_duration'] = command_duration
+
+    os.makedirs("results", exist_ok=True)
+    write_json("results/job_times.json", result)
+
+    print("::group::Job times")
+    for id, stats in result.items():
print(f"{stats['job_duration']:10} {stats['command_duration']:10} {stats['command_seconds'] * 100 / stats['job_seconds']:3f} {stats['name']}")
print("::endgroup::")


if __name__ == "__main__":
main()
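
As wired up in action.yml above, the script runs as:

    python3 "${GITHUB_ACTION_PATH}/parse-job-times.py" workflow/workflow.json results/jobs.json

and writes results/job_times.json keyed by dispatch job id. A sketch of one entry, matching the fields main() assigns (the id, name, and timestamps are illustrative; the durations are consistent with them, e.g. 06:52:20 to 07:10:02 is 1062 seconds, which str(datetime.timedelta(...)) renders as 0:17:42):

    {
        "1": {
            "name": "linux / s.1 / build: gcc 12",
            "started_at": "2024-05-09T06:52:20Z",
            "completed_at": "2024-05-09T07:10:02Z",
            "job_duration": "0:17:42",
            "job_seconds": 1062.0,
            "command_seconds": 990.0,
            "command_duration": "0:16:30"
        }
    }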
