
Commit

Merge remote-tracking branch 'origin/master' into mfig-10249-vtransfer-virtual-targets
michaelfig committed Nov 6, 2024
2 parents c495a31 + d4f2864 commit 204c99c
Showing 144 changed files with 5,879 additions and 1,336 deletions.
65 changes: 65 additions & 0 deletions .github/actions/ci-test-result.cjs
@@ -0,0 +1,65 @@
#! /usr/bin/env node
const fs = require('node:fs');
const process = require('node:process');
const { sendMetricsToGCP, makeTimeSeries } = require('./gcp-monitoring.cjs');

const resultFiles = process.argv.slice(2);

const tapResultRegex = new RegExp(
  `(^(?<status>not )?ok (?<num>[0-9]+) - (?<name>.+?)(?: %ava-dur=(?<duration>[0-9]+)ms)?(?:# (?<comments>.+?))?$(?<output>(\n^#.+?$)*)(?<failure>(\n^(?:(?!(?:not|ok) ))[^\n\r]+?$)*))`,
  'gms',
);
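// tapResultRegex: each match is one TAP result line: an optional "not " prefix
// (status), the test number (num), the test title (name), an optional
// "%ava-dur=<n>ms" annotation (duration), an optional "# SKIP"/"# TODO" directive
// (comments), then any "#"-prefixed output lines (output) and trailing detail lines
// that do not start a new result (failure).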
let timeSeriesData = [];

function processTAP(packageName, tapbody) {
  let m;
  const returnValue = [];
  // eslint-disable-next-line no-cond-assign
  while ((m = tapResultRegex.exec(tapbody))) {
    if (m.groups.name) {
      const testCaseName = `${m.groups.name}`.replace(/["<>]/g, '').trim();

      let skipped = false;
      let succeeded = true;
      let todo = false;
      if (m.groups.status) {
        succeeded = false;
      }
      if (m.groups.comments) {
        if (m.groups.comments.match(/SKIP/gi)) {
          skipped = true;
        }
        if (m.groups.comments.match(/TODO/gi)) {
          todo = true;
          skipped = true;
          succeeded = true;
        }
      }
      returnValue.push({
        labels: {
          test_name: testCaseName,
          package: packageName,
          test_status:
            succeeded && !(todo || skipped)
              ? 'succeeded'
              : !succeeded
                ? 'failed'
                : 'skipped',
        },
        value: Number(succeeded && !(todo || skipped)),
      });
    }
  }
  return returnValue;
}

for (const file of resultFiles) {
  const resultsBody = fs.readFileSync(file, 'utf-8');
  const packageName = file.split('/').at(-2);

  const response = processTAP(packageName, resultsBody);
  timeSeriesData.push(...response);
}

const timeSeries = makeTimeSeries(timeSeriesData);
sendMetricsToGCP(timeSeries);
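
For reference (not part of the diff): a minimal sketch of what processTAP extracts from a typical AVA TAP fragment; the package name and test titles below are made up.

const sampleTap = [
  'ok 1 - adds numbers %ava-dur=12ms',
  'not ok 2 - rejects bad input',
  'ok 3 - future work # TODO not implemented yet',
].join('\n');
// processTAP('my-package', sampleTap) would yield one entry per test, shaped like:
//   { labels: { test_name: 'adds numbers', package: 'my-package', test_status: 'succeeded' }, value: 1 }
//   { labels: { test_name: 'rejects bad input', package: 'my-package', test_status: 'failed' }, value: 0 }
//   { labels: { test_name: 'future work', package: 'my-package', test_status: 'skipped' }, value: 0 }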
169 changes: 169 additions & 0 deletions .github/actions/dump-ci-stats-to-gcp-metrics.cjs
@@ -0,0 +1,169 @@
const Monitoring = require('@google-cloud/monitoring');

const gcpCredentials = JSON.parse(process.env.GCP_CREDENTIALS);
const monitoring = new Monitoring.MetricServiceClient({
  projectId: gcpCredentials.project_id,
  credentials: {
    client_email: gcpCredentials.client_email,
    private_key: gcpCredentials.private_key,
  },
});

async function sendMetricsToGCP(metricType, metricValue, labels) {
  const projectId = gcpCredentials.project_id;

  const request = {
    name: monitoring.projectPath(projectId),
    timeSeries: [
      {
        metric: {
          type: `custom.googleapis.com/github/${metricType}`,
          labels: labels,
        },
        resource: {
          type: 'global',
          labels: {
            project_id: projectId,
          },
        },
        points: [
          {
            interval: {
              endTime: {
                seconds: Math.floor(Date.now() / 1000),
              },
            },
            value: {
              doubleValue: metricValue,
            },
          },
        ],
      },
    ],
  };
  try {
    await monitoring.createTimeSeries(request);
    console.log(`Metric ${metricType} sent successfully.`);
  } catch (error) {
    console.error('Error sending metric:', error);
  }
}

// Function to fetch workflow and job details via GitHub API
async function fetchWorkflowDetails() {
  const runId = process.argv[2];
  const repo = process.env.GITHUB_REPOSITORY;
  const apiUrl = `https://api.github.com/repos/${repo}/actions/runs/${runId}`;

  try {
    const response = await fetch(apiUrl, {
      headers: {
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
        Accept: 'application/vnd.github.v3+json',
      },
    });

    if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
    const data = await response.json();

    return {
      workflowId: data.id,
      workflowName: data.name,
      status: data.status, // "completed", "in_progress", etc.
      conclusion: data.conclusion, // "success", "failure"
      startTime: data.created_at,
      endTime: data.updated_at,
      trigger: data.event, // "push", "pull_request", etc.
      jobs: await fetchJobDetails(repo, data.id), // Fetch individual job details
    };
  } catch (error) {
    console.error('Error fetching workflow details:', error);
    process.exit(1);
  }
}

async function fetchJobDetails(repo, runId) {
  const apiUrl = `https://api.github.com/repos/${repo}/actions/runs/${runId}/jobs`;

  try {
    const response = await fetch(apiUrl, {
      headers: {
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
        Accept: 'application/vnd.github.v3+json',
      },
    });

    if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
    const data = await response.json();
    return data.jobs;
  } catch (error) {
    console.error('Error fetching job details:', error);
    return [];
  }
}

// Main function to send metrics
(async () => {
  try {
    const workflowStats = await fetchWorkflowDetails();

    const workflowLabels = {
      workflow_name: workflowStats.workflowName,
      workflow_id: workflowStats.workflowId,
      trigger: workflowStats.trigger,
    };

    const workflowDuration =
      (new Date(workflowStats.endTime) - new Date(workflowStats.startTime)) /
      1000;
    await sendMetricsToGCP(
      'ci_workflow_duration',
      workflowDuration,
      workflowLabels,
    );

    for (const job of workflowStats.jobs) {
      const jobLabels = {
        workflow_name: workflowStats.workflowName,
        job_name: job.name,
        runner_name: job.runner_name,
        conclusion: job.conclusion,
      };

      const jobExecutionTime =
        (new Date(job.completed_at) - new Date(job.started_at)) / 1000;
      await sendMetricsToGCP(
        'ci_job_execution_time',
        jobExecutionTime,
        jobLabels,
      );

      // Send job status (1 for success, 0 for failure)
      const jobStatus = job.conclusion === 'success' ? 1 : 0;
      await sendMetricsToGCP('ci_job_status', jobStatus, jobLabels);

      // Capture step-level metrics for step details per job
      for (const step of job.steps) {
        const stepExecutionTime =
          (new Date(step.completed_at) - new Date(step.started_at)) / 1000;
        const stepLabels = {
          workflow_name: workflowStats.workflowName,
          job_name: job.name,
          step_name: step.name,
          runner_name: job.runner_name,
        };

        await sendMetricsToGCP(
          'ci_step_execution_time',
          stepExecutionTime,
          stepLabels,
        );
      }
    }
  } catch (error) {
    console.error('Error in main function:', error);
    process.exit(1);
  }

  process.exit(0);
})();
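
For reference (not part of the diff): the custom metric types this script writes per completed run, summarized as data; each is published under custom.googleapis.com/github/<type> with the labels taken from the code above.

const emittedMetrics = [
  { type: 'ci_workflow_duration', unit: 'seconds', labels: ['workflow_name', 'workflow_id', 'trigger'] },
  { type: 'ci_job_execution_time', unit: 'seconds', labels: ['workflow_name', 'job_name', 'runner_name', 'conclusion'] },
  { type: 'ci_job_status', unit: '1 = success, 0 = otherwise', labels: ['workflow_name', 'job_name', 'runner_name', 'conclusion'] },
  { type: 'ci_step_execution_time', unit: 'seconds', labels: ['workflow_name', 'job_name', 'step_name', 'runner_name'] },
];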
62 changes: 62 additions & 0 deletions .github/actions/gcp-monitoring.cjs
@@ -0,0 +1,62 @@
const Monitoring = require('@google-cloud/monitoring');

const gcpCredentials = JSON.parse(process.env.GCP_CREDENTIALS);
const projectId = gcpCredentials.project_id;

const monitoring = new Monitoring.MetricServiceClient({
  projectId: gcpCredentials.project_id,
  credentials: {
    client_email: gcpCredentials.client_email,
    private_key: gcpCredentials.private_key,
  },
});

async function sendMetricsToGCP(timeSeries) {
  const batchSize = 200;
  for (let i = 0; i < timeSeries.length; i += batchSize) {
    const batch = timeSeries.slice(i, i + batchSize);
    const request = {
      name: monitoring.projectPath(projectId),
      timeSeries: batch,
    };

    try {
      await monitoring.createTimeSeries(request);
      console.log(
        `Batch starting with metric ${batch[0].metric.type} sent successfully.`,
      );
    } catch (error) {
      console.error('Error sending batch:', error);
    }
  }
}

function makeTimeSeries(testData) {
  const timeSeries = testData.map(({ labels, value }) => ({
    metric: {
      type: `custom.googleapis.com/github/test-results`,
      labels,
    },
    resource: {
      type: 'global',
      labels: {
        project_id: projectId,
      },
    },
    points: [
      {
        interval: {
          endTime: {
            seconds: Math.floor(Date.now() / 1000),
          },
        },
        value: {
          doubleValue: value,
        },
      },
    ],
  }));
  return timeSeries;
}

module.exports = { sendMetricsToGCP, makeTimeSeries };
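
For reference (not part of the diff): a minimal usage sketch of this module, assuming GCP_CREDENTIALS holds a service-account JSON; the labels mirror what ci-test-result.cjs produces and the values here are made up.

const { makeTimeSeries, sendMetricsToGCP } = require('./gcp-monitoring.cjs');

const series = makeTimeSeries([
  { labels: { test_name: 'adds numbers', package: 'my-package', test_status: 'succeeded' }, value: 1 },
]);
// createTimeSeries is called once per batch of at most 200 series.
sendMetricsToGCP(series);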
11 changes: 11 additions & 0 deletions .github/actions/post-test/action.yml
@@ -14,6 +14,10 @@ inputs:
    description: 'site for datadog'
    required: false
    default: 'us3.datadoghq.com'
  gcp-credentials:
    description: 'gcp'
    required: false
    default: ''

runs:
  using: composite
@@ -53,3 +57,10 @@ runs:
      continue-on-error: true
      with:
        token: ${{ inputs.codecov-token }}
    - name: Send test results to GCP
      shell: bash
      if: ${{ inputs.gcp-credentials }}
      env:
        GCP_CREDENTIALS: ${{ inputs.gcp-credentials }}
      run: |
        node .github/actions/ci-test-result.cjs ./packages/*/_testoutput.txt
39 changes: 39 additions & 0 deletions .github/workflows/dump-ci-stats.yml
@@ -0,0 +1,39 @@
name: Dump GH CI Stats

on:
  workflow_run: # Run this workflow after each of the listed workflows completes
    workflows:
      [
        'Integration Tests',
        'Test Golang',
        'golangci-lint',
        'Build release Docker Images',
        'Test all Packages',
        'Test Documentation',
        'Manage integration check',
        'after-merge.yml',
      ]
    types:
      - completed

jobs:
  dump_ci_stats:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v2

      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'

      - name: Install GCP Monitoring/Metrics Client
        run: yarn add @google-cloud/monitoring --ignore-workspace-root-check

      - name: Run Final Job and Send Logs to GCP
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GCP_CREDENTIALS: ${{ secrets.GCP_CREDENTIALS }}
        run: |
          node .github/actions/dump-ci-stats-to-gcp-metrics.cjs ${{ github.event.workflow_run.id }}