Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add: upload reports as an artifact #37

Open
wants to merge 11 commits into
base: beta
Choose a base branch
from
Open
8 changes: 8 additions & 0 deletions run-tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,14 @@ This action fulfils the following objectives in your runner environment:
* default:
* false, in case this param isn't passed by default the build will wait for the test to get completed
* specify if the tests need to run in the background without polling for results; useful for long-running test cases. The action will be marked as passed as soon as the build is started, independent of the result.
* `upload`:
* Valid Inputs:
* true
* false
* default:
* false, in case this param isn't passed, reports will not be uploaded to artifacts.
* specify if you want to upload test reports to artifacts.
* Note: For Espresso Cucumber Tests, please pass [plugins to your cucumberOptions](https://www.browserstack.com/docs/app-automate/espresso/run-cucumber-tests#:~:text=Step%201%3A%20Specify%20format%20of%20test%20report%20in%20command) in your config file for the report to be uploaded.

> Note: In case you are using this action along with **browserstack/github-actions/upload-app@beta** you need not specify app and test_suite in the config and framework in the inputs. They will be automatically picked up from the previous steps' outputs.

Expand Down
4 changes: 3 additions & 1 deletion run-tests/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@ inputs:
framework:
description: 'Test framework used for the test espresso/xcuitest'
async:
description: 'Start the test and not wait for the result, marks the build intermediately as passed'
description: 'Start the test and not wait for the result, marks the build intermediately as passed'
upload:
description: 'Uploads the test reports to github artifacts. Requires async to be false.'
runs:
using: 'node12'
main: 'dist/index.js'
10 changes: 10 additions & 0 deletions run-tests/config/constants.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@ module.exports = {
APP_HASHED_ID: 'BROWSERSTACK_APP_HASHED_ID',
TEST_SUITE_ID: 'BROWSERSTACK_TEST_SUITE_ID',
},
FRAMEWORKS: {
espresso: 'espresso',
xcuitest: 'xcuitest',
},
URLS: {
BASE_URL: 'api-cloud.browserstack.com/app-automate',
APP_UPLOAD_ENDPOINT: 'upload',
Expand All @@ -19,12 +23,18 @@ module.exports = {
espresso: 'espresso/v2/builds',
xcuitest: 'xcuitest/v2/builds',
},
REPORT: {
espresso_junit: 'report',
espresso_cucumber: 'assets',
xcuitest_resultbundle: 'resultbundle',
},
DASHBOARD_BASE: 'app-automate.browserstack.com/dashboard/v2/builds',
},
INPUT: {
CONFIG_PATH: 'config-path',
FRAMEWORK: 'framework',
ASYNC: 'async',
UPLOAD: 'upload',
},
WATCH_INTERVAL: 60000,
TEST_STATUS: {
Expand Down
30,954 changes: 20,463 additions & 10,491 deletions run-tests/dist/index.js

Large diffs are not rendered by default.

78 changes: 64 additions & 14 deletions run-tests/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions run-tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
},
"homepage": "https://github.com/browserstack/github-actions#readme",
"dependencies": {
"@actions/artifact": "^1.1.1",
"@actions/core": "^1.2.7",
"@actions/github": "^4.0.0",
"cli-table": "^0.3.6",
Expand Down
80 changes: 78 additions & 2 deletions run-tests/src/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ const core = require('@actions/core');
const request = require('request');
const Table = require('cli-table');
const fs = require('fs');
const artifacts = require('@actions/artifact');
const constants = require("../config/constants");

const {
Expand All @@ -10,6 +11,7 @@ const {
INPUT,
WATCH_INTERVAL,
TEST_STATUS,
FRAMEWORKS,
} = constants;

class TestRunner {
Expand All @@ -27,6 +29,7 @@ class TestRunner {
this.test_suite_hashed_id = process.env[ENV_VARS.TEST_SUITE_ID];
this.framework = core.getInput(INPUT.FRAMEWORK) || process.env[ENV_VARS.FRAMEWORK];
this.async = core.getInput(INPUT.ASYNC);
this.upload = core.getInput(INPUT.UPLOAD);
} catch (e) {
throw Error(`Action input failed for reason: ${e.message}`);
}
Expand Down Expand Up @@ -162,21 +165,94 @@ class TestRunner {
clearInterval(poller);
TestRunner._parseApiResult(content);
this.build_status = content.status;
resolve();
resolve(content);
}
}
});
}, WATCH_INTERVAL);
});
}

static async _uploadResults(content) {
const promises = [];
core.info(`Uploading test report to artifacts for build id: ${content.id}`);
const { devices, id: buildId, framework } = content;
const rootDir = './reports';
if (!fs.existsSync(rootDir)) {
fs.mkdirSync(rootDir);
}
const username = process.env[ENV_VARS.BROWSERSTACK_USERNAME].replace("-GitHubAction", "");
const accesskey = process.env[ENV_VARS.BROWSERSTACK_ACCESS_KEY];
const inputCapabilities = content.input_capabilities;
let reportEndpoint;
if (framework === FRAMEWORKS.espresso) {
if (inputCapabilities.cucumberOptions && inputCapabilities.cucumberOptions.plugins) {
reportEndpoint = URLS.REPORT.espresso_cucumber;
} else {
reportEndpoint = URLS.REPORT.espresso_junit;
}
} else if (framework === FRAMEWORKS.xcuitest) {
if (inputCapabilities.enableResultBundle) {
reportEndpoint = URLS.REPORT.xcuitest_resultbundle;
} else {
core.info("'enableResultBundle' is missing in capabilities. Skipping reports.");
return;
}
} else {
core.error(new Error("Invalid Framework."));
return;
}
for (const device of devices) {
const { sessions } = device;
for (const session of sessions) {
const { id } = session;
const options = {
url: `https://${username}:${accesskey}@${URLS.BASE_URL}/${URLS.WATCH_FRAMEWORKS[framework]}/${buildId}/sessions/${id}/${reportEndpoint}`,
};
/* eslint-disable no-eval */
promises.push(new Promise((resolve, reject) => {
request.get(options, (error, response) => {
if (error) {
reject(error);
}
if (response.statusCode !== 200) {
reject(response.body);
}
resolve(response.body);
});
}).then(async (report) => {
if (framework === FRAMEWORKS.espresso) {
fs.writeFileSync(`${rootDir}/${id}.xml`, report);
} else if (framework === FRAMEWORKS.xcuitest) {
fs.writeFileSync(`${rootDir}/${id}.zip`, report);
}
}).catch((err) => {
core.error(err);
}));
}
}
await Promise.allSettled(promises);
try {
const files = fs.readdirSync(rootDir).map((path) => `${rootDir}/${path}`);
const { artifactName, failedItems, artifactItems } = await artifacts.create().uploadArtifact('reports', files, rootDir, {
continueOnError: true,
});
core.info(`Reports successfully uploaded to artifacts with artifact name ${artifactName}`);
core.debug(`failedItems:${JSON.stringify(failedItems)}`);
core.debug(`artifactItems:${JSON.stringify(artifactItems)}`);
} catch (err) {
core.error(err);
}
}

async run() {
try {
await this._startBuild();
const dashboardUrl = `https://${URLS.DASHBOARD_BASE}/${this.build_id}`;
core.info(`Build Dashboard link: ${dashboardUrl}`);
if (this.async) return;
await this._pollBuild();
const content = await this._pollBuild();
if (this.upload) await TestRunner._uploadResults(content);
if (this.build_status !== TEST_STATUS.PASSED) {
core.setFailed(`Browserstack Build with build id: ${this.build_id} ${this.build_status}`);
}
Expand Down
Loading