Skip to content

Commit

Permalink
feat: add -c<capture> option
Browse files Browse the repository at this point in the history
  • Loading branch information
luckyaxl committed Jun 6, 2024
1 parent 9be7c2c commit c0a4e78
Show file tree
Hide file tree
Showing 4 changed files with 57 additions and 32 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelog

## v0.0.11 (06/06/2024)
### Added
- Disable automatic SonarQube capture by default

### Enhancements
- Add ```-c``` option for capturing SonarQube screenshots
- Remove ```%``` from results

## v0.0.10 (12/15/2023)
### Added

Expand Down
4 changes: 3 additions & 1 deletion src/core/pullRequests.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ interface FetchPullRequests {
endDate: string;
outputDirectory: string;
logFilePath: string;
capture: boolean;
}

interface GitHubApiResponse {
Expand All @@ -23,6 +24,7 @@ export const fetchPullRequests = async ({
endDate,
outputDirectory,
logFilePath,
capture,
}: FetchPullRequests) => {
console.log(
`${successColorAnsi("[*]")} Fetching data from author: ${warningColorAnsi(
Expand Down Expand Up @@ -84,5 +86,5 @@ export const fetchPullRequests = async ({
fs.mkdirSync(outputDirectory, { recursive: true });
}

sequentialProcess(data, outputDirectory, logFilePath);
sequentialProcess(data, outputDirectory, logFilePath, capture);
};
42 changes: 25 additions & 17 deletions src/core/sequentialProcess.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,33 +14,41 @@ const env = process.env;
export const sequentialProcess = async (
items: any[],
outputDir: string,
logFilePath: string
logFilePath: string,
capture: boolean
) => {
let browser;

try {
browser = await puppeteer.launch({
headless: "new",
});
const page = await browser.newPage();

await page.setViewport({ width: 1512, height: 850 });
let page: any;

await page.goto(`${env.SONARQUBE_URL}/sessions/new`, {
waitUntil: "networkidle0",
});
if (capture) {
page = await browser.newPage();

await page.setViewport({ width: 1512, height: 850 });

await page.goto(`${env.SONARQUBE_URL}/sessions/new`, {
waitUntil: "networkidle0",
});

await page.type("#login", env.SONAR_LOGIN as string);
await page.type("#password", env.SONAR_PASSWORD as string);
await page.type("#login", env.SONAR_LOGIN as string);
await page.type("#password", env.SONAR_PASSWORD as string);

await page.keyboard.press("Enter");
await page.keyboard.press("Enter");

await page.waitForNetworkIdle();
await page.waitForNetworkIdle();

const title = await page.title();
const title = await page.title();

if (title === "SonarQube") {
throw new Error(`${errorColorAnsi("[!]")} Login SonarQube failed!`);
if (title === "SonarQube") {
throw new Error(`${errorColorAnsi("[!]")} Login SonarQube failed!`);
}
} else {
await browser.close();
}

const delay = (ms: number) =>
Expand All @@ -52,10 +60,10 @@ export const sequentialProcess = async (
try {
const comments = await fetchIssueComments(item.comments_url);

const percentage = comments?.percentage;
const percentage = comments?.percentage || "0.00%";
const sonarQubeUrl = comments?.sonarQubeUrl;

if (sonarQubeUrl) {
if (sonarQubeUrl && capture) {
await page.goto(sonarQubeUrl, {
waitUntil: "networkidle0",
});
Expand All @@ -73,11 +81,11 @@ export const sequentialProcess = async (
.format("DD MMM YYYY");
const prUrl = item.html_url;

const msg = `${mergedAt} ${warningColorAnsi(prUrl)} ${percentage || 0}`;
const msg = `${mergedAt} ${warningColorAnsi(prUrl)} ${percentage}`;

fs.appendFile(
logFilePath,
`${mergedAt},${prUrl},${percentage}\n`,
`${mergedAt},${prUrl},${percentage.replace('%', '')}\n`,
(err) => {
if (err) {
console.error(
Expand Down
35 changes: 21 additions & 14 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ const validateFormat = (value: string) => {
return value;
};

const banner = `${successColorAnsi("SonarGit v0.0.10")}
const banner = `${successColorAnsi("SonarGit v0.0.11")}
Automated bot scraper to streamline data extraction from GitHub pull requests
and capture dynamic SonarQube screenshots.\n`;

Expand All @@ -52,11 +52,12 @@ shell
"Specify end date in format DD/MM/YYYY",
validateFormat
)
.option("-c, --capture <capture>", "Capture sonarqube screenshot")
.option("-o, --output <output>", "Specify output file name, ex: output.csv")
.parse();

const options = shell.opts();
const { start, end, output: outputFile } = options;
const { start, end, output: outputFile, capture } = options;

let startDate = formatDateGit(start);
let endDate = moment().format("YYYY-MM-DD");
Expand Down Expand Up @@ -91,7 +92,9 @@ AUTHOR=\n`;

const fileName = outputFile
? path.basename(outputFile)
: `Git ${formatDate(start)} - ${formatDate(end) || moment().format("DD MMM YYYY")}.csv`;
: `Git ${formatDate(start)} - ${
formatDate(end) || moment().format("DD MMM YYYY")
}.csv`;

const logFilePath = path.join(outputDirectory, fileName);

Expand All @@ -103,14 +106,18 @@ if (!fs.existsSync(configPath)) {
fs.writeFileSync(configPath, config);
}

const requiredEnvVariables = [
"SONARQUBE_URL",
"SONAR_LOGIN",
"SONAR_PASSWORD",
"GITHUB_TOKEN",
"OWNER",
"AUTHOR",
];

checkEnvVar(requiredEnvVariables);
fetchPullRequests({ startDate, endDate, outputDirectory, logFilePath });
const requiredEnvVariables = ["GITHUB_TOKEN", "OWNER", "AUTHOR"];
const sonarQubeEnvVar = ["SONARQUBE_URL", "SONAR_LOGIN", "SONAR_PASSWORD"];

const envVar = capture
? [...requiredEnvVariables, ...sonarQubeEnvVar]
: requiredEnvVariables;

checkEnvVar(envVar);
fetchPullRequests({
startDate,
endDate,
outputDirectory,
logFilePath,
capture,
});

0 comments on commit c0a4e78

Please sign in to comment.