🔧 DAT-18919: fossa report generation (#267)
* testing with storedLogic

* testing

* changes to Liquibase org report

* $filename

* echo statement

* refactor using cli

* check trigger of the jobs in datical repo

* testing with new workflow to generate fossa report

* FOSSA_API_KEY

* upload to s3

* combine-fossa-reports

* datical service repo trigger

* Upload to build page

* update README.md

* minor changes

* testing

* skip datical-service

* run datical-service

* testing

* enterprise-fossa-trigger-report-generation.yml

* test protoclub

* add more retries

* add ephemeral-database

* fix conditional statement

* upload to ${{ github.event.inputs.version_number_for_report_generation }}/raw_reports

* use branch

* upload to build page

* use default branch

* use --static-only-analysis

---------

Co-authored-by: Sayali M <sayali@Sayalis-MacBook-Pro>
sayaliM0412 and Sayali M authored Jan 21, 2025
1 parent ef718a3 commit f90884b
Showing 5 changed files with 254 additions and 172 deletions.
.github/workflows/fossa.yml (324 changes: 157 additions & 167 deletions)
@@ -1,184 +1,174 @@
 # Name of the GitHub Action workflow
-name: FOSSA Report Generation
+name: Enterprise- FOSSA Report Generation
 
 # Event triggers for the workflow
 on:
-  workflow_call:
-    inputs:
-      org:
-        required: false
-        type: string
-        description: 'Organization name: liquibase or datical'
   workflow_dispatch:
+    inputs:
+      version_number_for_report_generation:
+        type: string
+        description: 'Supply the DaticalDb-installer version variable which is used during its report generation to be stored in the s3 bucket. eg 8.7.352'
+        required: false
 
-# Define the jobs in the workflow
 jobs:
-  fossa-scan:
-    # Specifies the type of runner to use
+  wait-for-fossa-report-generation:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-    # Sequence of steps that make up a single job
-    steps:
-      # Checkout the code to the GitHub runner
-      - name: Checkout Code
-        uses: actions/checkout@v4
+    strategy:
+      matrix:
+        repo: [
+          { name: "DaticalDB-installer", ref: "master", owner: "Datical" },
+          { name: "ephemeral-database", ref: "master", owner: "liquibase" },
+          { name: "drivers", ref: "master", owner: "Datical" },
+          { name: "protoclub", ref: "develop", owner: "Datical" },
+          { name: "datical-sqlparser", ref: "master", owner: "Datical" },
+          { name: "storedlogic", ref: "master", owner: "Datical" },
+          { name: "AppDBA", ref: "master", owner: "Datical" },
+          { name: "liquibase-bundle", ref: "master", owner: "Datical" },
+          { name: "liquibase", ref: "ddb", owner: "Datical" },
+          { name: "ephemeral-database", ref: "master", owner: "Liquibase" }
+        ]
+
+    name: "${{ matrix.repo.name }} - Fossa Report"
+    steps:
+      - name: Set workflow inputs
+        run: |
+          if [[ "${{ matrix.repo.name }}" ]]; then
+            echo "WORKFLOW_INPUTS={ \"version_number_for_report_generation\": \"${{ github.event.inputs.version_number_for_report_generation }}\" }" >> $GITHUB_ENV
+          else
+            echo "WORKFLOW_INPUTS={}" >> $GITHUB_ENV
+          fi
 
-      - name: Set up JDK for Datical
-        if: inputs.org == 'datical'
-        uses: actions/setup-java@v4
-        with:
-          distribution: 'temurin'
-          java-version: '11'
-          cache: 'maven'
-          server-id: datical
-          server-username: REPO_MAVEN_USER
-          server-password: REPO_MAVEN_PASSWORD
-
-      - name: Log inputs
-        run: |
-          echo "Org: ${{ inputs.org }}"
+      - name: Dispatch an action and get the run ID
+        uses: codex-/return-dispatch@v1
+        id: return_dispatch
+        continue-on-error: true
+        with:
+          token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
+          ref: ${{ matrix.repo.ref }}
+          repo: ${{ matrix.repo.name }}
+          owner: ${{ matrix.repo.owner }}
+          workflow: fossa.yml
+          workflow_inputs: ${{ env.WORKFLOW_INPUTS }}
 
+      - name: Retry fetching run ID (max 4 attempts with 5 seconds delay)
+        run: |
+          retries=7
+          delay=5 # Delay of 5 seconds between retries
+          for i in $(seq 1 $retries); do
+            run_id="${{ steps.return_dispatch.outputs.run_id }}"
+            if [ -n "$run_id" ]; then
+              echo "Found run ID: $run_id"
+              echo "run_id=$run_id" >> $GITHUB_ENV
+              break
+            else
+              echo "Run ID not found, retrying in $delay seconds..."
+            fi
+            if [ $i -eq $retries ]; then
+              echo "Failed to get run ID after $retries attempts."
+              exit 1
+            fi
+            # Wait before retrying
+            sleep $delay
+          done
+        shell: bash
 
-      - name: Set up JDK for Liquibase
-        if: inputs.org == 'liquibase'
-        uses: actions/setup-java@v4
-        with:
-          distribution: 'temurin'
-          java-version: '17'
+      - name: Await Run ID ${{ steps.return_dispatch.outputs.run_id }}
+        uses: Codex-/await-remote-run@v1
+        with:
+          token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
+          run_id: ${{ steps.return_dispatch.outputs.run_id }}
+          repo: ${{ matrix.repo.name }}
+          owner: ${{ matrix.repo.owner }}
+          run_timeout_seconds: 420 # 7 minutes Time until giving up on the run
+          poll_interval_ms: 120000 # 2 minutes Frequency to poll the run for a status.
 
-      #look for dependencies in maven
-      - name: maven-settings-xml-action for Liquibase
-        if: inputs.org == 'liquibase'
-        uses: whelk-io/maven-settings-xml-action@v22
-        with:
-          repositories: |
-            [
-              {
-                "id": "liquibase",
-                "url": "https://maven.pkg.github.com/liquibase/liquibase",
-                "releases": {
-                  "enabled": "false"
-                },
-                "snapshots": {
-                  "enabled": "true",
-                  "updatePolicy": "always"
-                }
-              },
-              {
-                "id": "liquibase-pro",
-                "url": "https://maven.pkg.github.com/liquibase/liquibase-pro",
-                "releases": {
-                  "enabled": "false"
-                },
-                "snapshots": {
-                  "enabled": "true",
-                  "updatePolicy": "always"
-                }
-              }
-            ]
-          servers: |
-            [
-              {
-                "id": "liquibase-pro",
-                "username": "liquibot",
-                "password": "${{ secrets.LIQUIBOT_PAT }}"
-              },
-              {
-                "id": "liquibase",
-                "username": "liquibot",
-                "password": "${{ secrets.LIQUIBOT_PAT }}"
-              }
-            ]
-      - name: run FOSSA CLI
-        uses: fossas/fossa-action@main
-        with:
-          api-key: ${{ secrets.FOSSA_API_KEY }}
-          branch: ${{ github.ref }}
+  combine-fossa-reports:
+    runs-on: ubuntu-latest
+    needs: wait-for-fossa-report-generation
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          repository: liquibase/build-logic
+          ref: DAT-18919
+          path: build-logic
 
-      - name: Get the commit sha
-        id: get_commit_sha
-        run: |
-          commit_sha=`(git rev-parse HEAD)`
-          echo "commit_sha=${commit_sha}" >> $GITHUB_OUTPUT
-      - name: Get repository name
-        id: get_repo_name
-        run: echo "repo_name=${{ github.event.repository.name }}" >> $GITHUB_OUTPUT
+      - name: Set up AWS credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-access-key-id: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
 
-      # https://docs.fossa.com/docs/download-fossa-project-attribution-reports
-      # 7retries×30seconds=210seconds
-      - name: Datical- Set the dependency metadata information
-        if: inputs.org == 'datical'
-        run: |
-          mkdir -p /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports
-          max_retries=8
-          retry_delay=30
-          attempt=0
-          success=0
-          while [ $attempt -lt $max_retries ]; do
-            curl --location 'https://app.fossa.com/api/revisions/custom%2B40163%2Fgithub.com%2FDatical%2F${{ steps.get_repo_name.outputs.repo_name }}%24${{ steps.get_commit_sha.outputs.commit_sha }}/attribution/download?format=CSV&includeDirectDependencies=true&includeDeepDependencies=true&download=true' \
-              --header 'Authorization: Bearer ${{ secrets.FOSSA_COMBINED_REPORT }}' \
-              -o /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv && success=1 && break
-            echo "Curl failed, retrying in $retry_delay seconds..."
-            attempt=$((attempt + 1))
-            sleep $retry_delay
-          done
-          if [ $success -ne 1 ]; then
-            echo "Failed to download the report after $max_retries attempts"
-            exit 1
-          fi
-          ls -l /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports
-          cat /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv || echo "File is empty or not found"
-      # 7retries×30seconds=210seconds
-      - name: Liquibase- Set the dependency metadata information
-        if: inputs.org == 'liquibase'
-        run: |
-          mkdir -p /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports
-          max_retries=8
-          retry_delay=30
-          attempt=0
-          success=0
-          while [ $attempt -lt $max_retries ]; do
-            curl --location 'https://app.fossa.com/api/revisions/custom%2B40163%2Fgithub.com%2Fliquibase%2F${{ steps.get_repo_name.outputs.repo_name }}%24${{ steps.get_commit_sha.outputs.commit_sha }}/attribution/download?format=CSV&includeDirectDependencies=true&includeDeepDependencies=true&download=true' \
-              --header 'Authorization: Bearer ${{ secrets.FOSSA_COMBINED_REPORT }}' \
-              -o /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv && success=1 && break
-            echo "Curl failed, retrying in $retry_delay seconds..."
-            attempt=$((attempt + 1))
-            sleep $retry_delay
-          done
-          if [ $success -ne 1 ]; then
-            echo "Failed to download the report after $max_retries attempts"
-            exit 1
-          fi
-          ls -l /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports
-          cat /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv || echo "File is empty or not found"
-      # Upload report to S3
-      - name: Upload report to S3
-        if: always()
-        run: |
-          aws s3 cp /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv s3://liquibaseorg-origin/enterprise_fossa_report/
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }}
-          AWS_DEFAULT_REGION: us-east-1
-
-      - name : Upload to build page
-        uses: actions/upload-artifact@v4
-        with:
-          name: fossa-reports
-          path: |
-            /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv
+      - name: Download reports from S3 and Rearrange CSV files
+        run: |
+          # Create a directory to store downloaded reports from S3
+          mkdir -p /home/runner/work/enterprise/fossa_reports_s3
+          # Download all files from the specified S3 bucket to the created directory
+          aws s3 cp --recursive s3://liquibaseorg-origin/enterprise_fossa_report/${{ github.event.inputs.version_number_for_report_generation }}/raw_reports /home/runner/work/enterprise/fossa_reports_s3/
+          # List the contents of the directory to confirm successful download
+          ls -l /home/runner/work/enterprise/fossa_reports_s3
+          # Define an array of CSV file names
+          csv_files=("DaticalDB-installer" "drivers" "protoclub" "datical-sqlparser" "storedlogic" "AppDBA" "liquibase-bundle" "liquibase")
+          # Loop through each CSV file and remove headers again for combine report generation
+          for file in "${csv_files[@]}"; do
+            tail -n +1 /home/runner/work/enterprise/fossa_reports_s3/${file}.csv >> /home/runner/work/enterprise/fossa_reports_s3/${file}_no_header.csv
+          done
+          # Concatenate all CSV files without headers, sort, and remove duplicates
+          cat /home/runner/work/enterprise/fossa_reports_s3/*_no_header.csv | sort | uniq > /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv
+          # Add a header to the final CSV file, placing it above the sorted and unique data
+          echo 'Title,Version,Declared License,Package Homepage' | cat - /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv > temp && mv temp /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv
+          ls -l $GITHUB_WORKSPACE
+          # Read ignored dependencies from a file
+          ignoredLibsFile=$(cat $GITHUB_WORKSPACE/build-logic/.github/workflows/ignore_dependencies_fossa.txt)
+          # Split the ignored dependencies into an array
+          IFS=',' read -r -a ignoredLibs <<< "$ignoredLibsFile"
+          # Create a temporary file
+          tempfile=$(mktemp)
+          # Build the grep command to filter out ignored dependencies
+          grepCmd="grep -iv"
+          for lib in "${ignoredLibs[@]}"; do
+            grepCmd="$grepCmd -e \"$lib\""
+          done
+          # Process the FOSSA report to remove ignored dependencies
+          cat /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv | eval $grepCmd > enterprise_report.csv
+
+      - name: Upload CSV to Artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: enterprise_report
+          path: ${{ inputs.version_number_for_report_generation }}/enterprise_report.csv
+
+      - name: Upload merged CSV to S3
+        if: always()
+        run: aws s3 cp enterprise_report.csv s3://liquibaseorg-origin/enterprise_fossa_report/${{ inputs.version_number_for_report_generation }}/enterprise_report_${{ inputs.version_number_for_report_generation }}.csv
+
+  trigger-datical-service:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Dispatch an action for datical-service
+        uses: peter-evans/repository-dispatch@v3
+        with:
+          token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
+          repository: Datical/datical-service
+          event-type: trigger-fossa-report-generation
+          client-payload: '{"ref": "master", "version_number_for_report_generation": "${{ github.event.inputs.version_number_for_report_generation }}"}'

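For anyone exercising the new workflow by hand: it is started via workflow_dispatch, so a manual trigger only needs the optional version input. A minimal sketch using the GitHub CLI, assuming an authenticated gh session with access to liquibase/build-logic; the version value 8.7.352 is the example from the input description, not a required value:

    # Manually dispatch the enterprise FOSSA report workflow (sketch)
    gh workflow run fossa.yml \
      -R liquibase/build-logic \
      -f version_number_for_report_generation=8.7.352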
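The trigger-datical-service job uses peter-evans/repository-dispatch@v3, which wraps GitHub's repository dispatch REST endpoint. A rough curl equivalent of what that step sends, for illustration only; GITHUB_TOKEN here stands in for a PAT with access to Datical/datical-service, and the version value is again the documented example:

    # Sketch of the underlying repository_dispatch API call
    curl -X POST \
      -H "Accept: application/vnd.github+json" \
      -H "Authorization: Bearer $GITHUB_TOKEN" \
      https://api.github.com/repos/Datical/datical-service/dispatches \
      -d '{"event_type": "trigger-fossa-report-generation", "client_payload": {"ref": "master", "version_number_for_report_generation": "8.7.352"}}'

On the receiving side, datical-service's workflow would listen for repository_dispatch events of type trigger-fossa-report-generation and read the values from github.event.client_payload.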