Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
122 changes: 88 additions & 34 deletions .github/workflows/reusable-build-test-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,12 @@ on:
SPL_COM_PASSWORD:
description: password to splunk.com
required: true
GSSA_AWS_ACCESS_KEY_ID:
description: GSSA AWS access key id
required: true
GSSA_AWS_SECRET_ACCESS_KEY:
description: GSSA AWS secret access key
required: true
permissions:
contents: read
packages: read
Expand All @@ -107,6 +113,8 @@ env:
PYTHON_VERSION: "3.9"
POETRY_VERSION: "2.1.4"
POETRY_EXPORT_PLUGIN_VERSION: "1.9.0"
GS_IMAGE_VERSION: "1.0.0"
GS_VERSION: "0.3"
jobs:
validate-custom-version:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -481,24 +489,30 @@ jobs:
run: |
if [ -f "poetry.lock" ]
then
python${{ env.PYTHON_VERSION }} -m pip install poetry==${{ env.POETRY_VERSION }} poetry-plugin-export==${{ env.POETRY_EXPORT_PLUGIN_VERSION }}
mkdir -p package/lib || true
python${{ env.PYTHON_VERSION }} -m pip install poetry==${{ env.POETRY_VERSION }} poetry-plugin-export==${{ env.POETRY_EXPORT_PLUGIN_VERSION }}
poetry check
poetry check --lock
poetry export --without-hashes -o package/lib/requirements.txt
poetry export --without-hashes --with dev -o requirements_dev.txt
fi
if [ ! -f requirements_dev.txt ]; then echo no requirements;exit 0 ;fi
if [ ! -f dev_deps/requirements_dev.txt ]; then
echo "No dev_deps/requirements_dev.txt. Migrate your dependencies to dev_deps/requirements_dev.txt"
exit 1
else
echo "Found dev_deps/requirements_dev.txt. Installing dev dependencies in an isolated environment";
fi
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://[email protected]
poetry install --with dev
python${{ env.PYTHON_VERSION }} -m venv ~/.dev_venv
~/.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
~/.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r package/lib/requirements.txt
- name: Create directories
run: |
mkdir -p /opt/splunk/var/log/splunk
chmod -R 777 /opt/splunk/var/log/splunk
- name: Copy pytest ini
run: cp tests/unit/pytest-ci.ini pytest.ini
- name: Run Pytest with coverage
run: poetry run pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
run: ~/.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
- name: Job summary
continue-on-error: true
run: |
Expand Down Expand Up @@ -568,48 +582,41 @@ jobs:
echo "No prod dependencies were found"
rm requirements.txt
fi
poetry export --without-hashes --with dev -o requirements_dev.txt
cat requirements_dev.txt
fi
- name: Setup UCC
run: |
if [ -f "requirements_ucc.txt" ]; then
python${{ env.PYTHON_VERSION }} -m venv .ucc_venv
if [ -f "dev_deps/requirements_ucc.txt" ]; then
echo "Found requirements_ucc.txt. Installing UCC dependencies in an isolated environment"
./.ucc_venv/bin/python -m pip install -r requirements_ucc.txt
export UCC_GEN="$PWD/.ucc_venv/bin/ucc-gen"

python${{ env.PYTHON_VERSION }} -m venv ~/.ucc_venv
~/.ucc_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_ucc.txt
export UCC_GEN="$HOME/.ucc_venv/bin/ucc-gen"

if [ ! -f "$UCC_GEN" ]; then
echo "ucc-gen not found after installing requirements from requirements_ucc.txt"
echo "ucc-gen not found after installing requirements from dev_deps/requirements_ucc.txt"
exit 1
fi

echo "UCC_GEN=$UCC_GEN" >> "$GITHUB_ENV"
else
echo "No UCC requirements file found, skipping UCC setup"
echo "Consider adding a requirements_ucc.txt file and place UCC requirement there, to avoid dependency conflicts"
echo "Create a dev_deps/requirements_ucc.txt file and place UCC requirement there, to avoid dependency conflicts"
exit 1
fi
- name: Get pip cache dir
id: pip-cache
run: |
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
- name: Run Check there are libraries to scan
id: checklibs
run: if [ -f requirements_dev.txt ]; then echo "ENABLED=true" >> "$GITHUB_OUTPUT"; fi
run: if [ -f dev_deps/requirements_dev.txt ]; then echo "ENABLED=true" >> "$GITHUB_OUTPUT"; fi
- name: pip cache
if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('requirements_dev.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('dev_deps/requirements_dev.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install deps
if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
run: |
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://[email protected]
pip install -r requirements_dev.txt
- name: Semantic Release Get Next
id: semantic
if: github.event_name != 'pull_request'
Expand Down Expand Up @@ -812,6 +819,50 @@ jobs:
name: appinspect-api-html-report-${{ matrix.tags }}
path: AppInspect_response.html

run-gs-scorecard:
name: quality-gs-scorecard
needs: build
if: ${{ !cancelled() && needs.build.result == 'success' && (github.ref_name == 'main' && github.event_name == 'push') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.GSSA_AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.GSSA_AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Login to Amazon ECR
uses: aws-actions/amazon-ecr-login@v2
- name: Pull GS Scorecard image
run:
docker pull 956110764581.dkr.ecr.us-west-2.amazonaws.com/ta-automation/gs-scorecard:${{ env.GS_IMAGE_VERSION }}
- name: Run GS Scorecard
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN_ADMIN }}
GITHUB_USERNAME: ${{ secrets.SA_GH_USER_NAME }}
APPINSPECT_USER: ${{ secrets.SPL_COM_USER }}
APPINSPECT_PASS: ${{ secrets.SPL_COM_PASSWORD }}
run: |
docker run --rm \
-e GITHUB_TOKEN \
-e GITHUB_USERNAME \
-e AWS_ACCESS_KEY_ID="${{ secrets.GSSA_AWS_ACCESS_KEY_ID }}" \
-e AWS_SECRET_ACCESS_KEY="${{ secrets.GSSA_AWS_SECRET_ACCESS_KEY }}" \
-e AWS_DEFAULT_REGION="us-west-2" \
-e APPINSPECT_USER \
-e APPINSPECT_PASS \
-e GS_VERSION="${{ env.GS_VERSION }}" \
-v "$(pwd)":/addon \
956110764581.dkr.ecr.us-west-2.amazonaws.com/ta-automation/gs-scorecard:"${{ env.GS_IMAGE_VERSION }}"

- name: Upload GS Scorecard report
uses: actions/upload-artifact@v4
if: always()
with:
name: gs-scorecard-report
path: ./gs_scorecard.html

setup:
needs:
- setup-workflow
Expand Down Expand Up @@ -902,11 +953,12 @@ jobs:
env:
PYTHON_KEYRING_BACKEND: keyring.backends.null.Keyring
run: |
poetry install --only modinput
python${{ env.PYTHON_VERSION }} -m venv ~/.dev_venv
~/.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
if [ -f "tests/ucc_modinput_functional/tmp/openapi.json" ]; then
poetry run ucc-test-modinput gen -o tests/ucc_modinput_functional/tmp/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
~/.dev_venv/bin/ucc-test-modinput gen -o tests/ucc_modinput_functional/tmp/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
else
poetry run ucc-test-modinput gen -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
~/.dev_venv/bin/ucc-test-modinput gen -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
fi
- name: upload-libs-to-s3
id: upload-libs-to-s3
Expand All @@ -915,9 +967,11 @@ jobs:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
poetry install --with dev
python${{ env.PYTHON_VERSION }} -m venv ~/.dev_venv
echo "Found requirements_dev.txt. Installing dev dependencies in an isolated environment"
~/.dev_venv/bin/python${{ env.PYTHON_VERSION }} -m pip install -r dev_deps/requirements_dev.txt
libs_archive=libs_$(basename "$BUILD_NAME" .spl).tgz
cp -r "$(find "$(poetry env info --path)" -maxdepth 3 -type d -name "site-packages")" libs/
cp -r ~/.dev_venv/lib/python${{ env.PYTHON_VERSION }}/site-packages/ libs/
tar -czf "$libs_archive" libs
aws s3 cp "$libs_archive" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/$libs_archive" --only-show-errors
- name: upload-swagger-artifacts-to-s3
Expand Down Expand Up @@ -1265,7 +1319,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down Expand Up @@ -1550,7 +1604,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down Expand Up @@ -1826,7 +1880,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down Expand Up @@ -2101,7 +2155,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down Expand Up @@ -2365,7 +2419,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down Expand Up @@ -2634,7 +2688,7 @@ jobs:
run: |
start_time=${{ steps.capture-start-time.outputs.start_time }}
current_time=$(date +%s)
remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
remaining_time_minutes=$(( 300-((current_time-start_time)/60) ))
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
- name: Check if pod was deleted
id: is-pod-deleted
Expand Down
58 changes: 58 additions & 0 deletions .github/workflows/reusable-publish-to-splunkbase.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# Reusable workflow: stages a released add-on package on Splunkbase.
name: publish-to-splunkbase
on:
  workflow_call:
    inputs:
      addon_version:
        description: 'The version of the add-on to publish to Splunkbase'
        required: true
        type: string
      splunk_versions:
        description: 'Comma-separated list of supported Splunk versions'
        required: true
        type: string
      cim_versions:
        description: 'Comma-separated list of supported CIM versions'
        required: true
        type: string
    secrets:
      SPL_COM_USERNAME:
        description: 'Splunk Community username'
        required: true
      SPL_COM_PASSWORD:
        description: 'Splunk Community password'
        required: true

jobs:
  inputs-validator:
    runs-on: ubuntu-latest
    steps:
      - id: matrix
        uses: splunk/[email protected]
        with:
          features: PYTHON39
  publish:
    runs-on: ubuntu-latest
    needs:
      - inputs-validator
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      # NOTE(review): installs from a wheel file expected at the repository
      # root — confirm the wheel is committed or fetched before this step.
      - run: pip install splunk_add_on_ucc_framework-5.69.1-py3-none-any.whl
      - name: Fetch build
        env:
          GH_TOKEN: ${{ github.token }}
          # Caller-supplied input goes through env, not inline interpolation,
          # to avoid shell word-splitting and script injection.
          ADDON_VERSION: ${{ inputs.addon_version }}
        run: |
          gh release download "v$ADDON_VERSION" --pattern "*$ADDON_VERSION.spl" --output release.spl
      - name: Stage package on Splunkbase
        env:
          # Inputs and secrets are referenced as shell variables so the
          # expanded values never appear on the command line unquoted.
          SPLUNK_VERSIONS: ${{ inputs.splunk_versions }}
          CIM_VERSIONS: ${{ inputs.cim_versions }}
          SPL_USERNAME: ${{ secrets.SPL_COM_USERNAME }}
          SPL_PASSWORD: ${{ secrets.SPL_COM_PASSWORD }}
        run: |
          # .splunkbase holds the numeric Splunkbase app id for this add-on.
          APP_ID=$(cat .splunkbase)
          export APP_ID
          ucc-gen publish \
            --stage \
            --app-id "$APP_ID" \
            --package-path release.spl \
            --splunk-versions "$SPLUNK_VERSIONS" \
            --cim-versions "$CIM_VERSIONS" \
            --username "$SPL_USERNAME" \
            --password "$SPL_PASSWORD"
61 changes: 61 additions & 0 deletions .github/workflows/reusable-validate-deploy-docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Reusable workflow: validates the mkdocs site on every call and, for a
# manual dispatch from main, deploys it to GitHub Pages.
name: validate-deploy-docs

on:
  workflow_call:

jobs:
  # Builds the docs with --strict so broken links/nav fail the check.
  validate-docs-change:
    runs-on: ubuntu-latest
    outputs:
      status: ${{ steps.validate.outputs.status }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          # Quoted: unquoted version numbers are YAML floats (3.10 -> 3.1).
          python-version: "3.12"
      - name: Install mkdocs and plugins
        run: pip install mkdocs==1.6.0 mkdocs-material==9.5.32 mkdocs-print-site-plugin==2.6.0
      - name: Validate docs change
        id: validate
        shell: bash
        run: |
          RED='\033[0;31m'
          GREEN='\033[0;32m'
          NC='\033[0m'
          if mkdocs build --strict; then
            echo "status=success" >> "$GITHUB_OUTPUT"
            echo -e "${GREEN}Docs validation success${NC}"
          else
            echo "status=failure" >> "$GITHUB_OUTPUT"
            echo -e "${RED}Docs validation failure${NC}"
            exit 1
          fi

  deploy-docs:
    needs:
      - validate-docs-change
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pages: write
    # Deploy only on a manual dispatch from the main branch.
    if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install mkdocs and plugins
        run: pip install mkdocs==1.6.0 mkdocs-material==9.5.32 mkdocs-print-site-plugin==2.6.0
      - name: Build and Deploy docs
        id: deploy
        shell: bash
        run: |
          RED='\033[0;31m'
          GREEN='\033[0;32m'
          NC='\033[0m'
          # NOTE(review): belt-and-suspenders — a failed validate job already
          # blocks this job via `needs`, so this branch should be unreachable.
          if [ "${{ needs.validate-docs-change.outputs.status }}" == "failure" ]; then
            echo -e "${RED}Docs validation failed, abort docs deployment... (for more details look at Validate docs change job)${NC}"
            exit 1
          fi
          mkdocs gh-deploy --force
          echo -e "${GREEN}Deployed docs on github!${NC}"
Loading
Loading