diff --git a/.github/test-scripts/gpu-benchmark.py b/.github/test-scripts/gpu-benchmark.py
deleted file mode 100755
index f89a440..0000000
--- a/.github/test-scripts/gpu-benchmark.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-"""GPU benchmark script for testing PyTorch CUDA capabilities.
-
-Used by ``demo-gpu-dbg.yml``.
-"""
-
-import sys
-import time
-import torch
-
-
-def main():
- print(f'PyTorch version: {torch.__version__}')
- print(f'CUDA available: {torch.cuda.is_available()}')
-
- if not torch.cuda.is_available():
- print('ERROR: CUDA not available!')
- sys.exit(1)
-
- # Get GPU information
- device = torch.cuda.get_device_properties(0)
- print(f'GPU: {device.name}')
- print(f'Memory: {device.total_memory / 1e9:.1f} GB')
- print(f'CUDA version: {torch.version.cuda}')
-
- # Matrix multiplication benchmark
- print('\nRunning matrix multiplication benchmark...')
- size = 8192
- x = torch.randn(size, size, dtype=torch.float32).cuda()
- torch.cuda.synchronize()
-
- # Warmup
- print(' Warming up...')
- for _ in range(3):
- _ = torch.matmul(x, x.T)
- torch.cuda.synchronize()
-
- # Benchmark
- print(' Running benchmark...')
- start = time.time()
- y = torch.matmul(x, x.T)
- torch.cuda.synchronize()
- elapsed = time.time() - start
-
- gflops = (2 * size**3) / (elapsed * 1e9)
- print(f' Matrix size: {size}x{size}')
- print(f' Time: {elapsed:.3f} seconds')
- print(f' Performance: {gflops:.1f} GFLOPS')
- print(f' Memory used: {torch.cuda.max_memory_allocated()/1e9:.2f} GB')
-
- # Quick training simulation
- print('\nSimulating model training...')
- model = torch.nn.Sequential(
- torch.nn.Linear(1024, 512),
- torch.nn.ReLU(),
- torch.nn.Linear(512, 256),
- torch.nn.ReLU(),
- torch.nn.Linear(256, 10)
- ).cuda()
-
- optimizer = torch.optim.Adam(model.parameters())
- data = torch.randn(32, 1024).cuda()
-
- for i in range(10):
- output = model(data)
- loss = output.sum()
- loss.backward()
- optimizer.step()
- optimizer.zero_grad()
- if i == 0 or i == 9:
- print(f' Iteration {i+1}: loss = {loss.item():.4f}')
-
- print('\nGPU workload completed successfully!')
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/.github/workflows/README.md b/.github/workflows/README.md
new file mode 100644
index 0000000..69b408e
--- /dev/null
+++ b/.github/workflows/README.md
@@ -0,0 +1,105 @@
+# `ec2-gha` Demos
+This directory contains ec2-gha's reusable workflow and the demo workflows that exercise its capabilities.
+
+For documentation about the main workflow, [`runner.yml`](runner.yml), see [the main README](../../README.md).
+
+
+- [`demos` – run all demo workflows](#demos)
+- [Core demos](#core)
+ - [`dbg-minimal` – configurable debugging instance](#dbg-minimal)
+ - [`gpu-minimal` – `nvidia-smi` "hello world"](#gpu-minimal)
+ - [`cpu-sweep` – OS/architecture matrix](#cpu-sweep)
+ - [`gpu-sweep` – GPU instance types with PyTorch](#gpu-sweep)
+- [Parallelization](#parallel)
+ - [`instances-mtx` – multiple instances for parallel jobs](#instances-mtx)
+ - [`runners-mtx` – multiple runners on single instance](#runners-mtx)
+ - [`jobs-split` – different job types on separate instances](#jobs-split)
+- [Stress testing](#stress-tests)
+ - [`test-disk-full` – disk-full scenario testing](#test-disk-full)
+- [Real-world example: Mamba installation testing](#mamba)
+
+
+## [`demos`](demos.yml) – run all demo workflows
+A useful regression test: runs every demo workflow below, demonstrating and verifying the features they cover.
+
+[Example run][demos#25]
+
+## Core demos
+
+### [`dbg-minimal`](demo-dbg-minimal.yml) – configurable debugging instance
+- `workflow_dispatch` with customizable parameters (instance type, AMI, timeouts)
+- Also callable via `workflow_call` (used by `cpu-sweep`)
+- Extended debug mode for troubleshooting
+- **Instance type:** `t3.large` (default), configurable
+- **Use case:** Interactive debugging and testing
+
+### [`gpu-minimal`](demo-gpu-minimal.yml) – `nvidia-smi` "hello world"
+- **Instance type:** `g4dn.xlarge`
+
+### [`cpu-sweep`](demo-cpu-sweep.yml) – OS/architecture matrix
+- Tests 12 combinations across operating systems and architectures
+- **OS:** Ubuntu 22.04/24.04, Debian 11/12, AL2, AL2023
+- **Architectures:** x86 (`t3.*`) and ARM (`t4g.*`)
+- Calls `dbg-minimal` for each combination
+- **Use case:** Cross-platform compatibility testing
+
+### [`gpu-sweep`](demo-gpu-sweep.yml) – GPU instance types with PyTorch
+- Tests different GPU instance families
+- **Instance types:** `g4dn.xlarge`, `g5.xlarge`, `g6.xlarge`, `g5g.xlarge` (ARM64 + GPU)
+- Uses Deep Learning OSS PyTorch 2.5.1 AMIs
+- Activates conda environment and runs PyTorch CUDA tests
+- **Use case:** GPU compatibility and performance testing
+
+## Parallelization
+
+### [`instances-mtx`](demo-instances-mtx.yml) – multiple instances for parallel jobs
+- Creates configurable number of instances (default: 3)
+- Uses matrix strategy to run jobs in parallel
+- Each job runs on its own EC2 instance
+- **Instance type:** `t3.medium`
+- **Use case:** Parallel test execution, distributed builds
+
+### [`runners-mtx`](demo-runners-mtx.yml) – multiple runners on single instance
+- Configurable runners per instance (default: 3)
+- All runners share the same instance resources
+- Demonstrates resource-efficient parallel execution
+- **Instance type:** `t3.xlarge` (larger instance for multiple runners)
+- **Use case:** Shared environment testing, resource optimization
+
+### [`jobs-split`](demo-jobs-split.yml) – different job types on separate instances
+- Launches 2 instances
+- Build job runs on first instance
+- Test job runs on second instance
+- Demonstrates targeted job placement
+- **Instance type:** `t3.medium`
+- **Use case:** Pipeline with dedicated instances per stage
+
+## Stress testing
+
+### [`test-disk-full`](test-disk-full.yml) – disk-full scenario testing
+- Tests runner behavior when disk space is exhausted
+- **Configurable parameters:**
+ - `disk_size`: Root disk size (`0`=AMI default, `+N`=AMI+N GB, e.g., `+2`)
+ - `fill_strategy`: How to fill disk (`gradual`, `immediate`, or `during-tests`)
+ - `debug`: Debug mode (`false`, `true`/`trace`, or number for trace+sleep)
+ - `max_instance_lifetime`: Maximum lifetime before forced shutdown (default: 15 minutes)
+- **Features tested:**
+ - Heartbeat mechanism for detecting stuck jobs
+ - Stale job file detection and cleanup
+ - Worker/Listener process monitoring
+ - Robust shutdown with multiple fallback methods
+- **Instance type:** `t3.medium` (default)
+- **Use case:** Verifying robustness in resource-constrained environments
+
+## Real-world example: [Mamba installation testing](https://github.com/Open-Athena/mamba/blob/gha/.github/workflows/install.yaml)
+- Tests different versions of `mamba_ssm` package on GPU instances
+- **Customizes `instance_name`**: `"$repo/$name==${{ inputs.mamba_version }} (#$run)"`
+ - Results in descriptive names like `"mamba/install==2.2.5 (#123)"`
+ - Makes it easy to identify which version is being tested on each instance
+- Uses pre-installed PyTorch from DLAMI conda environment
+- **Use case:** Package compatibility testing across versions
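+
+As an illustrative sketch (not the action's actual code) of how that name template expands: GitHub Actions interpolates `${{ inputs.mamba_version }}` first, then the action substitutes `$repo`, `$name`, and `$run` via Python's `string.Template` (per the `instance_name` input docs):
+
+```python
+from string import Template
+
+# Hypothetical values for one run of the Mamba install workflow
+name = Template("$repo/$name==2.2.5 (#$run)").safe_substitute(
+    repo="mamba",    # repository name
+    name="install",  # workflow filename stem
+    run="123",       # run number
+)
+print(name)  # mamba/install==2.2.5 (#123)
+```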
+
+[Example run][mamba#12]
+
+[mamba#12]: https://github.com/Open-Athena/mamba/actions/runs/16972369660/
+[demos#25]: https://github.com/Open-Athena/ec2-gha/actions/runs/17004697889
diff --git a/.github/workflows/demo-archs.yml b/.github/workflows/demo-archs.yml
deleted file mode 100644
index 68d3228..0000000
--- a/.github/workflows/demo-archs.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-name: Demo – 2 GPU instances with different architectures
-on:
- workflow_dispatch:
- workflow_call: # Tested by `demos.yml`
-permissions:
- id-token: write # Required for AWS OIDC authentication
- contents: read # Required for actions/checkout
-jobs:
- # Launch EC2 runners for each instance type
- g4dn:
- name: Launch g4dn
- uses: ./.github/workflows/runner.yml
- with:
- ec2_instance_type: g4dn.xlarge
- secrets: inherit
- g4ad:
- name: Launch g4ad
- uses: ./.github/workflows/runner.yml
- with:
- ec2_instance_type: g4ad.xlarge
- secrets: inherit
-
- # Run jobs directly on launched instances
- test-g4dn:
- needs: g4dn
- if: needs.g4dn.outputs.id != ''
- name: Test g4dn
- runs-on: ${{ needs.g4dn.outputs.id }}
- steps:
- - name: GPU test on g4dn.xlarge
- run: nvidia-smi # Verify GPU is available
- test-g4ad:
- needs: g4ad
- if: needs.g4ad.outputs.id != ''
- name: Test g4ad
- runs-on: ${{ needs.g4ad.outputs.id }}
- steps:
- - name: GPU test on g4ad.xlarge
- run: |
- lspci | grep -i "vga\|display\|3d\|amd"
-
- # Use a `matrix` to run jobs on multiple instances
- test-matrix:
- needs: [g4dn, g4ad]
- if: always() && (needs.g4dn.outputs.id != '' || needs.g4ad.outputs.id != '')
- name: Test ${{ matrix.instance }} (matrix)
- continue-on-error: true
- strategy:
- matrix:
- include:
- - instance: g4dn
- runner: ${{ needs.g4dn.outputs.id }}
- - instance: g4ad
- runner: ${{ needs.g4ad.outputs.id }}
- fail-fast: false
- runs-on: ${{ matrix.runner }}
- steps:
- - name: Matrix GPU test on ${{ matrix.instance }}.xlarge
- run: |
- echo "Running on ${{ matrix.instance }}.xlarge"
- echo "Instance type: $(curl -s http://169.254.169.254/latest/meta-data/instance-type)"
-
- # Check GPU based on instance type
- if [[ "${{ matrix.instance }}" == "g4dn" ]]; then
- echo "Testing NVIDIA GPU..."
- nvidia-smi
- elif [[ "${{ matrix.instance }}" == "g4ad" ]]; then
- echo "Testing AMD GPU..."
- lspci | grep -i "vga\|display\|3d\|amd"
- fi
diff --git a/.github/workflows/demo-cpu-sweep.yml b/.github/workflows/demo-cpu-sweep.yml
new file mode 100644
index 0000000..939748b
--- /dev/null
+++ b/.github/workflows/demo-cpu-sweep.yml
@@ -0,0 +1,44 @@
+name: Demo – OS/Architecture sweep (CPU nodes)
+on:
+ workflow_dispatch:
+ inputs:
+ sleep:
+ description: "Sleep duration in seconds for workload simulation"
+ required: false
+ type: string
+ default: "10"
+ workflow_call: # Can be called from other workflows
+ inputs:
+ sleep:
+ required: false
+ type: string
+ default: "10"
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+ contents: read # Required for actions/checkout
+jobs:
+ os-arch-matrix:
+ uses: ./.github/workflows/demo-dbg-minimal.yml
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - { "os": Ubuntu 22.04, "arch": x86, "ami": ami-021589336d307b577, "instance_type": t3.medium }
+ - { "os": Ubuntu 22.04, "arch": ARM, "ami": ami-06daf9c2d2cf1cb37, "instance_type": t4g.medium }
+ - { "os": Ubuntu 24.04, "arch": x86, "ami": ami-0ca5a2f40c2601df6, "instance_type": t3.medium }
+ - { "os": Ubuntu 24.04, "arch": ARM, "ami": ami-0aa307ed50ca3e58f, "instance_type": t4g.medium }
+ - { "os": Debian 11 , "arch": x86, "ami": ami-0e6612f57082e7ea4, "instance_type": t3.large }
+ - { "os": Debian 11 , "arch": ARM, "ami": ami-0c3f5b0b87f042da8, "instance_type": t4g.large }
+ - { "os": Debian 12 , "arch": x86, "ami": ami-05b50089e01b13194, "instance_type": t3.large }
+ - { "os": Debian 12 , "arch": ARM, "ami": ami-0505441d7e1514742, "instance_type": t4g.large }
+ - { "os": AL2 , "arch": x86, "ami": ami-0e2c86481225d3c51, "instance_type": t3.small }
+ - { "os": AL2 , "arch": ARM, "ami": ami-08333c9352b93f31e, "instance_type": t4g.small }
+ - { "os": AL2023 , "arch": x86, "ami": ami-00ca32bbc84273381, "instance_type": t3.small }
+ - { "os": AL2023 , "arch": ARM, "ami": ami-0aa7db6294d00216f, "instance_type": t4g.small }
+ name: ${{ matrix.os }} ${{ matrix.arch }}
+ with:
+ type: ${{ matrix.instance_type }}
+ ami: ${{ matrix.ami }}
+ sleep: ${{ inputs.sleep }}
+ instance_name: 'cpu-sweep#$run ${{ matrix.os }} ${{ matrix.arch }}'
+ secrets: inherit
diff --git a/.github/workflows/demo-dbg-minimal.yml b/.github/workflows/demo-dbg-minimal.yml
index 7b37cc4..0484008 100644
--- a/.github/workflows/demo-dbg-minimal.yml
+++ b/.github/workflows/demo-dbg-minimal.yml
@@ -1,17 +1,151 @@
name: Demo – configurable instance for debugging
on:
workflow_dispatch:
- workflow_call: # Tested by `demos.yml`
+ inputs:
+ type:
+ description: "EC2 instance type (e.g., t3.medium, t3.large)"
+ required: false
+ type: string
+ default: "t3.large"
+ ami:
+ description: "AMI ID to use"
+ required: false
+ type: string
+ default: "ami-0e86e20dae9224db8" # Ubuntu 24.04 LTS x86_64 (us-east-1)
+ registration_timeout:
+ description: "Max seconds to wait for runner registration (default: 360)"
+ required: false
+ type: string
+ default: "360"
+ initial_grace_period:
+ description: "Grace period if no job starts (default: 180)"
+ required: false
+ type: string
+ default: "180"
+ grace_period:
+ description: "Grace period before termination after job (default: 60)"
+ required: false
+ type: string
+ default: "60"
+ max_instance_lifetime:
+ description: "Max instance lifetime in minutes (default: 360)"
+ required: false
+ type: string
+ default: "360"
+ sleep:
+ description: "Sleep duration in seconds"
+ required: false
+ type: string
+ default: "600"
+ debug:
+ description: "Enable debug mode (extends termination delay to 600s)"
+ required: false
+ type: boolean
+ default: true
+ instance_name:
+ description: "Instance name"
+ required: false
+ type: string
+  workflow_call: # Called by `demo-cpu-sweep`; exercises the "minimal" side more than the "dbg"
+ inputs:
+ type:
+ description: "EC2 instance type"
+ required: false
+ type: string
+ default: "t3.large"
+ ami:
+ description: "AMI ID to use"
+ required: false
+ type: string
+ default: "ami-0e86e20dae9224db8" # Ubuntu 24.04 LTS x86_64 (us-east-1)
+ sleep:
+ description: "Sleep duration in seconds"
+ required: false
+ type: string
+ default: "10"
+ debug:
+ description: "Enable debug mode"
+ required: false
+ type: boolean
+ default: false
+ instance_name:
+ description: "Instance name"
+ required: false
+ type: string
+
permissions:
id-token: write # Required for AWS OIDC authentication
- contents: read # Required for actions/checkout
+  contents: read # Required for actions/checkout; normally set by default, but must be explicitly specified when defining a custom `permissions` block.
+
jobs:
- ec2:
- # To use from another repo: Open-Athena/ec2-gha/.github/workflows/runner.yml
+ launch:
+ name: Launch ${{ inputs.type }}
uses: ./.github/workflows/runner.yml
+ with:
+ name: 🚀
+ ec2_instance_type: ${{ inputs.type }}
+ ec2_image_id: ${{ inputs.ami }}
+ debug: ${{ inputs.debug }}
+ instance_name: ${{ inputs.instance_name || 'debug/$name#$run' }}
+      # `workflow_dispatch` has higher defaults for these; revert to original defaults for `workflow_call`
+ runner_registration_timeout: ${{ inputs.registration_timeout || '300' }}
+ runner_grace_period: ${{ inputs.grace_period || '60' }}
+ runner_initial_grace_period: ${{ inputs.initial_grace_period || '120' }}
+ max_instance_lifetime: ${{ inputs.max_instance_lifetime || '60' }}
secrets: inherit
- gpu-test:
- needs: ec2
- runs-on: ${{ needs.ec2.outputs.id }}
+
+ test:
+ needs: launch
+ name: 🔬
+ runs-on: ${{ needs.launch.outputs.id }}
steps:
- - run: nvidia-smi # Verify GPU is available
+ - name: Instance Info
+ run: |
+ echo "=== Instance Information ==="
+ echo "Hostname: $(hostname)"
+ echo "Instance ID: $(curl -s http://169.254.169.254/latest/meta-data/instance-id)"
+ echo "Instance type: $(curl -s http://169.254.169.254/latest/meta-data/instance-type)"
+ echo "Public IP: $(curl -s http://169.254.169.254/latest/meta-data/public-ipv4)"
+ echo "Region: $(curl -s http://169.254.169.254/latest/meta-data/placement/region)"
+ echo "AMI ID: $(curl -s http://169.254.169.254/latest/meta-data/ami-id)"
+ echo ""
+ echo "=== System Details ==="
+ echo "OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d'"' -f2)"
+ echo "Kernel: $(uname -r)"
+ echo "Architecture: $(uname -m)"
+ echo "CPU cores: $(nproc)"
+ echo "Memory: $(free -h | grep Mem | awk '{print $2}')"
+ echo "User: $(whoami)"
+ echo "Home: $HOME"
+
+ - name: Sleep Test
+ run: |
+ DURATION=${{ inputs.sleep }}
+ echo "Starting sleep at: $(date '+%Y-%m-%d %H:%M:%S.%3N')"
+ echo "Sleeping for ${DURATION} seconds..."
+ sleep $DURATION
+ echo "Finished at: $(date '+%Y-%m-%d %H:%M:%S.%3N')"
+
+ - name: Debug Info
+ run: |
+ echo "=== Runner Environment ==="
+ echo "Runner home: ${RUNNER_HOME:-not set}"
+ echo "Runner workspace: ${GITHUB_WORKSPACE:-not set}"
+ echo "Runner temp: ${RUNNER_TEMP:-not set}"
+ echo ""
+ echo "=== Timeout Settings ==="
+ echo "Debug mode: ${{ inputs.debug }}"
+ echo "Registration timeout: ${{ inputs.registration_timeout }}s"
+ echo "Grace period: ${{ inputs.grace_period }}s"
+ echo "Initial grace period: ${{ inputs.initial_grace_period }}s"
+ echo "Max lifetime: ${{ inputs.max_instance_lifetime }} minutes"
+ echo ""
+ echo "=== Important Logs ==="
+ echo "Setup log: /var/log/runner-setup.log"
+ echo "Debug log: /var/log/runner-debug.log"
+ if [ -f /tmp/runner-0-config.log ]; then
+ echo "Config log exists: /tmp/runner-0-config.log"
+ echo "Last 5 lines of config log:"
+ tail -5 /tmp/runner-0-config.log
+ fi
+
diff --git a/.github/workflows/demo-gpu-minimal.yml b/.github/workflows/demo-gpu-minimal.yml
index 965fa58..1bcb877 100644
--- a/.github/workflows/demo-gpu-minimal.yml
+++ b/.github/workflows/demo-gpu-minimal.yml
@@ -1,8 +1,20 @@
-name: Demo – minimal EC2 GPU runner
+name: Demo – minimal EC2 GPU job
on:
workflow_dispatch:
+ workflow_call: # Tested by `demos.yml`
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+  contents: read # Required for actions/checkout; normally set by default, but must be explicitly specified when defining a custom `permissions` block.
jobs:
- placeholder:
- runs-on: ubuntu-latest
+ ec2:
+ # To use from another repo: Open-Athena/ec2-gha/.github/workflows/runner.yml@v2
+ uses: ./.github/workflows/runner.yml
+ secrets: inherit
+ with:
+ ec2_instance_type: g4dn.xlarge # ≈$0.56/hr GPU instance: https://instances.vantage.sh/aws/ec2/g4dn.xlarge
+ ec2_image_id: ami-00096836009b16a22 # Deep Learning OSS Nvidia Driver AMI GPU PyTorch 2.4.1 (Ubuntu 22.04) 20250302
+ gpu-test:
+ needs: ec2
+ runs-on: ${{ needs.ec2.outputs.id }}
steps:
- - run: echo "Placeholder workflow – being developed in #2 / rw/hooks branch"
\ No newline at end of file
+ - run: nvidia-smi # Verify GPU is available
diff --git a/.github/workflows/demo-gpu-sweep.yml b/.github/workflows/demo-gpu-sweep.yml
new file mode 100644
index 0000000..0db7c9c
--- /dev/null
+++ b/.github/workflows/demo-gpu-sweep.yml
@@ -0,0 +1,116 @@
+name: Demo – GPU instance sweep
+on:
+ workflow_dispatch:
+ workflow_call:
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+ contents: read # Required for actions/checkout
+jobs:
+ # Launch GPU instances with PyTorch DLAMIs
+ g4dn:
+ name: 🚀 g4dn.xlarge
+ uses: ./.github/workflows/runner.yml
+ with:
+ ec2_instance_type: g4dn.xlarge
+ ec2_image_id: ami-00dddcf8fefea182f # Deep Learning OSS PyTorch 2.5.1 Ubuntu 22.04
+ instance_name: "gpu-sweep#$run g4dn"
+ secrets: inherit
+ g5:
+ name: 🚀 g5.xlarge
+ uses: ./.github/workflows/runner.yml
+ with:
+ ec2_instance_type: g5.xlarge
+ ec2_image_id: ami-00dddcf8fefea182f # Deep Learning OSS PyTorch 2.5.1 Ubuntu 22.04
+ instance_name: "gpu-sweep#$run g5"
+ secrets: inherit
+ g6:
+ name: 🚀 g6.xlarge
+ uses: ./.github/workflows/runner.yml
+ with:
+ ec2_instance_type: g6.xlarge
+ ec2_image_id: ami-00dddcf8fefea182f # Deep Learning OSS PyTorch 2.5.1 Ubuntu 22.04
+ instance_name: "gpu-sweep#$run g6"
+ secrets: inherit
+ g5g:
+ name: 🚀 g5g.xlarge
+ uses: ./.github/workflows/runner.yml
+ with:
+ ec2_instance_type: g5g.xlarge
+ ec2_image_id: ami-00cbe74a3dff23b9f # Deep Learning ARM64 OSS PyTorch 2.5.1 Ubuntu 22.04
+ instance_name: "gpu-sweep#$run g5g"
+ secrets: inherit
+
+ # Test jobs for each GPU instance
+ test-g4dn:
+ name: 🔬 g4dn.xlarge
+ needs: g4dn
+ runs-on: ${{ needs.g4dn.outputs.id }}
+ steps:
+ - name: GPU Test
+ run: |
+ nvidia-smi
+ # Activate PyTorch conda environment
+ source /opt/conda/etc/profile.d/conda.sh
+ conda activate pytorch
+ python3 -c "import torch; print(f'PyTorch: {torch.__version__}, CUDA: {torch.cuda.is_available()}')"
+ python3 -c "import torch; print(f'GPU: {torch.cuda.get_device_name(0)}')"
+
+ test-g5:
+ name: 🔬 g5.xlarge
+ needs: g5
+ runs-on: ${{ needs.g5.outputs.id }}
+ steps:
+ - name: GPU Test
+ run: |
+ nvidia-smi
+ # Activate PyTorch conda environment
+ source /opt/conda/etc/profile.d/conda.sh
+ conda activate pytorch
+ python3 -c "import torch; print(f'PyTorch: {torch.__version__}, GPU: {torch.cuda.get_device_name(0)}')"
+
+ test-g6:
+ name: 🔬 g6.xlarge
+ needs: g6
+ runs-on: ${{ needs.g6.outputs.id }}
+ steps:
+ - name: GPU Test
+ run: |
+ nvidia-smi
+ # Activate PyTorch conda environment
+ source /opt/conda/etc/profile.d/conda.sh
+ conda activate pytorch
+ python3 -c "import torch; print(f'PyTorch: {torch.__version__}, GPU: {torch.cuda.get_device_name(0)}')"
+
+ test-g5g:
+ name: 🔬 g5g.xlarge
+ needs: g5g
+ runs-on: ${{ needs.g5g.outputs.id }}
+ steps:
+ - name: GPU Info
+ run: |
+ echo "=== GPU Instance Information ==="
+ echo "g5g.xlarge: AWS Graviton (ARM64) + NVIDIA T4g GPU"
+ nvidia-smi
+ echo ""
+ echo "=== PyTorch Test ==="
+ # Activate PyTorch conda environment
+ source /opt/conda/etc/profile.d/conda.sh
+ conda activate pytorch
+ python3 -c "import torch; print(f'PyTorch: {torch.__version__}')"
+ python3 -c "import torch; print(f'CUDA Available: {torch.cuda.is_available()}')"
+ python3 -c "import torch; print(f'CUDA Version: {torch.version.cuda}')"
+ python3 -c "import torch; print(f'GPU: {torch.cuda.get_device_name(0) if torch.cuda.is_available() else \"N/A\"}')"
+ - name: Basic GPU Test
+ run: |
+ source /opt/conda/etc/profile.d/conda.sh
+ conda activate pytorch
+ python3 -c "
+ import torch
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ print(f'Using device: {device}')
+ if device.type == 'cuda':
+ x = torch.randn(1000, 1000).to(device)
+ y = torch.randn(1000, 1000).to(device)
+ z = torch.matmul(x, y)
+ print(f'Matrix multiplication result shape: {z.shape}')
+ "
diff --git a/.github/workflows/demo-gpu.yml b/.github/workflows/demo-gpu.yml
deleted file mode 100644
index 0a0cb0a..0000000
--- a/.github/workflows/demo-gpu.yml
+++ /dev/null
@@ -1,74 +0,0 @@
-name: Demo – toy GPU workload, optional sleep (for debugging)
-on:
- workflow_dispatch:
- inputs:
- sleep:
- description: "Sleep for this many seconds before completing job (optional, helps keep instance alive for SSH access and debugging)"
- required: false
- type: number
- default: 0
- ssh_pubkey:
- description: "Add this SSH public key to instance's `~/.ssh/authorized_keys` (optional, for debugging)"
- required: false
- type: string
- workflow_call: # Tested by `demos.yml`
- inputs:
- sleep:
- required: false
- type: number
- default: 0
- ssh_pubkey:
- required: false
- type: string
-
-permissions:
- id-token: write # Required for AWS OIDC authentication
- contents: read # Required for actions/checkout
-
-jobs:
- ec2:
- uses: ./.github/workflows/runner.yml
- secrets: inherit
- with:
- ssh_pubkey: ${{ inputs.ssh_pubkey }}
-
- gpu-workload:
- needs: ec2
- runs-on: ${{ needs.ec2.outputs.id }}
- steps:
- - uses: actions/checkout@v4
-
- - name: System info
- run: |
- echo "=== System Info ==="
- uname -a
- lscpu | grep "Model name" || true
- free -h
-
- echo -e "\n=== GPU Info ==="
- nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv
-
- - name: Install PyTorch with CUDA support
- run: |
- echo "=== Installing PyTorch with CUDA support ==="
- # Install python3-venv if not available
- sudo apt-get update && sudo apt-get install -y python3-venv
- # Use system Python3 and create a virtual environment to avoid pip root warning
- python3 --version
- python3 -m venv /tmp/venv
- source /tmp/venv/bin/activate
- pip install torch --index-url https://download.pytorch.org/whl/cu121
- echo "PyTorch installed successfully"
-
- - name: Run PyTorch GPU benchmark
- run: |
- echo "=== PyTorch GPU Benchmark ==="
- source /tmp/venv/bin/activate
- python .github/test-scripts/gpu-benchmark.py
-
- - name: Sleep (${{ inputs.sleep }}s)
- if: ${{ inputs.sleep > 0 }}
- run: |
- echo "Sleeping for ${{ inputs.sleep }} seconds (useful for SSH debugging)..."
- echo "Instance will remain available at the IP shown in the GitHub Actions log"
- sleep ${{ inputs.sleep }}
diff --git a/.github/workflows/demo-instances-mtx.yml b/.github/workflows/demo-instances-mtx.yml
new file mode 100644
index 0000000..dec379c
--- /dev/null
+++ b/.github/workflows/demo-instances-mtx.yml
@@ -0,0 +1,61 @@
+name: Demo – multiple instances for parallel jobs
+on:
+ workflow_dispatch:
+ inputs:
+ instance_count:
+ description: "Number of EC2 instances to create"
+ required: false
+ type: string
+ default: "3"
+ workflow_call: # Tested by `demos.yml`
+ inputs:
+ instance_count:
+ required: false
+ type: string
+ default: "3"
+
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+  contents: read  # Required for actions/checkout; normally set by default, but must be explicitly specified when defining a custom `permissions` block.
+
+jobs:
+ ec2:
+ name: Launch ${{ inputs.instance_count }} EC2 instances
+ uses: ./.github/workflows/runner.yml
+ secrets: inherit
+ with:
+ instance_count: ${{ inputs.instance_count }}
+ ec2_image_id: ami-0e86e20dae9224db8 # Ubuntu 24.04 LTS x86_64 (us-east-1)
+
+ parallel-jobs:
+ needs: ec2
+ strategy:
+ matrix:
+ # Parse the JSON array of runner objects for the matrix
+ runner: ${{ fromJson(needs.ec2.outputs.mtx) }}
+ runs-on: ${{ matrix.runner.id }}
+    name: 'Job #${{ matrix.runner.idx }} (instance ${{ matrix.runner.instance_idx }})'
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Instance info
+ run: |
+ echo "Running on instance with label: ${{ matrix.runner.id }}"
+ echo "Job index: ${{ matrix.runner.idx }}"
+ echo "Hostname: $(hostname)"
+ echo "Instance ID: $(curl -s http://169.254.169.254/latest/meta-data/instance-id)"
+ echo "Instance type: $(curl -s http://169.254.169.254/latest/meta-data/instance-type)"
+ echo "Region: $(curl -s http://169.254.169.254/latest/meta-data/placement/region)"
+
+ - name: Simulate workload
+ run: |
+ # Each instance runs independently
+ DURATION=$((RANDOM % 10 + 5))
+ echo "Simulating workload for ${DURATION} seconds..."
+ sleep $DURATION
+ echo "Workload complete!"
+
+ - name: Verify parallelism
+ run: |
+ echo "Completed at: $(date '+%Y-%m-%d %H:%M:%S')"
+ echo "This job ran in parallel with other matrix jobs"
diff --git a/.github/workflows/demo-job-seq.yml b/.github/workflows/demo-job-seq.yml
deleted file mode 100644
index 6bb51b6..0000000
--- a/.github/workflows/demo-job-seq.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-name: Demo – multiple jobs on one EC2 instance, in sequence
-# Tests that multiple jobs can run sequentially on the same EC2 instance
-on:
- workflow_dispatch:
- inputs:
- sleep:
- description: "Sleep this many seconds at the end of the `prepare`, `train`, and `eval` jobs (optional, for SSH access and debugging)"
- required: false
- type: number
- default: 0
- workflow_call: # Tested by `demos.yml`
- inputs:
- sleep:
- required: false
- type: number
- default: 0
-
-permissions:
- id-token: write
- contents: read
-
-jobs:
- ec2:
- uses: ./.github/workflows/runner.yml
- secrets: inherit
-
- prepare:
- needs: ec2
- runs-on: ${{ needs.ec2.outputs.id }}
- outputs:
- gpu-uuid: ${{ steps.gpu-info.outputs.uuid }}
- steps:
- - name: Get GPU info
- id: gpu-info
- run: |
- echo "=== Preparing GPU environment ==="
- nvidia-smi
- uuid=$(nvidia-smi --query-gpu=gpu_uuid --format=csv,noheader)
- echo "uuid=$uuid" >> $GITHUB_OUTPUT
- echo "GPU UUID: $uuid"
-
- - name: Sleep (${{ inputs.sleep }}s)
- if: ${{ inputs.sleep > 0 }}
- run: sleep ${{ inputs.sleep }}
-
- train:
- needs: [ec2, prepare]
- runs-on: ${{ needs.ec2.outputs.id }}
- steps:
- - name: Verify same GPU
- run: |
- echo "=== Training model on GPU ==="
- current_uuid=$(nvidia-smi --query-gpu=gpu_uuid --format=csv,noheader)
- if [[ "$current_uuid" == "${{ needs.prepare.outputs.gpu-uuid }}" ]]; then
- echo "✅ Confirmed: Using same GPU as preparation job"
- else
- echo "❌ ERROR: Different GPU!"
- exit 1
- fi
-
- - name: Sleep (${{ inputs.sleep }}s)
- if: ${{ inputs.sleep > 0 }}
- run: sleep ${{ inputs.sleep }}
-
- eval:
- needs: [ec2, train]
- runs-on: ${{ needs.ec2.outputs.id }}
- steps:
- - name: Final validation
- run: |
- echo "=== Evaluation complete ==="
- nvidia-smi
-
- - name: Sleep (${{ inputs.sleep }}s)
- if: ${{ inputs.sleep > 0 }}
- run: sleep ${{ inputs.sleep }}
diff --git a/.github/workflows/demo-jobs-split.yml b/.github/workflows/demo-jobs-split.yml
new file mode 100644
index 0000000..40b3782
--- /dev/null
+++ b/.github/workflows/demo-jobs-split.yml
@@ -0,0 +1,89 @@
+name: Demo – multiple instances for different job types
+on:
+ workflow_dispatch:
+ workflow_call: # Tested by `demos.yml`
+
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+  contents: read  # Required for actions/checkout; normally set by default, but must be explicitly specified when defining a custom `permissions` block.
+
+jobs:
+ ec2:
+ name: Launch 2 EC2 instances
+ uses: ./.github/workflows/runner.yml
+ secrets: inherit
+ with:
+ instance_count: "2"
+ ec2_image_id: ami-0e86e20dae9224db8 # Ubuntu 24.04 LTS x86_64 (us-east-1)
+
+ # First job type - runs on first instance
+ build-job:
+ needs: ec2
+ runs-on: ${{ fromJson(needs.ec2.outputs.mtx)[0].id }}
+ name: Build job
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Build task
+ run: |
+ echo "Running BUILD job on first instance"
+ echo "Instance: $(curl -s http://169.254.169.254/latest/meta-data/instance-id)"
+ echo "Simulating build process..."
+ sleep 10
+ echo "Build complete!"
+
+ - name: Create artifact
+ run: |
+ echo "Build output from $(hostname)" > build-output.txt
+ echo "Built at $(date)" >> build-output.txt
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: build-output
+ path: build-output.txt
+
+ # Second job type - runs on second instance
+ test-job:
+ needs: ec2
+ runs-on: ${{ fromJson(needs.ec2.outputs.mtx)[1].id }}
+ name: Test job
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Test task
+ run: |
+ echo "Running TEST job on second instance"
+ echo "Instance: $(curl -s http://169.254.169.254/latest/meta-data/instance-id)"
+ echo "Simulating test suite..."
+ sleep 15
+ echo "Tests passed!"
+
+ - name: Generate test report
+ run: |
+ echo "Test report from $(hostname)" > test-report.txt
+ echo "Tests run at $(date)" >> test-report.txt
+ echo "All tests: PASSED" >> test-report.txt
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: test-report
+ path: test-report.txt
+
+ # Aggregation job that uses artifacts from both instances
+ report:
+ needs: [build-job, test-job]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Download all artifacts
+ uses: actions/download-artifact@v4
+
+ - name: Generate final report
+ run: |
+ echo "=== Final Report ==="
+ echo "Build output:"
+ cat build-output/build-output.txt
+ echo ""
+ echo "Test report:"
+ cat test-report/test-report.txt
+ echo ""
+ echo "Both jobs completed successfully using separate EC2 instances!"
diff --git a/.github/workflows/demo-matrix-wide.yml b/.github/workflows/demo-matrix-wide.yml
deleted file mode 100644
index cd1cff3..0000000
--- a/.github/workflows/demo-matrix-wide.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Demo – matrix on single runner
-on:
- workflow_dispatch:
- workflow_call: # Tested by `demos.yml`
-permissions:
- id-token: write # Required for AWS OIDC authentication
- contents: read # Required for actions/checkout
-jobs:
- ec2:
- uses: ./.github/workflows/runner.yml
- secrets: inherit
- test-matrix:
- needs: ec2
- runs-on: ${{ needs.ec2.outputs.id }}
- name: Test on ${{ matrix.os }}
- strategy:
- matrix:
- os: [ubuntu-20.04, ubuntu-22.04, ubuntu-24.04]
- steps:
- - name: Test on ${{ matrix.os }}
- run: |
- echo "Testing on ${{ matrix.os }}"
- echo "Runner: $(hostname)"
- echo "Time: $(date)"
- echo "Simulating work..."
- sleep 10
- echo "Done!"
- - name: Show system info
- run: |
- echo "=== System Information ==="
- uname -a
- echo "=== CPU Info ==="
- lscpu | head -10
- echo "=== Memory Info ==="
- free -h
diff --git a/.github/workflows/demo-multi-instance.yml b/.github/workflows/demo-multi-instance.yml
deleted file mode 100644
index 7e21ac2..0000000
--- a/.github/workflows/demo-multi-instance.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-name: Demo – multiple instances for parallel jobs
-on:
- workflow_dispatch:
-jobs:
- placeholder:
- runs-on: ubuntu-latest
- steps:
- - run: echo "Placeholder workflow – being developed in #2 / rw/hooks branch"
\ No newline at end of file
diff --git a/.github/workflows/demo-multi-job.yml b/.github/workflows/demo-multi-job.yml
deleted file mode 100644
index 9c584db..0000000
--- a/.github/workflows/demo-multi-job.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-name: Demo – multiple instances for different job types
-on:
- workflow_dispatch:
-jobs:
- placeholder:
- runs-on: ubuntu-latest
- steps:
- - run: echo "Placeholder workflow – being developed in #2 / rw/hooks branch"
\ No newline at end of file
diff --git a/.github/workflows/demo-multi-runner.yml b/.github/workflows/demo-multi-runner.yml
deleted file mode 100644
index b3e4b52..0000000
--- a/.github/workflows/demo-multi-runner.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-name: Demo – multiple runners on one instance
-on:
- workflow_dispatch:
-jobs:
- placeholder:
- runs-on: ubuntu-latest
- steps:
- - run: echo "Placeholder workflow – being developed in #2 / rw/hooks branch"
diff --git a/.github/workflows/demo-runners-mtx.yml b/.github/workflows/demo-runners-mtx.yml
new file mode 100644
index 0000000..775ca21
--- /dev/null
+++ b/.github/workflows/demo-runners-mtx.yml
@@ -0,0 +1,88 @@
+name: Demo – multiple runners on single instance
+on:
+ workflow_dispatch:
+ inputs:
+ runners_per_instance:
+ description: "Number of runners per EC2 instance"
+ required: false
+ type: string
+ default: "3"
+ sleep:
+ description: "Sleep duration in seconds for each job"
+ required: false
+ type: string
+ default: "10"
+ workflow_call: # Tested by `demos.yml`
+ inputs:
+ runners_per_instance:
+ required: false
+ type: string
+ default: "3"
+ sleep:
+ required: false
+ type: string
+ default: "10"
+
+permissions:
+ id-token: write # Required for AWS OIDC authentication
+  contents: read  # Required for actions/checkout; normally set by default, but must be explicitly specified when defining a custom `permissions` block.
+
+jobs:
+ ec2:
+ name: Launch 1 EC2 instance with ${{ inputs.runners_per_instance }} runners
+ uses: ./.github/workflows/runner.yml
+ secrets: inherit
+ with:
+ instance_count: "1"
+ runners_per_instance: ${{ inputs.runners_per_instance }}
+ ec2_image_id: ami-0e86e20dae9224db8 # Ubuntu 24.04 LTS x86_64 (us-east-1)
+ # Use a larger instance type to handle multiple runners
+ ec2_instance_type: t3.xlarge # 4 vCPUs, 16 GB RAM
+
+ parallel-jobs:
+ needs: ec2
+ strategy:
+ matrix:
+ # Parse the JSON array of runner objects for the matrix
+ runner: ${{ fromJson(needs.ec2.outputs.mtx) }}
+ runs-on: ${{ matrix.runner.id }}
+ name: 'Parallel job ${{ matrix.runner.idx }} (${{ matrix.runner.id }})'
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Runner info
+ run: |
+ echo "Running on runner with label: ${{ matrix.runner.id }}"
+ echo "Runner index: ${{ matrix.runner.idx }}"
+ echo "Runner index on instance: ${{ matrix.runner.runner_idx }}"
+ echo "Hostname: $(hostname)"
+ echo "Instance ID: $(curl -s http://169.254.169.254/latest/meta-data/instance-id)"
+ echo "Instance type: $(curl -s http://169.254.169.254/latest/meta-data/instance-type)"
+ echo "Region: $(curl -s http://169.254.169.254/latest/meta-data/placement/region)"
+ echo "Runner index: ${RUNNER_INDEX:-unknown}"
+ echo "Runner home: ${RUNNER_HOME:-unknown}"
+
+ - name: Simulate workload
+ run: |
+ # All runners on same instance run independently
+ DURATION=${{ inputs.sleep }}
+ echo "Starting at: $(date '+%Y-%m-%d %H:%M:%S.%3N')"
+ echo "Simulating workload for ${DURATION} seconds..."
+ sleep $DURATION
+ echo "Workload complete at: $(date '+%Y-%m-%d %H:%M:%S.%3N')"
+
+ - name: Verify parallelism
+ run: |
+ echo "This job ran in parallel with other matrix jobs on the SAME instance"
+ echo "With ${{ inputs.runners_per_instance }} runners and ${{ inputs.sleep }}s sleep:"
+ echo "- Sequential execution would take: $((${{ inputs.runners_per_instance }} * ${{ inputs.sleep }}))s"
+ echo "- Parallel execution should take: ~${{ inputs.sleep }}s (plus overhead)"
+
+ - name: Show resource sharing
+ run: |
+ echo "=== Resource Usage ==="
+ echo "CPU cores available: $(nproc)"
+ echo "Memory available: $(free -h | grep Mem | awk '{print $2}')"
+ echo "Load average: $(uptime | awk -F'load average:' '{print $2}')"
+ echo ""
+ echo "Note: All ${{ inputs.runners_per_instance }} runners share these resources"
diff --git a/.github/workflows/demos.yml b/.github/workflows/demos.yml
index 7f5e3de..08d9880 100644
--- a/.github/workflows/demos.yml
+++ b/.github/workflows/demos.yml
@@ -5,18 +5,29 @@ permissions:
id-token: write
contents: read
jobs:
- demo-00-minimal:
- uses: ./.github/workflows/demo-00-minimal.yml
+ # Minimal demos for quick testing
+ demo-dbg-minimal:
+ uses: ./.github/workflows/demo-dbg-minimal.yml
secrets: inherit
- demo-job-seq:
- uses: ./.github/workflows/demo-job-seq.yml
+ demo-gpu-minimal:
+ uses: ./.github/workflows/demo-gpu-minimal.yml
secrets: inherit
- demo-gpu-dbg:
- uses: ./.github/workflows/demo-gpu-dbg.yml
+
+ # Sweep demos for comprehensive testing
+ demo-cpu-sweep:
+ uses: ./.github/workflows/demo-cpu-sweep.yml
secrets: inherit
- demo-archs:
- uses: ./.github/workflows/demo-archs.yml
+ demo-gpu-sweep:
+ uses: ./.github/workflows/demo-gpu-sweep.yml
secrets: inherit
- demo-matrix-wide:
- uses: ./.github/workflows/demo-matrix-wide.yml
+
+ # Multi-instance/runner demos
+ demo-instances-mtx:
+ uses: ./.github/workflows/demo-instances-mtx.yml
+ secrets: inherit
+ demo-runners-mtx:
+ uses: ./.github/workflows/demo-runners-mtx.yml
+ secrets: inherit
+ demo-jobs-split:
+ uses: ./.github/workflows/demo-jobs-split.yml
secrets: inherit
diff --git a/.github/workflows/runner.yml b/.github/workflows/runner.yml
index fb49f5f..2ee6d7f 100644
--- a/.github/workflows/runner.yml
+++ b/.github/workflows/runner.yml
@@ -26,7 +26,7 @@ on:
type: string
default: "v2"
aws_region:
- description: "AWS region for EC2 instances (defaults to us-east-1)"
+ description: "AWS region for EC2 instances (falls back to vars.AWS_REGION, then us-east-1)"
required: false
type: string
-      default: "us-east-1"
@@ -38,35 +38,37 @@ on:
description: "CloudWatch Logs group name for streaming runner logs (leave empty to disable)"
required: false
type: string
- ec2_launch_role:
- description: "AWS role ARN to assume for EC2 operations (falls back to vars.EC2_LAUNCH_ROLE; one or the other is required)"
+ debug:
+ description: "Debug mode: false=off, true/trace=set -x only, number=set -x + sleep N minutes before shutdown"
required: false
type: string
- ec2_image_id:
- description: "AWS AMI ID to use (falls back to vars.EC2_IMAGE_ID)"
+      default: "false"
+ ec2_home_dir:
+ description: "Home directory on the AWS instance (falls back to vars.EC2_HOME_DIR, then auto-detection)"
required: false
type: string
- default: "ami-00096836009b16a22" # Deep Learning OSS Nvidia Driver AMI GPU PyTorch
- ec2_instance_type:
- description: "AWS instance type (falls back to vars.EC2_INSTANCE_TYPE)"
+ ec2_image_id:
+ description: "AWS AMI ID to use (required - must be provided via input or vars.EC2_IMAGE_ID)"
required: false
type: string
- default: "g4dn.xlarge"
ec2_instance_profile:
description: "Instance profile name to attach to launched EC2 instance (required for CloudWatch logging)"
required: false
type: string
- ec2_home_dir:
- description: "Home directory on the AWS instance (falls back to vars.EC2_HOME_DIR)"
+ ec2_instance_type:
+ description: "AWS instance type (falls back to vars.EC2_INSTANCE_TYPE, then t3.medium)"
required: false
type: string
- default: "/home/ubuntu"
ec2_key_name:
description: "Name of an EC2 key pair to use for SSH access (falls back to vars.EC2_KEY_NAME)"
required: false
type: string
+ ec2_launch_role:
+ description: "AWS role ARN to assume for EC2 operations (falls back to vars.EC2_LAUNCH_ROLE)"
+ required: false
+ type: string
ec2_root_device_size:
- description: "Root device size in GB (0 = use AMI default)"
+ description: "Root disk size in GB (0=AMI default, +N=AMI+N GB for testing, e.g. +2)"
required: false
type: string
default: "0"
@@ -78,43 +80,66 @@ on:
description: "Additional userdata script to run on instance startup (before runner starts)"
required: false
type: string
+ instance_count:
+ description: "Number of EC2 instances to create (for parallel jobs)"
+ required: false
+ type: string
+ default: "1"
+ instance_name:
+ description: "Name tag template for EC2 instances. Uses Python string.Template format with variables: $repo, $name (workflow filename stem), $workflow (full workflow name), $ref, $run (number), $idx (0-based instance index for multi-instance launches). Default: $repo/$name#$run (or $repo/$name#$run $idx for multi-instance)"
+ required: false
+ type: string
max_instance_lifetime:
description: "Maximum instance lifetime in minutes before automatic shutdown (falls back to vars.MAX_INSTANCE_LIFETIME, then 360 = 6 hours)"
required: false
type: string
+ name:
+ description: "Name for the launch job"
+ required: false
+ type: string
runner_grace_period:
- description: "Grace period in seconds before terminating instance after last job completes (default 120)"
+ description: "Grace period in seconds before terminating instance after last job completes (falls back to vars.RUNNER_GRACE_PERIOD, then 60)"
required: false
type: string
- default: "120"
runner_initial_grace_period:
- description: "Grace period in seconds before terminating instance if no jobs start (default 180)"
+ description: "Grace period in seconds before terminating instance if no jobs start (falls back to vars.RUNNER_INITIAL_GRACE_PERIOD, then 180)"
+ required: false
+ type: string
+ runner_poll_interval:
+ description: "How often (in seconds) to check termination conditions (falls back to vars.RUNNER_POLL_INTERVAL, then 10)"
required: false
type: string
- default: "180"
runner_registration_timeout:
- description: "Maximum seconds to wait for runner to register with GitHub (default 300 = 5 minutes)"
+ description: "Maximum seconds to wait for runner to register with GitHub (falls back to vars.RUNNER_REGISTRATION_TIMEOUT, then 360 = 6 minutes)"
required: false
type: string
- default: "300"
+ runners_per_instance:
+ description: "Number of runners to register per instance (each in separate directories to allow concurrent jobs)"
+ required: false
+ type: string
+ default: "1"
ssh_pubkey:
description: "SSH public key to add to authorized_keys (falls back to vars.SSH_PUBKEY)"
required: false
type: string
outputs:
id:
- description: "Instance ID for runs-on"
+ description: "Instance ID for runs-on (single instance)"
value: ${{ jobs.launch.outputs.id }}
+ mtx:
+ description: "JSON array of objects for matrix strategies"
+ value: ${{ jobs.launch.outputs.mtx }}
permissions:
- id-token: write # Required for AWS OIDC
+ id-token: write # Required for AWS OIDC
jobs:
launch:
- name: Launch ${{ inputs.ec2_instance_type || vars.EC2_INSTANCE_TYPE }}
+ name: ${{ inputs.name || format('Launch {0}', inputs.ec2_instance_type || vars.EC2_INSTANCE_TYPE) }}
runs-on: ubuntu-latest
outputs:
id: ${{ steps.aws-start.outputs.label }}
+ mtx: ${{ steps.aws-start.outputs.mtx }}
steps:
- name: Check EC2_LAUNCH_ROLE configuration
run: |
@@ -141,21 +166,27 @@ jobs:
id: aws-start
uses: ./
with:
- aws_region: ${{ inputs.aws_region }}
+ action_ref: ${{ inputs.action_ref }}
+ aws_region: ${{ inputs.aws_region || vars.AWS_REGION }}
aws_tags: ${{ inputs.aws_tags }}
cloudwatch_logs_group: ${{ inputs.cloudwatch_logs_group || vars.CLOUDWATCH_LOGS_GROUP }}
+ debug: ${{ inputs.debug }}
+ ec2_home_dir: ${{ inputs.ec2_home_dir || vars.EC2_HOME_DIR }}
ec2_image_id: ${{ inputs.ec2_image_id || vars.EC2_IMAGE_ID }}
- ec2_instance_type: ${{ inputs.ec2_instance_type || vars.EC2_INSTANCE_TYPE }}
ec2_instance_profile: ${{ inputs.ec2_instance_profile || vars.EC2_INSTANCE_PROFILE }}
- ec2_home_dir: ${{ inputs.ec2_home_dir || vars.EC2_HOME_DIR }}
+ ec2_instance_type: ${{ inputs.ec2_instance_type || vars.EC2_INSTANCE_TYPE }}
ec2_key_name: ${{ inputs.ec2_key_name || vars.EC2_KEY_NAME }}
ec2_root_device_size: ${{ inputs.ec2_root_device_size }}
ec2_security_group_id: ${{ inputs.ec2_security_group_id || vars.EC2_SECURITY_GROUP_ID }}
ec2_userdata: ${{ inputs.ec2_userdata }}
- max_instance_lifetime: ${{ inputs.max_instance_lifetime || vars.MAX_INSTANCE_LIFETIME || '360' }}
- runner_initial_grace_period: ${{ inputs.runner_initial_grace_period }}
- runner_grace_period: ${{ inputs.runner_grace_period }}
- runner_registration_timeout: ${{ inputs.runner_registration_timeout }}
+ instance_count: ${{ inputs.instance_count }}
+ instance_name: ${{ inputs.instance_name }}
+ max_instance_lifetime: ${{ inputs.max_instance_lifetime || vars.MAX_INSTANCE_LIFETIME }}
+ runner_grace_period: ${{ inputs.runner_grace_period || vars.RUNNER_GRACE_PERIOD }}
+ runner_initial_grace_period: ${{ inputs.runner_initial_grace_period || vars.RUNNER_INITIAL_GRACE_PERIOD }}
+ runner_poll_interval: ${{ inputs.runner_poll_interval || vars.RUNNER_POLL_INTERVAL }}
+ runner_registration_timeout: ${{ inputs.runner_registration_timeout || vars.RUNNER_REGISTRATION_TIMEOUT }}
+ runners_per_instance: ${{ inputs.runners_per_instance }}
ssh_pubkey: ${{ inputs.ssh_pubkey || vars.SSH_PUBKEY }}
env:
GH_PAT: ${{ secrets.GH_SA_TOKEN }}
diff --git a/.github/workflows/test-disk-full.yml b/.github/workflows/test-disk-full.yml
new file mode 100644
index 0000000..c7ec32f
--- /dev/null
+++ b/.github/workflows/test-disk-full.yml
@@ -0,0 +1,210 @@
+name: Test Disk Full
+
+on:
+ workflow_dispatch:
+ inputs:
+ disk_size:
+ description: 'Root disk size in GB (0=AMI default, +N=AMI+N GB, e.g. +2 for AMI+2GB)'
+ required: false
+ type: string
+ default: '+2' # 2GB more than the AMI size
+ fill_strategy:
+ description: 'How to fill disk: gradual, immediate, or during-tests'
+ required: false
+ type: choice
+ options:
+ - gradual
+ - immediate
+ - during-tests
+ default: gradual
+ debug:
+ description: 'Debug mode: false=off, true/trace=trace only, number=trace+sleep N minutes'
+ required: false
+ type: string
+ default: 'true'
+ instance_type:
+ description: 'Instance type'
+ required: false
+ type: string
+ default: 't3.medium'
+ max_instance_lifetime:
+ description: 'Max instance lifetime in minutes (default: 15)'
+ required: false
+ type: string
+ default: '15'
+
+permissions:
+ id-token: write
+ contents: read
+
+jobs:
+ launch:
+ name: Launch runner
+ uses: ./.github/workflows/runner.yml
+ secrets:
+ GH_SA_TOKEN: ${{ secrets.GH_SA_TOKEN }}
+ with:
+ ec2_image_id: ami-0ca5a2f40c2601df6 # Ubuntu 24.04 x86_64 in us-east-1
+ ec2_instance_type: ${{ inputs.instance_type }}
+ ec2_root_device_size: ${{ inputs.disk_size }}
+ debug: ${{ inputs.debug }}
+ max_instance_lifetime: ${{ inputs.max_instance_lifetime }}
+
+ test-disk-full:
+ name: Fill disk (${{ inputs.fill_strategy }})
+ needs: launch
+ runs-on: ${{ needs.launch.outputs.id }}
+
+ steps:
+ - name: Check initial disk usage
+ run: |
+ echo "=== Initial disk usage ==="
+ df -h /
+ echo ""
+ echo "=== Largest directories ==="
+ du -sh /* 2>/dev/null | sort -hr | head -10 || true
+
+ - name: Fill disk immediately
+ if: inputs.fill_strategy == 'immediate'
+ run: |
+ echo "=== Filling disk immediately ==="
+ # Create a large file that leaves only ~100MB free
+ AVAILABLE=$(df / | awk 'NR==2 {print int($4/1024)-100}')
+ if [ $AVAILABLE -gt 0 ]; then
+ echo "Creating ${AVAILABLE}MB file to fill disk..."
+ dd if=/dev/zero of=/tmp/disk_filler bs=1M count=$AVAILABLE 2>/dev/null || true
+ fi
+ echo "=== Disk usage after fill ==="
+ df -h /
+
+ - name: Fill disk gradually
+ if: inputs.fill_strategy == 'gradual'
+ run: |
+ echo "=== Filling disk gradually ==="
+ COUNTER=0
+ while true; do
+ AVAILABLE=$(df / | awk 'NR==2 {print int($4/1024)}')
+ if [ $AVAILABLE -lt 500 ]; then
+ echo "Disk nearly full (${AVAILABLE}MB remaining), creating final files..."
+ # Fill remaining space with smaller files
+ for i in {1..10}; do
+ dd if=/dev/zero of=/tmp/gradual_fill_${COUNTER}_${i} bs=1M count=50 2>/dev/null || break
+ done
+ break
+ fi
+ echo "Creating 500MB file (${AVAILABLE}MB currently available)..."
+ dd if=/dev/zero of=/tmp/gradual_fill_${COUNTER} bs=1M count=500 2>/dev/null || break
+ COUNTER=$((COUNTER + 1))
+ df -h /
+ sleep 2
+ done
+ echo "=== Final disk usage ==="
+ df -h /
+
+ - name: Setup Python project for test
+ if: inputs.fill_strategy == 'during-tests'
+ run: |
+ echo "=== Setting up Python project that will fill disk during tests ==="
+ cat > setup.py << 'EOF'
+ from setuptools import setup, find_packages
+
+ setup(
+ name="disk-filler-test",
+ version="0.1.0",
+ packages=find_packages(),
+ python_requires=">=3.8",
+ install_requires=[
+ "pytest>=7.0.0",
+ "numpy>=1.20.0", # Large package
+ "pandas>=1.3.0", # Large package
+ "scipy>=1.7.0", # Large package
+ "matplotlib>=3.4.0", # Large package
+ "scikit-learn>=1.0.0", # Large package
+ "torch>=2.0.0", # Very large package
+ "transformers>=4.30.0", # Very large package
+ ],
+ )
+ EOF
+
+ mkdir -p tests
+ cat > tests/test_disk_filler.py << 'EOF'
+ import os
+ import tempfile
+ import pytest
+
+ def test_create_large_arrays():
+ """Create large arrays to consume memory and disk (via swap/tmp)"""
+ import numpy as np
+ arrays = []
+ for i in range(10):
+          # Create ~800 MB arrays (1024*1024*100 float64 values)
+ arr = np.random.random((1024, 1024, 100))
+ arrays.append(arr)
+ # Also write to temp file
+ with tempfile.NamedTemporaryFile(delete=False, dir='/tmp') as f:
+ np.save(f, arr)
+ print(f"Created array {i+1}/10")
+
+ def test_generate_files():
+ """Generate many temporary files"""
+ for i in range(100):
+ with tempfile.NamedTemporaryFile(delete=False, dir='/tmp',
+ prefix=f'test_file_{i}_') as f:
+ # Write 10MB to each file
+ f.write(os.urandom(10 * 1024 * 1024))
+ if i % 10 == 0:
+ print(f"Generated {i+1}/100 files")
+
+ def test_disk_space_check():
+ """Check if we're out of disk space"""
+ import shutil
+ usage = shutil.disk_usage('/')
+ percent_used = (usage.used / usage.total) * 100
+ print(f"Disk usage: {percent_used:.1f}%")
+ print(f"Free space: {usage.free / (1024**3):.2f} GB")
+ # This test "passes" even when disk is full to see behavior
+ assert percent_used > 0
+ EOF
+
+ echo "=== Installing packages (this will consume disk space) ==="
+ pip install -e . || true
+
+ echo "=== Running tests that fill disk ==="
+ pytest tests/ -v || true
+
+ echo "=== Final disk usage ==="
+ df -h /
+
+ - name: Try to write when disk is full
+ if: always()
+ run: |
+ echo "=== Testing write operations with full disk ==="
+ # Try various write operations to see what fails
+ echo "Test" > /tmp/test_write.txt 2>&1 || echo "Failed to write to /tmp"
+ echo "Test" > ~/test_write.txt 2>&1 || echo "Failed to write to home"
+ touch /tmp/test_touch 2>&1 || echo "Failed to touch file"
+ mkdir /tmp/test_mkdir 2>&1 || echo "Failed to create directory"
+
+ # Check if we can still run commands
+ echo "=== Can we still run basic commands? ==="
+ date || echo "date command failed"
+ pwd || echo "pwd command failed"
+ whoami || echo "whoami command failed"
+
+ - name: Monitor termination behavior
+ if: always()
+ run: |
+ echo "=== Monitoring termination behavior ==="
+ echo "This job will complete soon. Watch the runner logs to see if:"
+ echo "1. The termination check detects disk full state"
+ echo "2. The instance can successfully shut down"
+ echo "3. The robust shutdown methods are triggered"
+ echo ""
+ echo "Current disk usage:"
+ df -h /
+ echo ""
+ echo "Checking runner processes:"
+ ps aux | grep -E '[R]unner|[c]heck-runner-termination' || true
+ echo ""
+ echo "Last entries in termination check log:"
+ tail -20 /tmp/termination-check.log 2>/dev/null || echo "No termination check log found"
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000..c1afd32
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,166 @@
+# ec2-gha
+
+**ec2-gha** is a GitHub Action for creating ephemeral, self-hosted GitHub Actions runners on AWS EC2 instances. These runners support GPU workloads, automatically terminate when idle, and can handle multi-job workflows.
+
+## Common Development Commands
+
+- Don't explicitly set `AWS_PROFILE` (e.g. to `oa-ci-dev`) in your commands; assume it's set for you out of band, and verify it if needed.
+- Instance userdata (rendered form of `src/ec2_gha/templates/user-script.sh.templ`) has to stay under 16KiB. We remove comments while "rendering", so the `templ` itself may be a bit over the limit.
+
+### Testing
+```bash
+# Install test dependencies
+pip install '.[test]'
+
+# Run tests matching a pattern. You don't need to do this very often though.
+cd tests/ && pytest -v -m 'not slow'
+
+# Update `syrupy` snapshots, then re-run tests to verify they pass with the
+# (possibly-updated) snapshot values. Just a wrapper for:
+#   pytest --snapshot-update -m 'not slow'
+#   pytest -vvv -m 'not slow' .
+# Can also be used with `git rebase -x` (mostly done manually, when cleaning up commits).
+scripts/update-snapshots.sh
+```
+
+### Linting
+```bash
+# Ruff is configured in pyproject.toml
+ruff check src/
+ruff format src/
+```
+
+## Key Architecture Components
+
+### GitHub Actions Integration
+- **`.github/workflows/runner.yml`**:
+ - Main entrypoint, reusable workflow callable via external workflows' `job.uses`
+ - Wraps the `action.yml` composite action
+ - Outputs an `id` that subsequent jobs can pass to `job.runs-on`
+- **`action.yml`**:
+ - Composite action, wraps `Dockerfile` / `ec2_gha` Python module.
+ - ≈20 input parameters, including:
+ - AWS/EC2 configs (instance type, AMI, optional CloudWatch log group, keypair/pubkey for SSH-debugging, etc.)
+ - GitHub runner configurations (timeouts / poll intervals, labels, etc.)
+ - Outputs:
+ - `mtx` (array of objects for matrix strategies)
+ - When only one instance/runner is created, also outputs `label` and `instance-id`
+
+### Core Python Modules
+- **`src/ec2_gha/__main__.py`**: Entry point that parses environment variables and initiates runner creation
+- **`src/ec2_gha/start.py`**: Contains `StartAWS` class handling EC2 operations, instance lifecycle, and template rendering
+
+### Template and Script System
+- **`src/ec2_gha/templates/user-script.sh.templ`**: Main userdata template using Python's `string.Template` format
+- **`src/ec2_gha/scripts/runner-setup.sh`**: Main runner setup script fetched by userdata
+- **`src/ec2_gha/scripts/job-started-hook.sh`**: GitHub Actions hook for job start events
+- **`src/ec2_gha/scripts/job-completed-hook.sh`**: GitHub Actions hook for job completion
+- **`src/ec2_gha/scripts/check-runner-termination.sh`**: Periodic termination check script
+
+## Versioning and Security
+
+### Action Ref Resolution
+`runner.yml` requires an `action_ref` parameter that gets resolved to a Git SHA for security:
+1. Python code resolves branch/tag references to immutable SHAs
+2. All scripts are fetched using the resolved SHA to prevent TOCTOU attacks
+3. This ensures the exact code version is used throughout execution
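+
+A minimal sketch of the resolution step, assuming the standard GitHub REST `GET /repos/{repo}/commits/{ref}` endpoint (the helper name is hypothetical; the real logic lives in the Python modules):
+
+```python
+import json
+import urllib.request
+
+def resolve_action_ref(repo: str, ref: str, token: str) -> str:
+    """Resolve a branch/tag ref (e.g. "v2") to an immutable commit SHA."""
+    req = urllib.request.Request(
+        f"https://api.github.com/repos/{repo}/commits/{ref}",
+        headers={
+            "Authorization": f"Bearer {token}",
+            "Accept": "application/vnd.github+json",
+        },
+    )
+    with urllib.request.urlopen(req) as resp:
+        return json.load(resp)["sha"]  # scripts are then fetched at this SHA
+```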
+
+### Version Strategy
+- Main branch (`v2`) contains stable releases
+- `action_ref` defaults to the branch name in `runner.yml`
+- Patch/minor version tags like `v2.0.0`, `v2.1.0` can be created from the `v2` branch
+
+`ec2-gha`'s initial release uses a `v2` branch because the upstream `start-aws-gha-runner` has published some `v1*` tags.
+
+### Usage Example
+```yaml
+# Caller workflow uses the v2 branch
+uses: Open-Athena/ec2-gha/.github/workflows/runner.yml@v2
+# The runner.yml on v2 branch has action_ref default of "v2"
+# This gets resolved to a SHA at runtime for security
+```
+
+For complete usage examples, see `.github/workflows/demo*.yml`.
+
+## Development Guidelines
+
+### Template Modifications
+When modifying the userdata template (`user-script.sh.templ`):
+- Use `$variable` or `${variable}` syntax for template substitutions
+- Escape literal `$` as `$$`
+- Test template rendering in `tests/test_start.py`
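+
+For example, `string.Template` substitution behaves as follows (illustrative only):
+
+```python
+from string import Template
+
+tmpl = Template("REPO=$repo\necho 'cost: $$5'")  # `$$` renders a literal `$`
+print(tmpl.substitute(repo="Open-Athena/ec2-gha"))
+# REPO=Open-Athena/ec2-gha
+# echo 'cost: $5'
+```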
+
+### Environment Variables
+The action uses a hierarchical input system:
+1. Direct workflow inputs (highest priority)
+2. Repository/organization variables (`vars.*`)
+3. Default values
+
+GitHub Actions declares env vars prefixed with `INPUT_` for each input, which `start.py` reads.
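+
+A minimal sketch of the Python side (the real parsing lives in `src/ec2_gha/__main__.py` and may differ in detail); tiers 1 and 2 of the hierarchy are resolved by the workflow YAML (`inputs.* || vars.*`) before the env var is set:
+
+```python
+import os
+
+def get_input(name: str, default: str = "") -> str:
+    """Read a composite-action input from its INPUT_-prefixed env var."""
+    return os.environ.get(f"INPUT_{name.upper()}", "") or default
+
+instance_type = get_input("ec2_instance_type", "t3.medium")
+```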
+
+### Error Handling
+- Use descriptive error messages that help users understand AWS/GitHub configuration issues
+- Always clean up AWS resources on failure (instances, etc.)
+- Log important operations to assist debugging
+
+### Instance Lifecycle Management
+
+#### Termination Logic
+The runner uses a polling-based approach to determine when to terminate (a sketch of the per-poll decision follows this list):
+
+1. **Job Tracking**: GitHub runner hooks track job lifecycle
+ - `job-started-hook.sh`: Creates JSON files in `/var/run/github-runner-jobs/`
+ - `job-completed-hook.sh`: Removes job files and updates activity timestamp
+ - Heartbeat mechanism: Active jobs touch their files periodically
+
+2. **Periodic Polling**: Systemd timer runs `check-runner-termination.sh` every `runner_poll_interval` seconds (default: 10s)
+ - Checks for running jobs by verifying both Runner.Listener AND Runner.Worker processes
+ - Detects stale job files (older than 3× poll interval, likely disk full)
+ - Handles Worker process death (job completed but hook couldn't run)
+ - Grace periods:
+ - `runner_initial_grace_period` (default: 180s) - Before first job
+ - `runner_grace_period` (default: 60s) - Between jobs
+
+3. **Robustness Features**:
+ - **Process Monitoring**: Distinguishes between idle Listener and active Worker
+ - **Fallback Termination**: Multiple shutdown methods with increasing force
+ - **Hook Script Separation**: Scripts fetched from GitHub for maintainability
+
+4. **Clean Shutdown Sequence**:
+ - Stop runner processes gracefully (SIGINT with timeout)
+ - Deregister all runners from GitHub
+ - Flush CloudWatch logs (if configured)
+ - Execute shutdown with fallbacks (`systemctl poweroff`, `shutdown -h now`, `halt -f`)
+
+### AWS Resource Tagging
+By default, launched EC2 instances are tagged with:
+- `Name`: `f"{repo}/{workflow}#{run}"`
+- `Repository`: GitHub repository name
+- `Workflow`: Workflow name
+- `URL`: Direct link to the GitHub Actions run
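+
+Illustratively, the default tag set amounts to something like this (helper name and signature are hypothetical):
+
+```python
+def default_tags(repo: str, workflow: str, run_number: str, run_id: str) -> list[dict]:
+    """Build the default EC2 tags described above."""
+    return [
+        {"Key": "Name", "Value": f"{repo}/{workflow}#{run_number}"},
+        {"Key": "Repository", "Value": repo},
+        {"Key": "Workflow", "Value": workflow},
+        {"Key": "URL", "Value": f"https://github.com/{repo}/actions/runs/{run_id}"},
+    ]
+```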
+
+## Important Implementation Details
+
+### Multi-Job Support
+- Runners are non-ephemeral to support instance reuse
+- Job tracking via GitHub runner hooks (job-started, job-completed)
+- Grace period prevents premature termination between sequential jobs
+
+### Security Considerations
+- Never log or expose AWS credentials or GitHub tokens
+- Use IAM instance profiles for EC2 API access (not credentials)
+- Support OIDC authentication for GitHub Actions
+
+### CloudWatch Integration
+When implementing CloudWatch features:
+- Logs are streamed from specific paths defined in userdata template
+- Instance profile (separate from launch role) required for CloudWatch API access
+- Log group must exist before instance creation (see the example below)
+- A dpkg lock wait (up to 2 minutes) ensures the CloudWatch agent installation succeeds on Ubuntu AMIs, where cloud-init or unattended-upgrades may hold the dpkg lock
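+
+For example, the log group can be created once, up front, with boto3 (using the default group name from `log_constants.py`):
+
+```python
+import boto3
+
+logs = boto3.client("logs")
+try:
+    logs.create_log_group(logGroupName="/aws/ec2/github-runners")
+except logs.exceptions.ResourceAlreadyExistsException:
+    pass  # fine if it already exists
+```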
+
+## Testing Checklist
+
+Before committing changes:
+1. Run tests: `cd tests/ && pytest -v -m 'not slow'`
+2. Verify template rendering doesn't break
diff --git a/README.md b/README.md
index a79d201..d5714a7 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,7 @@ Run GitHub Actions on ephemeral EC2 instances.
**TOC**
- [Quick Start](#quick-start)
+- [Demos](#demos)
- [Inputs](#inputs)
- [Required](#required)
- [`secrets.GH_SA_TOKEN`](#gh-sa-token)
@@ -12,8 +13,9 @@ Run GitHub Actions on ephemeral EC2 instances.
- [Outputs](#outputs)
- [Technical Details](#technical)
- [Runner Lifecycle](#lifecycle)
- - [Multi-Job Workflows](#multi-job)
- - [How Termination Works](#termination)
+ - [Parallel Jobs (Multiple Instances)](#parallel)
+ - [Multi-Job Workflows (Sequential)](#multi-job)
+ - [Termination logic](#termination)
- [CloudWatch Logs Integration](#cloudwatch)
- [Debugging and Troubleshooting](#debugging)
- [SSH Access](#ssh)
@@ -44,13 +46,36 @@ jobs:
# - `secrets.GH_SA_TOKEN` (GitHub token with repo admin access)
# - `vars.EC2_LAUNCH_ROLE` (role with GitHub OIDC access to this repo)
secrets: inherit
+ with:
+ ec2_instance_type: g4dn.xlarge
+ ec2_image_id: ami-00096836009b16a22 # Deep Learning OSS Nvidia Driver AMI GPU PyTorch
gpu-test:
needs: ec2
- runs-on: ${{ needs.ec2.outputs.instance }}
+ runs-on: ${{ needs.ec2.outputs.id }}
steps:
- run: nvidia-smi # GPU node!
```
+## Demos
+
+Example workflows demonstrating ec2-gha capabilities are in [`.github/workflows/`](.github/workflows/):
+
+[][demos#25]
+
+- [`demo-dbg-minimal.yml`](.github/workflows/demo-dbg-minimal.yml) - Configurable debugging instance
+- [`demo-gpu-minimal.yml`](.github/workflows/demo-gpu-minimal.yml) - Basic GPU test
+- [`demo-cpu-sweep.yml`](.github/workflows/demo-cpu-sweep.yml) - OS/arch matrix (Ubuntu, Debian, AL2/AL2023 on x86/ARM)
+- [`demo-gpu-sweep.yml`](.github/workflows/demo-gpu-sweep.yml) - GPU instances (g4dn, g5, g6, g5g) with PyTorch
+- [`demo-instances-mtx.yml`](.github/workflows/demo-instances-mtx.yml) - Multiple instances for parallel jobs
+- [`demo-runners-mtx.yml`](.github/workflows/demo-runners-mtx.yml) - Multiple runners on single instance
+- [`demo-jobs-split.yml`](.github/workflows/demo-jobs-split.yml) - Different job types on separate instances
+
+### Test Suite
+- [`demos.yml`](.github/workflows/demos.yml) - Runs all demos for regression testing
+- [`test-disk-full.yml`](.github/workflows/test-disk-full.yml) - Stress test for disk-full scenarios with configurable fill strategies
+
+See [`.github/workflows/README.md`](.github/workflows/README.md) for detailed descriptions of each demo.
+
## Inputs
### Required
@@ -79,29 +104,35 @@ The `EC2_LAUNCH_ROLE` is passed to [aws-actions/configure-aws-credentials]; if y
Many of these fall back to corresponding `vars.*` (if not provided as `inputs`):
-- `action_ref` - ec2-gha Git ref to checkout (branch/tag/SHA); auto-detected if not specified
+- `action_ref` - ec2-gha Git ref to checkout (branch/tag/SHA); automatically resolved to a SHA for security
+- `aws_region` - AWS region for EC2 instances (falls back to `vars.AWS_REGION`, default: `us-east-1`)
- `cloudwatch_logs_group` - CloudWatch Logs group name for streaming logs (falls back to `vars.CLOUDWATCH_LOGS_GROUP`)
- `ec2_home_dir` - Home directory (default: `/home/ubuntu`)
-- `ec2_image_id` - AMI ID (default: Deep Learning AMI)
+- `ec2_image_id` - AMI ID (default: Ubuntu 24.04 LTS)
- `ec2_instance_profile` - IAM instance profile name for EC2 instances
- Useful for on-instance debugging [via SSH][SSH access]
- Required for [CloudWatch logging][cw]
- Falls back to `vars.EC2_INSTANCE_PROFILE`
- See [Appendix: IAM Role Setup](#iam-setup-appendix) for more details and sample setup code
-- `ec2_instance_type` - Instance type (default: `g4dn.xlarge`)
+- `ec2_instance_type` - Instance type (default: `t3.medium`)
- `ec2_key_name` - EC2 key pair name (for [SSH access])
-- `ec2_root_device_size` - Root device size in GB (default: 0 = use AMI default)
+- `instance_count` - Number of instances to create (default: 1, for parallel jobs)
+- `instance_name` - Name tag template for EC2 instances. Uses Python string.Template format with variables: `$repo`, `$name` (workflow filename stem), `$workflow` (full workflow name), `$ref`, `$run` (number), `$idx` (0-based instance index for multi-instance launches). Default: `$repo/$name#$run` (or `$repo/$name#$run $idx` for multi-instance)
+- `debug` - Debug mode: `false`=off, `true`/`trace`=set -x only, number=set -x + sleep N minutes before shutdown (for troubleshooting)
+- `ec2_root_device_size` - Root disk size in GB: `0`=AMI default, `+N`=AMI+N GB for testing (e.g., `+2` for AMI size + 2GB), or explicit size in GB (see the sketch after this list)
- `ec2_security_group_id` - Security group ID (required for [SSH access], should expose inbound port 22)
- `max_instance_lifetime` - Maximum instance lifetime in minutes before automatic shutdown (falls back to `vars.MAX_INSTANCE_LIFETIME`, default: 360 = 6 hours; generally should not be relevant, instances shut down within 1-2mins of jobs completing)
-- `runner_grace_period` - Grace period in seconds before terminating (default: 120)
+- `runner_grace_period` - Grace period in seconds before terminating after last job completes (default: 60)
- `runner_initial_grace_period` - Grace period in seconds before terminating instance if no jobs start (default: 180)
+- `runner_poll_interval` - How often (in seconds) to check termination conditions (default: 10)
- `ssh_pubkey` - SSH public key (for [SSH access])
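+
+A rough sketch of how these values could be interpreted (illustrative only; the actual parsing lives in the `ec2_gha` Python module):
+
+```python
+def resolve_root_size(spec: str, ami_default_gb: int) -> int:
+    """'0' or empty -> AMI default; '+N' -> AMI default + N GB; otherwise explicit GB."""
+    spec = spec.strip()
+    if spec in ("", "0"):
+        return ami_default_gb
+    if spec.startswith("+"):
+        return ami_default_gb + int(spec[1:])
+    return int(spec)
+
+# resolve_root_size("+2", 45) -> 47; resolve_root_size("100", 45) -> 100
+```
+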
## Outputs
-| Name | Description |
-|------|---------------------------------------------|
-| id | Value to pass to subsequent jobs' `runs-on` |
+| Name | Description |
+|------|--------------------------------------------------------------------------|
+| id | Single runner label for `runs-on` (when `instance_count=1`) |
+| mtx | JSON array of objects for matrix strategies (each has: idx, id, instance_id, instance_idx, runner_idx) |
## Technical Details
@@ -114,7 +145,34 @@ This workflow creates EC2 instances with GitHub Actions runners that:
- Use [GitHub's native runner hooks][hooks] for job tracking
- Optionally support [SSH access] and [CloudWatch logging][cw] (for debugging)
-### Multi-Job Workflows
+### Parallel Jobs (Multiple Instances)
+
+Create multiple EC2 instances for parallel execution using `instance_count`:
+
+```yaml
+jobs:
+ ec2:
+ uses: Open-Athena/ec2-gha/.github/workflows/runner.yml@main
+ secrets: inherit
+ with:
+ instance_count: "3" # Create 3 instances
+
+ parallel-jobs:
+ needs: ec2
+ strategy:
+ matrix:
+ runner: ${{ fromJson(needs.ec2.outputs.mtx) }}
+ runs-on: ${{ matrix.runner.id }}
+ steps:
+ - run: echo "Running on runner ${{ matrix.runner.idx }} (instance ${{ matrix.runner.instance_idx }})"
+```
+
+Each instance gets a unique runner label and can execute jobs independently. This is useful for:
+- Matrix builds that need isolated environments
+- Parallel testing across different configurations
+- Distributed workloads
+
+### Multi-Job Workflows (Sequential)
The runner supports multiple sequential jobs on the same instance, e.g.:
@@ -124,36 +182,55 @@ jobs:
uses: Open-Athena/ec2-gha/.github/workflows/runner.yml@main
secrets: inherit
with:
- runner_grace_period: "120" # 2 minutes between jobs
+ runner_grace_period: "120" # Max idle time before termination (seconds)
prepare:
needs: ec2
- runs-on: ${{ needs.ec2.outputs.instance }}
+ runs-on: ${{ needs.ec2.outputs.id }}
steps:
- run: echo "Preparing environment"
train:
needs: [ec2, prepare]
- runs-on: ${{ needs.ec2.outputs.instance }}
+ runs-on: ${{ needs.ec2.outputs.id }}
steps:
- run: echo "Training model"
evaluate:
needs: [ec2, train]
- runs-on: ${{ needs.ec2.outputs.instance }}
+ runs-on: ${{ needs.ec2.outputs.id }}
steps:
- run: echo "Evaluating results"
```
-(see also [demo-job-seq], [demo-archs], [demo-matrix-wide])
+(see also demo workflows in [`.github/workflows/`](.github/workflows/))
+
+### Termination logic
+
+The runner uses [GitHub Actions runner hooks][hooks] to track job lifecycle and determine when to terminate:
-### How Termination Works
+#### Job Tracking
+- **Start/End Hooks**: Create/remove JSON files in `/var/run/github-runner-jobs/` when jobs start/end
+- **Heartbeat Mechanism**: Active jobs update their file timestamps periodically to detect stuck jobs
+- **Process Monitoring**: Checks both Runner.Listener and Runner.Worker processes to verify jobs are truly running
+- **Activity Tracking**: Updates `/var/run/github-runner-last-activity` timestamp on job events
-1. [GitHub Actions runner hooks][hooks] track job lifecycle events
-2. When a job completes, the hook checks if other jobs are running
-3. If no jobs are active, a termination check is scheduled after the grace period
-4. The instance terminates if still idle when the check runs
-5. New jobs starting within the grace period cancel the termination
-6. Before shutdown, the runner process is gracefully stopped to remove itself from GitHub
+#### Termination Conditions
+The systemd timer checks every `runner_poll_interval` seconds (default: 10s) and terminates when:
+1. No active jobs are running
+2. Idle time exceeds the grace period:
+ - `runner_initial_grace_period` (default: 180s) - Before first job
+ - `runner_grace_period` (default: 60s) - Between jobs
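+
+A simplified sketch of this decision (illustrative; the real check is `check-runner-termination.sh`, driven by a systemd timer):
+
+```python
+import time
+
+def should_terminate(running_jobs: int, last_activity: float, has_run_job: bool,
+                     grace_period: int = 60, initial_grace_period: int = 180) -> bool:
+    """No active jobs, and idle longer than the applicable grace period."""
+    idle = time.time() - last_activity
+    grace = grace_period if has_run_job else initial_grace_period
+    return running_jobs == 0 and idle > grace
+```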
+
+#### Robustness Features
+- **Stale Job Detection**: Removes job files older than 3× poll interval (likely disk full)
+- **Worker Process Detection**: Distinguishes between idle runners and active jobs
+- **Multiple Shutdown Methods**: Uses robust termination with fallback to `shutdown -h now`
+
+#### Clean Shutdown Sequence
+1. Stop runner processes gracefully (SIGINT)
+2. Deregister runners from GitHub
+3. Flush CloudWatch logs (if configured)
+4. Execute shutdown with multiple fallback methods
### CloudWatch Logs Integration
@@ -237,17 +314,17 @@ Once connected to the instance:
- Uses non-ephemeral runners to support instance-reuse across jobs
- Uses activity-based termination with systemd timer checks every 30 seconds
-- Terminates only after runner_grace_period seconds of inactivity (no race conditions)
-- Sets maximum instance lifetime (configurable via `max_instance_lifetime`, default: 6 hours)
+- Terminates only after `runner_grace_period` seconds of inactivity (no race conditions)
+- Also terminates after `max_instance_lifetime`, as a fail-safe (default: 6 hours)
- Supports custom AMIs with pre-installed dependencies
### Default AWS Tags
The action automatically adds these tags to EC2 instances (unless already provided):
- `Name`: Auto-generated from repository/workflow/run-number (e.g., "my-repo/test-workflow/#123")
-- `repository`: GitHub repository full name
-- `workflow`: Workflow name
-- `gha_url`: Direct link to the GitHub Actions run
+- `Repository`: GitHub repository full name
+- `Workflow`: Workflow name
+- `URL`: Direct link to the GitHub Actions run
These help with debugging and cost tracking. You can override any of these by providing your own tags with the same keys.
@@ -526,19 +603,98 @@ gh variable set EC2_INSTANCE_PROFILE --body "GitHubRunnerEC2Profile"
## Acknowledgements
-This repo borrows from or reuses:
-- [omsf/start-aws-gha-runner] (upstream; this fork adds self-termination and various features)
-- [related-sciences/gce-github-runner] (self-terminating GCE runner, using [job hooks][hooks])
+- This repo forked [omsf/start-aws-gha-runner]; it adds self-termination (bypassing [omsf/stop-aws-gha-runner]) and various features.
+- [machulav/ec2-github-runner] is similar, but [requires][egr ex] separate "start" and "stop" jobs.
+- [related-sciences/gce-github-runner] is a self-terminating GCE runner, using [job hooks][hooks].
+
+Here's a diff porting [ec2-github-runner][machulav/ec2-github-runner]'s README [example][egr ex] to ec2-gha:
+```diff
+ name: do-the-job
+ on: pull_request
+ jobs:
+- start-runner:
++ ec2:
+ name: Start self-hosted EC2 runner
+- runs-on: ubuntu-latest
+- outputs:
+- label: ${{ steps.start-ec2-runner.outputs.label }}
+- ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }}
+- steps:
+- - name: Configure AWS credentials
+- uses: aws-actions/configure-aws-credentials@v4
++ uses: Open-Athena/ec2-gha/.github/workflows/runner.yml@v2
+ with:
+- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+- aws-region: ${{ secrets.AWS_REGION }}
+- - name: Start EC2 runner
+- id: start-ec2-runner
+- uses: machulav/ec2-github-runner@v2
+- with:
+- mode: start
+- github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
+- ec2-image-id: ami-123
+- ec2-instance-type: t3.nano
+- subnet-id: subnet-123
+- security-group-id: sg-123
+- iam-role-name: my-role-name # optional, requires additional permissions
+- aws-resource-tags: > # optional, requires additional permissions
+- [
+- {"Key": "Name", "Value": "ec2-github-runner"},
+- {"Key": "GitHubRepository", "Value": "${{ github.repository }}"}
+- ]
+- block-device-mappings: > # optional, to customize EBS volumes
+- [
+- {"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 100, "VolumeType": "gp3"}}
+- ]
++ ec2_image_id: ami-123
++ ec2_instance_type: t3.nano
++ ec2_root_device_size: 100
++ ec2_subnet_id: subnet-123
++ ec2_security_group_id: sg-123
++ ec2_launch_role: my-role-name
++ secrets:
++ GH_SA_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
+ do-the-job:
+ name: Do the job on the runner
+-    needs: start-runner # required to start the main job when the runner is ready
++    needs: ec2 # required to start the main job when the runner is ready
+-    runs-on: ${{ needs.start-runner.outputs.label }} # run the job on the newly created runner
++    runs-on: ${{ needs.ec2.outputs.id }} # run the job on the newly created runner
+ steps:
+ - name: Hello World
+ run: echo 'Hello World!'
+- stop-runner:
+- name: Stop self-hosted EC2 runner
+- needs:
+- - start-runner # required to get output from the start-runner job
+- - do-the-job # required to wait when the main job is done
+- runs-on: ubuntu-latest
+- if: ${{ always() }} # required to stop the runner even if the error happened in the previous jobs
+- steps:
+- - name: Configure AWS credentials
+- uses: aws-actions/configure-aws-credentials@v4
+- with:
+- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+- aws-region: ${{ secrets.AWS_REGION }}
+- - name: Stop EC2 runner
+- uses: machulav/ec2-github-runner@v2
+- with:
+- mode: stop
+- github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
+- label: ${{ needs.start-runner.outputs.label }}
+- ec2-instance-id: ${{ needs.start-runner.outputs.ec2-instance-id }}
+```
[`runner.yml`]: .github/workflows/runner.yml
-[demo-job-seq]: .github/workflows/demo-job-seq.yml
-[demo-archs]: .github/workflows/demo-archs.yml
-[demo-matrix-wide]: .github/workflows/demo-matrix-wide.yml
[aws-actions/configure-aws-credentials]: https://github.com/aws-actions/configure-aws-credentials
[hooks]: https://docs.github.com/en/actions/how-tos/manage-runners/self-hosted-runners/run-scripts
[omsf/start-aws-gha-runner]: https://github.com/omsf/start-aws-gha-runner
+[omsf/stop-aws-gha-runner]: https://github.com/omsf/stop-aws-gha-runner
+[machulav/ec2-github-runner]: https://github.com/machulav/ec2-github-runner
+[egr ex]: https://github.com/machulav/ec2-github-runner?tab=readme-ov-file#example
[related-sciences/gce-github-runner]: https://github.com/related-sciences/gce-github-runner
[reusable workflow]: https://docs.github.com/en/actions/how-tos/reuse-automations/reuse-workflows#calling-a-reusable-workflow
[file an issue]: https://github.com/Open-Athena/ec2-gha/issues/new/choose
[SSH access]: #ssh
[cw]: #cloudwatch
+[demos#25]: https://github.com/Open-Athena/ec2-gha/actions/runs/17004697889
diff --git a/action.yml b/action.yml
index c0bb7f1..053277d 100644
--- a/action.yml
+++ b/action.yml
@@ -4,76 +4,87 @@ runs:
using: "docker"
image: "Dockerfile"
inputs:
+ action_ref:
+ description: "ec2-gha Git ref (branch/tag/SHA) to use for fetching scripts"
+ required: false
+ default: "v2"
aws_region:
- description: "The AWS region name to use for your runner. Defaults to AWS_REGION."
+ description: "AWS region for EC2 instances (falls back to vars.AWS_REGION, then us-east-1)"
required: false
aws_subnet_id:
- description: "The AWS subnet ID to use for your runner. Will use the account default subnet if not specified."
+ description: "AWS subnet ID (will use the account default subnet if not specified)"
required: false
aws_tags:
- description: "The AWS tags to use for your runner, formatted as a JSON list. See `README` for more details."
+ description: "AWS tags to apply to EC2 instances (JSON array format)"
required: false
cloudwatch_logs_group:
- description: "CloudWatch Logs group name for streaming runner logs. Leave empty to disable CloudWatch Logs."
+ description: "CloudWatch Logs group name for streaming runner logs (leave empty to disable)"
+ required: false
+ debug:
+ description: "Debug mode: false=off, true/trace=set -x only, number=set -x + sleep N minutes before shutdown"
required: false
ec2_home_dir:
- description: "The EC2 AMI home directory to use for your runner. Will not start if not specified. For example: `/home/ec2-user`"
- required: true
+ description: "Home directory on the AWS instance (falls back to vars.EC2_HOME_DIR, then auto-detection)"
+ required: false
ec2_image_id:
- description: "The machine AMI to use for your runner. This AMI can be a default but should have docker installed in the AMI. Will not start if not specified."
- required: true
+ description: "AWS AMI ID to use (required - must be provided via input or vars.EC2_IMAGE_ID)"
+ required: false
ec2_instance_profile:
- description: "Instance profile name to attach to launched EC2 instances (e.g. for CloudWatch Logs)"
+ description: "Instance profile name to attach to launched EC2 instance (required for CloudWatch logging)"
required: false
ec2_instance_type:
- description: "The type of instance to use for your runner. For example: t2.micro, t4g.nano, etc. Will not start if not specified."
- required: true
+ description: "AWS instance type (falls back to vars.EC2_INSTANCE_TYPE, then t3.medium)"
+ required: false
ec2_key_name:
- description: "Name of the EC2 key pair to use for SSH access"
+ description: "Name of an EC2 key pair to use for SSH access (falls back to vars.EC2_KEY_NAME)"
required: false
ec2_root_device_size:
- description: "The root device size in GB to use for your runner. Optional, defaults to the AMI default root disk size."
+ description: "Root disk size in GB (0=AMI default, +N=AMI+N GB for testing, e.g. +2)"
required: false
ec2_security_group_id:
- description: "The AWS security group ID to use for your runner. Will use the account default security group if not specified."
+ description: "AWS security group ID (falls back to vars.EC2_SECURITY_GROUP_ID)"
required: false
ec2_userdata:
- description: "User data script to run on instance startup. Use this to configure the instance before the runner starts."
+ description: "Additional userdata script to run on instance startup (before runner starts)"
required: false
extra_gh_labels:
- description: "Any extra GitHub labels to tag your runners with. Passed as a comma-separated list with no spaces."
+ description: "Any extra GitHub labels to tag your runners with. Passed as a comma-separated list with no spaces"
required: false
instance_count:
description: "The number of instances to create, defaults to 1"
- required: true
+ required: false
default: "1"
+ instance_name:
+ description: "Name tag template for EC2 instances. Uses Python string.Template format with variables: $repo, $name (workflow filename stem), $workflow (full workflow name), $ref, $run (number), $idx (0-based instance index for multi-instance launches). Default: $repo/$name#$run (or $repo/$name#$run $idx for multi-instance)"
+ required: false
max_instance_lifetime:
description: "Maximum instance lifetime in minutes before automatic shutdown (default 360 = 6 hours)"
required: false
- default: "360"
repo:
description: "The repo to run against. Will use the current repo if not specified."
required: false
runner_grace_period:
- description: "Grace period in seconds before terminating instance after last job completes (default 30)"
+ description: "Grace period in seconds before terminating instance after last job completes (falls back to vars.RUNNER_GRACE_PERIOD, then 60)"
required: false
- default: "30"
runner_initial_grace_period:
- description: "Grace period in seconds before terminating instance if no jobs start (default 120)"
+ description: "Grace period in seconds before terminating instance if no jobs start (falls back to vars.RUNNER_INITIAL_GRACE_PERIOD, then 180)"
+ required: false
+ runner_poll_interval:
+ description: "How often (in seconds) to check termination conditions (falls back to vars.RUNNER_POLL_INTERVAL, then 10)"
required: false
- default: "120"
runner_registration_timeout:
- description: "Maximum seconds to wait for runner to register with GitHub (default 300 = 5 minutes)"
+ description: "Maximum seconds to wait for runner to register with GitHub (falls back to vars.RUNNER_REGISTRATION_TIMEOUT, then 360 = 6 minutes)"
required: false
- default: "300"
+ runners_per_instance:
+ description: "Number of runners to register per instance (each in separate directories to allow concurrent jobs)"
+ required: false
+ default: "1"
ssh_pubkey:
description: "SSH public key to add to authorized_keys for debugging access"
required: false
outputs:
- mapping:
- description: "A JSON object mapping instance IDs to unique GitHub runner labels. This is used in conjunction with the `instance_mapping` input when stopping."
- instances:
- description: "A JSON list of the GitHub runner labels to be used in the 'runs-on' field"
+ mtx:
+ description: "A JSON array of objects for matrix strategies. Each object has: idx (overall 0-based index), id (runner label), instance_id, instance_idx (0-based instance index), runner_idx (0-based runner index within instance)"
label:
description: "The single runner label (for single instance use)"
instance-id:
diff --git a/img/demos#25 1.png b/img/demos#25 1.png
new file mode 100644
index 0000000..e648e63
Binary files /dev/null and b/img/demos#25 1.png differ
diff --git a/img/mamba#12.png b/img/mamba#12.png
new file mode 100644
index 0000000..afebd04
Binary files /dev/null and b/img/mamba#12.png differ
diff --git a/pyproject.toml b/pyproject.toml
index 847b047..7281398 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "ec2_gha"
version = "1.0.0"
description = "Start an AWS GitHub Actions Runner"
readme = "README.md"
-requires-python = ">=3.12"
+requires-python = ">=3.10"
authors = [{ name = "Ethan Holz", email = "ethan.holz@omsf.io" }]
dependencies = ["boto3", "gha_runner @ git+https://github.com/Open-Athena/gha-runner.git@v1"]
@@ -19,7 +19,7 @@ build-backend = "setuptools.build_meta"
where = ["src"]
[tool.setuptools.package-data]
-ec2_gha = ["*.templ", "templates/*.templ"]
+ec2_gha = ["*.templ", "templates/*.templ", "templates/*.sh"]
[tool.pytest.ini_options]
markers = ["slow: marks test as slow"]
diff --git a/scripts/instance-runtime.py b/scripts/instance-runtime.py
new file mode 100755
index 0000000..0ca0517
--- /dev/null
+++ b/scripts/instance-runtime.py
@@ -0,0 +1,669 @@
+#!/usr/bin/env python3
+"""
+Analyze EC2 instance runtime and job execution time for GitHub Actions runners.
+
+Usage:
+ instance-runtime.py INSTANCE_ID [INSTANCE_ID ...]
+ instance-runtime.py https://github.com/OWNER/REPO/actions/runs/RUN_ID[/job/JOB_ID]
+ instance-runtime.py --help
+"""
+
+import argparse
+import json
+import re
+import subprocess
+import sys
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import datetime, timezone
+from functools import partial
+
+from dateutil import parser as date_parser
+
+from ec2_gha.log_constants import (
+ LOG_STREAM_RUNNER_SETUP,
+ LOG_STREAM_JOB_STARTED,
+ LOG_STREAM_JOB_COMPLETED,
+ LOG_STREAM_TERMINATION,
+ LOG_PREFIX_JOB_STARTED,
+ LOG_PREFIX_JOB_COMPLETED,
+ LOG_MSG_TERMINATION_PROCEEDING,
+ LOG_MSG_RUNNER_REMOVED,
+ DEFAULT_CLOUDWATCH_LOG_GROUP,
+)
+
+err = partial(print, file=sys.stderr)
+
+def run_command(cmd: list[str]) -> str | None:
+ """Run a command and return output, or None on error."""
+ try:
+ result = subprocess.run(cmd, capture_output=True, text=True, check=True)
+ return result.stdout.strip()
+ except subprocess.CalledProcessError as e:
+ err(f"Error running command: {' '.join(cmd)}")
+ err(f"Error: {e.stderr}")
+ return None
+
+
+def get_log_streams(instance_id: str, log_group: str | None = None) -> list[dict]:
+    """Get CloudWatch log streams for an instance."""
+    if log_group is None:
+        log_group = DEFAULT_CLOUDWATCH_LOG_GROUP
+ cmd = [
+ "aws", "logs", "describe-log-streams",
+ "--log-group-name", log_group,
+ "--log-stream-name-prefix", instance_id,
+ "--query", "logStreams",
+ "--output", "json"
+ ]
+ output = run_command(cmd)
+ if output:
+ try:
+ return json.loads(output)
+ except json.JSONDecodeError:
+ return []
+ return []
+
+
+def get_log_events(log_group: str, log_stream: str, limit: int = 100, start_from_head: bool = False) -> list[dict]:
+ """Get events from a CloudWatch log stream."""
+ cmd = [
+ "aws", "logs", "get-log-events",
+ "--log-group-name", log_group,
+ "--log-stream-name", log_stream,
+ "--limit", str(limit),
+ "--output", "json"
+ ]
+ if start_from_head:
+ cmd.append("--start-from-head")
+
+ output = run_command(cmd)
+ if output:
+ try:
+ result = json.loads(output)
+ return result.get("events", [])
+ except json.JSONDecodeError:
+ return []
+ return []
+
+
+def parse_timestamp(ts_str: str) -> datetime | None:
+ """Parse various timestamp formats and ensure timezone is set."""
+ try:
+ dt = date_parser.parse(ts_str)
+ # If no timezone info, assume UTC
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt
+    except (ValueError, OverflowError):
+        return None
+
+
+def extract_timestamp_from_log(message: str) -> datetime | None:
+ """Extract timestamp from log message."""
+ # Pattern: [Thu Aug 14 00:29:25 UTC 2025]
+ match = re.search(r'\[([^]]+UTC \d{4})\]', message)
+ if match:
+ return parse_timestamp(match.group(1))
+
+ # Pattern: [2025-08-14 17:37:20]
+ match = re.search(r'\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\]', message)
+ if match:
+ return parse_timestamp(match.group(1))
+
+ return None
+
+
+def analyze_instance(instance_id: str, log_group: str | None = None) -> dict:
+    """Analyze runtime and job execution for an instance."""
+    if log_group is None:
+        log_group = DEFAULT_CLOUDWATCH_LOG_GROUP
+ result = {
+ "instance_id": instance_id,
+ "launch_time": None,
+ "termination_time": None,
+ "total_runtime_seconds": 0,
+ "job_runtime_seconds": 0,
+ "jobs": [],
+ "state": "unknown",
+ "instance_type": "unknown",
+ "tags": {}
+ }
+
+ # Get CloudWatch logs
+ log_streams = get_log_streams(instance_id, log_group)
+
+ # Check if logs are empty (all streams have no events)
+ # Note: storedBytes is often 0 even when there's data, so check for event timestamps instead
+ logs_empty = all(
+ stream.get("firstEventTimestamp") is None and stream.get("lastEventTimestamp") is None
+ for stream in log_streams
+ ) if log_streams else True
+
+ # Extract instance info from logs
+ if log_streams and not logs_empty:
+ # Try to get launch time and instance type from runner-setup log
+ for stream in log_streams:
+ if f"/{LOG_STREAM_RUNNER_SETUP}" in stream["logStreamName"]:
+ # Get first events for launch time
+ events = get_log_events(log_group, stream["logStreamName"], limit=50, start_from_head=True)
+
+ # Get the first timestamp from any log entry as approximate launch time
+ if not result["launch_time"] and events:
+ for event in events:
+ ts = extract_timestamp_from_log(event.get("message", ""))
+ if ts:
+ result["launch_time"] = ts
+ break
+
+ # Look for instance type and other metadata
+ for event in events:
+ msg = event.get("message", "")
+
+ # Look for instance type in metadata
+ if result["instance_type"] == "unknown":
+ # Try various patterns for instance types
+ patterns = [
+ r'Instance type:\s+(\S+)',
+ r'instance-type["\s:]+([a-z0-9]+\.[a-z0-9]+)',
+ r'EC2_INSTANCE_TYPE=([a-z0-9]+\.[a-z0-9]+)',
+ r'"instance_type":\s*"([a-z0-9]+\.[a-z0-9]+)"',
+ # Common instance type patterns
+ r'\b(g4dn\.\w+|g5\.\w+|g5g\.\w+|t[234]\.\w+|t[34][ag]\.\w+|p[234]\.\w+|p4d\.\w+|c[456]\.\w+|c[56]a\.\w+|m[456]\.\w+|m[56]a\.\w+|r[456]\.\w+)\b',
+ ]
+ for pattern in patterns:
+ match = re.search(pattern, msg, re.IGNORECASE)
+ if match:
+ result["instance_type"] = match.group(1).lower()
+ break
+
+ # Look for region in metadata
+ if "Region:" in msg:
+ match = re.search(r'Region:\s+(\S+)', msg)
+ if match:
+ result["tags"]["Region"] = match.group(1)
+
+ # Look for repository name
+ if "Repository:" in msg or "GITHUB_REPOSITORY" in msg:
+ match = re.search(r'Repository:\s+(\S+)|GITHUB_REPOSITORY=(\S+)', msg)
+ if match:
+ repo = match.group(1) or match.group(2)
+ result["tags"]["Repository"] = repo
+
+ # If still no launch time, use the log stream creation time
+ if not result["launch_time"] and stream.get("creationTime"):
+ # CloudWatch timestamps are in milliseconds
+ result["launch_time"] = datetime.fromtimestamp(stream["creationTime"] / 1000, tz=timezone.utc)
+
+ # Find termination time
+ for stream in log_streams:
+ if f"/{LOG_STREAM_TERMINATION}" in stream["logStreamName"]:
+ events = get_log_events(log_group, stream["logStreamName"])
+ for event in events:
+ if LOG_MSG_TERMINATION_PROCEEDING in event["message"]:
+ ts = extract_timestamp_from_log(event["message"])
+ if ts:
+ result["termination_time"] = ts
+ elif LOG_MSG_RUNNER_REMOVED in event["message"]:
+ ts = extract_timestamp_from_log(event["message"])
+ if ts and not result["termination_time"]:
+ result["termination_time"] = ts
+
+ # Determine state based on termination time
+ if result["termination_time"]:
+ result["state"] = "terminated"
+ elif result["launch_time"]:
+ # If we have launch time but no termination, assume still running
+ result["state"] = "running"
+ result["termination_time"] = datetime.now(timezone.utc)
+ result["still_running"] = True
+
+ # Calculate total runtime
+ if result["launch_time"] and result["termination_time"]:
+ delta = result["termination_time"] - result["launch_time"]
+ result["total_runtime_seconds"] = int(delta.total_seconds())
+
+ # Analyze job execution times
+ job_starts = {}
+ job_ends = {}
+
+ for stream in log_streams:
+ if f"/{LOG_STREAM_JOB_STARTED}" in stream["logStreamName"]:
+ events = get_log_events(log_group, stream["logStreamName"])
+ for event in events:
+ msg = event.get("message", "")
+ # Parse job start events - look for the job started prefix
+ if LOG_PREFIX_JOB_STARTED in msg or "Job STARTED" in msg:
+ # Extract timestamp
+ ts = extract_timestamp_from_log(msg)
+ if ts:
+ # Extract job name - try both patterns
+ # Pattern 1: "Job started: job-name" (using LOG_PREFIX_JOB_STARTED)
+ # Pattern 2: "Job STARTED : Test pip install - multiple versions/install (Run: 16952719799/11, Attempt: 1)"
+ # Create pattern that handles both cases
+ prefix_pattern = re.escape(LOG_PREFIX_JOB_STARTED.rstrip(':'))
+ match = re.search(rf'(?:{prefix_pattern}|Job STARTED)\s*:\s*([^(\n]+?)(?:\s*\(Run:\s*(\d+)/(\d+))?$', msg, re.IGNORECASE)
+ if match:
+ job_name = match.group(1).strip()
+ run_id = match.group(2) if match.group(2) else None
+ job_num = match.group(3) if match.group(3) else None
+
+ if run_id and job_num:
+ job_key = f"{run_id}/{job_num}"
+ else:
+ # Use job name as key if no run info
+ job_key = job_name
+ job_starts[job_key] = (ts, job_name)
+
+ elif f"/{LOG_STREAM_JOB_COMPLETED}" in stream["logStreamName"]:
+ events = get_log_events(log_group, stream["logStreamName"])
+ for event in events:
+ msg = event.get("message", "")
+ # Parse job completion events - look for the job completed prefix
+ if LOG_PREFIX_JOB_COMPLETED in msg or "Job COMPLETED" in msg:
+ # Extract timestamp
+ ts = extract_timestamp_from_log(msg)
+ if ts:
+ # Extract job name - try both patterns
+ # Pattern 1: "Job completed: job-name" (using LOG_PREFIX_JOB_COMPLETED)
+ # Pattern 2: "Job COMPLETED: Test pip install - multiple versions/install (Run: 16952719799/11, Attempt: 1)"
+ # Create pattern that handles both cases
+ prefix_pattern = re.escape(LOG_PREFIX_JOB_COMPLETED.rstrip(':'))
+ match = re.search(rf'(?:{prefix_pattern}|Job COMPLETED)\s*:\s*([^(\n]+?)(?:\s*\(Run:\s*(\d+)/(\d+))?$', msg, re.IGNORECASE)
+ if match:
+ job_name = match.group(1).strip()
+ run_id = match.group(2) if match.group(2) else None
+ job_num = match.group(3) if match.group(3) else None
+
+ if run_id and job_num:
+ job_key = f"{run_id}/{job_num}"
+ else:
+ # Use job name as key if no run info
+ job_key = job_name
+ job_ends[job_key] = (ts, job_name)
+
+ # Match starts and ends
+ total_job_time = 0
+ for job_key in job_starts:
+ if job_key in job_ends:
+ start_ts, start_name = job_starts[job_key]
+ end_ts, end_name = job_ends[job_key]
+ duration = int((end_ts - start_ts).total_seconds())
+ total_job_time += duration
+ result["jobs"].append({
+ "name": start_name or end_name or job_key,
+ "start": start_ts.isoformat(),
+ "end": end_ts.isoformat(),
+ "duration_seconds": duration
+ })
+
+ result["job_runtime_seconds"] = total_job_time
+
+ return result
+
+
+def get_instances_from_github_url(url: str) -> list[str]:
+ """Extract instance IDs from a GitHub Actions URL."""
+ # Parse the URL
+ match = re.match(r'https://github\.com/([^/]+)/([^/]+)/actions/runs/(\d+)(?:/job/(\d+))?', url)
+ if not match:
+ err(f"Error: Invalid GitHub Actions URL format: {url}")
+ return []
+
+ owner, repo, run_id, job_id = match.groups()
+
+ # Get jobs for this run
+ cmd = ["gh", "api", f"repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]
+ output = run_command(cmd)
+ if not output:
+ return []
+
+ try:
+ jobs_data = json.loads(output)
+ except json.JSONDecodeError:
+ err(f"Error: Could not parse GitHub API response")
+ return []
+
+ instance_ids = []
+ jobs = jobs_data.get("jobs", [])
+
+ for job in jobs:
+ # If specific job_id provided, filter to that job
+ if job_id and str(job.get("id")) != job_id:
+ continue
+
+ # Look for instance ID in runner name (format: i-xxxxx)
+ runner_name = job.get("runner_name", "")
+ match = re.search(r'(i-[0-9a-f]+)', runner_name)
+ if match:
+ instance_ids.append(match.group(1))
+
+ # Also check labels
+ for label in job.get("labels", []):
+ match = re.search(r'(i-[0-9a-f]+)', label)
+ if match:
+ instance_ids.append(match.group(1))
+
+ return list(set(instance_ids)) # Remove duplicates
+
+
+def format_duration(seconds: int) -> str:
+ """Format duration in human-readable format."""
+ hours = seconds // 3600
+ minutes = (seconds % 3600) // 60
+ secs = seconds % 60
+
+ if hours > 0:
+ return f"{hours}h {minutes}m {secs}s"
+ elif minutes > 0:
+ return f"{minutes}m {secs}s"
+ else:
+ return f"{secs}s"
+
+
+# Cache for instance prices to avoid repeated API calls
+_price_cache = {}
+_pricing_api_warned = False
+
+def get_instance_price(instance_type: str, region: str = "us-east-1") -> float:
+ """Get the current on-demand price for an instance type."""
+ global _pricing_api_warned
+
+ # Check cache first
+ cache_key = f"{instance_type}:{region}"
+ if cache_key in _price_cache:
+ return _price_cache[cache_key]
+
+ # Try AWS Pricing API (only works from us-east-1 region)
+ # Note: This requires the pricing:GetProducts permission
+ try:
+ cmd = [
+ "aws", "pricing", "get-products",
+ "--service-code", "AmazonEC2",
+ "--region", "us-east-1", # Pricing API only works in us-east-1
+ "--filters",
+ f"Type=TERM_MATCH,Field=instanceType,Value={instance_type}",
+ f"Type=TERM_MATCH,Field=location,Value={get_region_name(region)}",
+ "Type=TERM_MATCH,Field=operatingSystem,Value=Linux",
+ "Type=TERM_MATCH,Field=tenancy,Value=Shared",
+ "Type=TERM_MATCH,Field=preInstalledSw,Value=NA",
+ "--max-items", "1",
+ "--output", "json"
+ ]
+
+ output = run_command(cmd)
+ if output and "PriceList" in output:
+ data = json.loads(output)
+ price_list = data.get("PriceList", [])
+ if price_list:
+ price_data = json.loads(price_list[0])
+ on_demand = price_data.get("terms", {}).get("OnDemand", {})
+ for term in on_demand.values():
+ for price_dimension in term.get("priceDimensions", {}).values():
+ price_per_unit = price_dimension.get("pricePerUnit", {}).get("USD")
+ if price_per_unit:
+ price = float(price_per_unit)
+ _price_cache[cache_key] = price
+ err(f"Got live price for {instance_type} in {region}: ${price:.4f}/hour")
+ return price
+    except Exception:
+        # Pricing API might be unavailable or lack permissions
+        if not _pricing_api_warned:
+            err("Note: Could not fetch live pricing (AWS Pricing API unavailable or no permissions)")
+            _pricing_api_warned = True
+
+ # No pricing available
+ _price_cache[cache_key] = 0
+ return 0
+
+
+def get_region_name(region_code: str) -> str:
+ """Convert region code to region name for pricing API.
+
+ Uses AWS SSM to get the actual region name, falls back to a formatted guess.
+ """
+ # Try to get from AWS SSM parameters (these are publicly available)
+ try:
+ cmd = [
+ "aws", "ssm", "get-parameter",
+ "--name", f"/aws/service/global-infrastructure/regions/{region_code}/longName",
+ "--query", "Parameter.Value",
+ "--output", "text",
+ "--region", region_code
+ ]
+ output = run_command(cmd)
+ if output:
+ return output
+    except Exception:
+        pass
+
+ # Fallback: format the region code into a readable name
+ # us-east-1 -> US East 1, eu-west-2 -> EU West 2, etc.
+ parts = region_code.split('-')
+ if len(parts) >= 3:
+ region_map = {
+ "us": "US",
+ "eu": "EU",
+ "ap": "Asia Pacific",
+ "ca": "Canada",
+ "sa": "South America",
+ "me": "Middle East",
+ "af": "Africa"
+ }
+ area = region_map.get(parts[0], parts[0].upper())
+ direction = parts[1].capitalize()
+ number = parts[2]
+ return f"{area} ({direction} {number})"
+
+ # Last resort: just return the code
+ return region_code
+
+
+def calculate_cost(
+ instance_type: str,
+ runtime_seconds: int,
+ region: str = "us-east-1",
+) -> float:
+ """Calculate cost based on instance type and runtime."""
+ hourly_cost = get_instance_price(instance_type, region)
+ if hourly_cost == 0:
+ return 0
+
+ hours = runtime_seconds / 3600
+ return hourly_cost * hours
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Analyze EC2 instance runtime and job execution time for GitHub Actions runners.",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
+Examples:
+ %(prog)s i-0abc123def456789
+ %(prog)s i-0abc123def456789 i-0def456abc789012
+ %(prog)s https://github.com/owner/repo/actions/runs/123456789
+ %(prog)s https://github.com/owner/repo/actions/runs/123456789/job/987654321
+ %(prog)s --log-group /custom/log/group i-0abc123def456789
+ """
+ )
+
+ parser.add_argument(
+ "targets",
+ nargs="+",
+ help="Instance IDs or GitHub Actions URL"
+ )
+
+ parser.add_argument(
+ "--log-group",
+ default="/aws/ec2/github-runners",
+ help="CloudWatch Logs group name (default: /aws/ec2/github-runners)"
+ )
+
+ parser.add_argument(
+ "--region",
+ default="us-east-1",
+ help="AWS region for pricing (default: us-east-1)"
+ )
+
+ parser.add_argument(
+ "--json",
+ action="store_true",
+ help="Output results as JSON"
+ )
+
+ parser.add_argument(
+ "--parallel",
+ type=int,
+ default=10,
+ help="Maximum number of parallel instance lookups (default: 10, use 1 for sequential)"
+ )
+
+ args = parser.parse_args()
+
+ # Collect all instance IDs
+ instance_ids = []
+ for target in args.targets:
+ if target.startswith("https://github.com/"):
+ ids = get_instances_from_github_url(target)
+ if ids:
+ err(f"Found instances from GitHub URL: {', '.join(ids)}")
+ instance_ids.extend(ids)
+ else:
+ err(f"Warning: No instances found for URL: {target}")
+ elif target.startswith("i-"):
+ instance_ids.append(target)
+ else:
+ err(f"Warning: Skipping invalid target: {target}")
+
+ if not instance_ids:
+ err("Error: No valid instance IDs found")
+ sys.exit(1)
+
+ # Analyze instances in parallel
+ results = []
+ total_runtime = 0
+ total_job_runtime = 0
+ total_cost = 0
+
+ # Determine parallel execution mode
+ max_workers = min(args.parallel, len(instance_ids))
+ if max_workers > 1:
+ err(f"Analyzing {len(instance_ids)} instance(s) with {max_workers} parallel workers...")
+ else:
+ err(f"Analyzing {len(instance_ids)} instance(s) sequentially...")
+
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ # Submit all tasks
+ future_to_instance = {
+ executor.submit(analyze_instance, instance_id, args.log_group): instance_id
+ for instance_id in instance_ids
+ }
+
+ # Process results as they complete
+ for future in as_completed(future_to_instance):
+ instance_id = future_to_instance[future]
+ try:
+ result = future.result(timeout=30) # 30 second timeout per instance
+
+ # Calculate cost
+ cost = calculate_cost(result["instance_type"], result["total_runtime_seconds"], args.region)
+ result["estimated_cost"] = cost
+
+ # Add to results
+ results.append(result)
+ total_runtime += result["total_runtime_seconds"]
+ total_job_runtime += result["job_runtime_seconds"]
+ total_cost += cost
+
+ except Exception as e:
+ err(f"Error analyzing {instance_id}: {e}")
+ # Add failed result with all required fields
+ results.append({
+ "instance_id": instance_id,
+ "error": str(e),
+ "total_runtime_seconds": 0,
+ "job_runtime_seconds": 0,
+ "estimated_cost": 0,
+ "instance_type": "unknown",
+ "state": "error",
+ "launch_time": None,
+ "termination_time": None,
+ "jobs": [],
+ "tags": {}
+ })
+
+ # Sort results by instance ID for consistent output
+ results.sort(key=lambda x: x.get("instance_id", ""))
+
+ if args.json:
+ # JSON output
+ output = {
+ "instances": results,
+ "summary": {
+ "total_instances": len(results),
+ "total_runtime_seconds": total_runtime,
+ "total_job_runtime_seconds": total_job_runtime,
+ "total_idle_seconds": total_runtime - total_job_runtime,
+ "estimated_total_cost": round(total_cost, 4)
+ }
+ }
+ print(json.dumps(output, indent=2, default=str))
+ else:
+ # Human-readable output
+ print("\n" + "="*80)
+ for result in results:
+ print(f"\nInstance: {result['instance_id']}")
+ print(f" Type: {result['instance_type']}")
+ print(f" State: {result['state']}")
+
+ if result.get("tags", {}).get("Name"):
+ print(f" Name: {result['tags']['Name']}")
+
+ if result["launch_time"]:
+ print(f" Launch Time: {result['launch_time']}")
+
+ if result["termination_time"]:
+ if result.get("still_running"):
+ print(f" Current Time: {result['termination_time']} (still running)")
+ else:
+ print(f" Termination Time: {result['termination_time']}")
+
+ print(f" Total Runtime: {format_duration(result['total_runtime_seconds'])} ({result['total_runtime_seconds']}s)")
+ print(f" Job Runtime: {format_duration(result['job_runtime_seconds'])} ({result['job_runtime_seconds']}s)")
+
+ idle_time = result['total_runtime_seconds'] - result['job_runtime_seconds']
+ print(f" Idle Time: {format_duration(idle_time)} ({idle_time}s)")
+
+ if result['total_runtime_seconds'] > 0:
+ utilization = (result['job_runtime_seconds'] / result['total_runtime_seconds']) * 100
+ print(f" Utilization: {utilization:.1f}%")
+
+ if result.get("estimated_cost", 0) > 0:
+ print(f" Estimated Cost: ${result['estimated_cost']:.4f}")
+
+ if result["jobs"]:
+ print(f" Jobs ({len(result['jobs'])}):")
+ for job in result["jobs"]:
+ print(f" - {job['name']}: {format_duration(job['duration_seconds'])}")
+
+ print("\n" + "="*80)
+ print("SUMMARY")
+ print(f" Total Instances: {len(results)}")
+ print(f" Total Runtime: {format_duration(total_runtime)} ({total_runtime}s)")
+ print(f" Total Job Runtime: {format_duration(total_job_runtime)} ({total_job_runtime}s)")
+ print(f" Total Idle Time: {format_duration(total_runtime - total_job_runtime)} ({total_runtime - total_job_runtime}s)")
+
+ if total_runtime > 0:
+ overall_utilization = (total_job_runtime / total_runtime) * 100
+ print(f" Overall Utilization: {overall_utilization:.1f}%")
+
+ if total_cost > 0:
+ print(f" Estimated Total Cost: ${total_cost:.4f} (from AWS Pricing API)")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/update-snapshots.sh b/scripts/update-snapshots.sh
new file mode 100755
index 0000000..5802c79
--- /dev/null
+++ b/scripts/update-snapshots.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+set -e
+
+pytest --snapshot-update -m 'not slow'
+pytest -vvv -m 'not slow' .
diff --git a/src/ec2_gha/__main__.py b/src/ec2_gha/__main__.py
index b88bc2d..90a2655 100644
--- a/src/ec2_gha/__main__.py
+++ b/src/ec2_gha/__main__.py
@@ -1,40 +1,55 @@
from ec2_gha.start import StartAWS
+from ec2_gha.defaults import (
+ EC2_INSTANCE_TYPE,
+ INSTANCE_COUNT,
+ INSTANCE_NAME,
+ MAX_INSTANCE_LIFETIME,
+ RUNNER_GRACE_PERIOD,
+ RUNNER_INITIAL_GRACE_PERIOD,
+ RUNNER_POLL_INTERVAL,
+ RUNNER_REGISTRATION_TIMEOUT,
+)
from gha_runner.gh import GitHubInstance
from gha_runner.clouddeployment import DeployInstance
from gha_runner.helper.input import EnvVarBuilder, check_required
-import os
+from os import environ
def main():
- env = dict(os.environ)
+ env = dict(environ)
required = ["GH_PAT", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"]
# Check that everything exists
check_required(env, required)
- # Timeout for waiting for runner to register with GitHub (default 5 minutes)
- timeout = int(os.environ.get("INPUT_RUNNER_REGISTRATION_TIMEOUT", "300"))
+ # Timeout for waiting for runner to register with GitHub
+ timeout_str = environ.get("INPUT_RUNNER_REGISTRATION_TIMEOUT", "").strip()
+ timeout = int(timeout_str) if timeout_str else int(RUNNER_REGISTRATION_TIMEOUT)
- token = os.environ["GH_PAT"]
+ token = environ["GH_PAT"]
# Make a copy of environment variables for immutability
- env = dict(os.environ)
+ env = dict(environ)
builder = (
EnvVarBuilder(env)
.update_state("INPUT_AWS_SUBNET_ID", "subnet_id")
.update_state("INPUT_AWS_TAGS", "tags", is_json=True)
.update_state("INPUT_CLOUDWATCH_LOGS_GROUP", "cloudwatch_logs_group")
+ .update_state("INPUT_DEBUG", "debug")
.update_state("INPUT_EC2_HOME_DIR", "home_dir")
.update_state("INPUT_EC2_IMAGE_ID", "image_id")
.update_state("INPUT_EC2_INSTANCE_PROFILE", "iam_instance_profile")
.update_state("INPUT_EC2_INSTANCE_TYPE", "instance_type")
.update_state("INPUT_EC2_KEY_NAME", "key_name")
- .update_state("INPUT_EC2_ROOT_DEVICE_SIZE", "root_device_size", type_hint=int)
+ .update_state("INPUT_EC2_ROOT_DEVICE_SIZE", "root_device_size", type_hint=str)
.update_state("INPUT_EC2_SECURITY_GROUP_ID", "security_group_id")
.update_state("INPUT_EC2_USERDATA", "userdata")
.update_state("INPUT_EXTRA_GH_LABELS", "labels")
.update_state("INPUT_INSTANCE_COUNT", "instance_count", type_hint=int)
+ .update_state("INPUT_INSTANCE_NAME", "instance_name")
.update_state("INPUT_MAX_INSTANCE_LIFETIME", "max_instance_lifetime")
.update_state("INPUT_RUNNER_GRACE_PERIOD", "runner_grace_period")
.update_state("INPUT_RUNNER_INITIAL_GRACE_PERIOD", "runner_initial_grace_period")
+ .update_state("INPUT_RUNNER_POLL_INTERVAL", "runner_poll_interval")
+ .update_state("INPUT_RUNNERS_PER_INSTANCE", "runners_per_instance", type_hint=int)
.update_state("INPUT_SSH_PUBKEY", "ssh_pubkey")
.update_state("AWS_REGION", "region_name") # default
.update_state("INPUT_AWS_REGION", "region_name") # input override
@@ -48,10 +63,41 @@ def main():
if repo is None:
raise Exception("Repo cannot be empty")
- # Instance count is not a keyword arg for StartAWS, so we remove it
- instance_count = params.pop("instance_count")
+ # Instance count and runners_per_instance are not keyword args for StartAWS, so we remove them
+ instance_count = params.pop("instance_count", INSTANCE_COUNT)
+ runners_per_instance = params.pop("runners_per_instance", 1)
+
+ # Apply defaults that weren't set via inputs or vars
+ params.setdefault("max_instance_lifetime", MAX_INSTANCE_LIFETIME)
+ params.setdefault("runner_grace_period", RUNNER_GRACE_PERIOD)
+ params.setdefault("runner_initial_grace_period", RUNNER_INITIAL_GRACE_PERIOD)
+ params.setdefault("runner_poll_interval", RUNNER_POLL_INTERVAL)
+ params.setdefault("instance_name", INSTANCE_NAME)
+ params.setdefault("instance_type", EC2_INSTANCE_TYPE)
+ params.setdefault("region_name", "us-east-1") # Default AWS region
+
+ # image_id is required - must be provided via input or vars
+ if not params.get("image_id"):
+ raise Exception("EC2 AMI ID (ec2_image_id) must be provided via input or vars.EC2_IMAGE_ID")
+ # home_dir will be set to AUTO in start.py if not provided
gh = GitHubInstance(token=token, repo=repo)
+
+ # Pass runners_per_instance to StartAWS
+ params["runners_per_instance"] = runners_per_instance
+
+ # Generate all the tokens we need upfront
+ # Each instance needs runners_per_instance tokens
+ total_runners = instance_count * runners_per_instance
+ if runners_per_instance > 1:
+ # Generate all tokens upfront
+ all_tokens = gh.create_runner_tokens(total_runners)
+ # Group tokens by instance (each instance gets runners_per_instance tokens)
+ grouped_tokens = []
+ for i in range(0, total_runners, runners_per_instance):
+ grouped_tokens.append(all_tokens[i:i+runners_per_instance])
+ params["grouped_runner_tokens"] = grouped_tokens
+
# This will create a new instance of StartAWS and configure it correctly
deployment = DeployInstance(
provider_type=StartAWS,
diff --git a/src/ec2_gha/defaults.py b/src/ec2_gha/defaults.py
new file mode 100644
index 0000000..fa98097
--- /dev/null
+++ b/src/ec2_gha/defaults.py
@@ -0,0 +1,20 @@
+"""Default values for ec2-gha configuration."""
+
+# Instance lifetime and timing defaults
+MAX_INSTANCE_LIFETIME = "120" # 2 hours (in minutes)
+RUNNER_GRACE_PERIOD = "60" # 1 minute (in seconds)
+RUNNER_INITIAL_GRACE_PERIOD = "180" # 3 minutes (in seconds)
+RUNNER_POLL_INTERVAL = "10" # 10 seconds
+RUNNER_REGISTRATION_TIMEOUT = "300" # 5 minutes (in seconds)
+
+# EC2 instance defaults
+EC2_INSTANCE_TYPE = "t3.medium"
+
+# Instance naming default template
+INSTANCE_NAME = "$repo/$name#$run"
+
+# Default instance count
+INSTANCE_COUNT = 1
+
+# Home directory auto-detection sentinel
+AUTO = "AUTO"
diff --git a/src/ec2_gha/log_constants.py b/src/ec2_gha/log_constants.py
new file mode 100644
index 0000000..c5de836
--- /dev/null
+++ b/src/ec2_gha/log_constants.py
@@ -0,0 +1,24 @@
+"""
+Constants for log messages used across ec2-gha components.
+
+These constants ensure consistency between the runner scripts that generate logs
+and the analysis tools that parse them.
+"""
+
+# Log stream names (relative to instance ID prefix)
+LOG_STREAM_RUNNER_SETUP = "runner-setup"
+LOG_STREAM_JOB_STARTED = "job-started"
+LOG_STREAM_JOB_COMPLETED = "job-completed"
+LOG_STREAM_TERMINATION = "termination"
+LOG_STREAM_RUNNER_DIAG = "runner-diag"
+
+# Log message prefixes
+LOG_PREFIX_JOB_STARTED = "Job started:"
+LOG_PREFIX_JOB_COMPLETED = "Job completed:"
+
+# Termination messages
+LOG_MSG_TERMINATION_PROCEEDING = "proceeding with termination"
+LOG_MSG_RUNNER_REMOVED = "Runner removed from GitHub successfully"
+
+# Default CloudWatch log group
+DEFAULT_CLOUDWATCH_LOG_GROUP = "/aws/ec2/github-runners"
diff --git a/src/ec2_gha/scripts/check-runner-termination.sh b/src/ec2_gha/scripts/check-runner-termination.sh
new file mode 100644
index 0000000..3388d24
--- /dev/null
+++ b/src/ec2_gha/scripts/check-runner-termination.sh
@@ -0,0 +1,96 @@
+#!/bin/bash
+# Periodic check for GitHub Actions runner termination conditions
+# Called by systemd timer to determine if the instance should shut down
+
+exec >> /tmp/termination-check.log 2>&1
+
+# Source common functions and variables
+source /usr/local/bin/runner-common.sh
+
+# File paths for tracking
+A="$RUNNER_STATE_DIR/last-activity"
+J="$RUNNER_STATE_DIR/jobs"
+H="$RUNNER_STATE_DIR/has-run-job"
+
+# Current timestamp
+N=$(date +%s)
+
+# Check if any runners are actually running
+RUNNER_PROCS=$(pgrep -f "Runner.Listener" | wc -l)
+if [ $RUNNER_PROCS -eq 0 ]; then
+ # No runner processes, check if we have stale job files
+ if ls $J/*.job 2>/dev/null | grep -q .; then
+ log "WARNING: Found job files but no runner processes - cleaning up stale jobs"
+ rm -f $J/*.job
+ fi
+fi
+
+# Check job files and update timestamps for active runners
+# This creates a heartbeat mechanism to detect stuck/failed job completion
+for job_file in $J/*.job; do
+ [ -f "$job_file" ] || continue
+ if grep -q '"status":"running"' "$job_file" 2>/dev/null; then
+ # Extract runner number from job file name (format: RUNID-JOBNAME-RUNNER.job)
+ runner_num=$(basename "$job_file" .job | rev | cut -d- -f1 | rev)
+
+ # For a job to be truly running, we need BOTH Listener AND Worker processes
+ # Listener alone means the runner is idle/waiting, not actually running a job
+ listener_alive=$(pgrep -f "runner-${runner_num}/.*Runner.Listener" 2>/dev/null | wc -l)
+ worker_alive=$(pgrep -f "runner-${runner_num}/.*Runner.Worker" 2>/dev/null | wc -l)
+
+ if [ $listener_alive -gt 0 ] && [ $worker_alive -gt 0 ]; then
+ # Both processes exist, job is truly running - update heartbeat
+ touch "$job_file" 2>/dev/null || true
+ elif [ $listener_alive -gt 0 ] && [ $worker_alive -eq 0 ]; then
+ # Listener exists but no Worker - job has likely failed/completed but hook couldn't run
+ job_age=$((N - $(stat -c %Y "$job_file" 2>/dev/null || echo 0)))
+ log "WARNING: Runner $runner_num Listener alive but Worker dead - job likely completed (file age: ${job_age}s)"
+ rm -f "$job_file"
+ touch "$A" # Update last activity since we just cleaned up a job
+ else
+ # No Listener at all - runner is completely dead
+ job_age=$((N - $(stat -c %Y "$job_file" 2>/dev/null || echo 0)))
+ log "WARNING: Job file $(basename $job_file) exists but runner $runner_num is dead (file age: ${job_age}s)"
+ rm -f "$job_file"
+ fi
+ fi
+done
+
+# Now check for stale job files that couldn't be touched (e.g., disk full)
+# With polling every ${RUNNER_POLL_INTERVAL:-10}s, files should never be older than ~30s
+# If they are, something is preventing the touch (likely disk full)
+STALE_THRESHOLD=$((${RUNNER_POLL_INTERVAL:-10} * 3)) # 3x the poll interval
+for job_file in $J/*.job; do
+ [ -f "$job_file" ] || continue
+ if grep -q '"status":"running"' "$job_file" 2>/dev/null; then
+ job_age=$((N - $(stat -c %Y "$job_file" 2>/dev/null || echo 0)))
+ if [ $job_age -gt $STALE_THRESHOLD ]; then
+ log "ERROR: Job file $(basename $job_file) is stale (${job_age}s old, threshold ${STALE_THRESHOLD}s)"
+ log "Touch must be failing (disk full?) - removing stale job file"
+ rm -f "$job_file"
+ fi
+ fi
+done
+
+# Ensure activity file exists and get its timestamp
+[ ! -f "$A" ] && touch "$A"
+L=$(stat -c %Y "$A" 2>/dev/null || echo 0)
+
+# Calculate idle time
+I=$((N-L))
+
+# Determine grace period based on whether any job has run yet
+[ -f "$H" ] && G=${RUNNER_GRACE_PERIOD:-60} || G=${RUNNER_INITIAL_GRACE_PERIOD:-180}
+
+# Count running jobs
+R=$(grep -l '"status":"running"' $J/*.job 2>/dev/null | wc -l || echo 0)
+
+# Check if we should terminate
+if [ $R -eq 0 ] && [ $I -gt $G ]; then
+ log "TERMINATING: idle $I > grace $G"
+ deregister_all_runners
+ flush_cloudwatch_logs
+ debug_sleep_and_shutdown
+else
+ [ $R -gt 0 ] && log "$R job(s) running" || log "Idle $I/$G sec"
+fi
diff --git a/src/ec2_gha/scripts/job-completed-hook.sh b/src/ec2_gha/scripts/job-completed-hook.sh
new file mode 100644
index 0000000..4bb214d
--- /dev/null
+++ b/src/ec2_gha/scripts/job-completed-hook.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# GitHub Actions runner job-completed hook
+# Called when a job finishes (success or failure) on this runner
+# Environment variables provided by GitHub Actions runner
+
+exec >> /tmp/job-completed-hook.log 2>&1
+
+# Source common variables
+source /usr/local/bin/runner-common.sh
+
+# Get runner index from environment (defaults to 0 for single-runner instances)
+I="${RUNNER_INDEX:-0}"
+
+# Log the job completion with a specific prefix for CloudWatch filtering
+# The LOG_PREFIX will be substituted during setup
+echo "[$(date)] Runner-$I: LOG_PREFIX_JOB_COMPLETED ${GITHUB_JOB}"
+
+# Remove the job tracking file to indicate this runner no longer has an active job
+rm -f $RUNNER_STATE_DIR/jobs/${GITHUB_RUN_ID}-${GITHUB_JOB}-$I.job
+
+# Update activity timestamp to reset the idle timer
+touch $RUNNER_STATE_DIR/last-activity
diff --git a/src/ec2_gha/scripts/job-started-hook.sh b/src/ec2_gha/scripts/job-started-hook.sh
new file mode 100644
index 0000000..e80591c
--- /dev/null
+++ b/src/ec2_gha/scripts/job-started-hook.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# GitHub Actions runner job-started hook
+# Called when a job starts running on this runner
+# Environment variables provided by GitHub Actions runner
+
+exec >> /tmp/job-started-hook.log 2>&1
+
+# Source common variables
+source /usr/local/bin/runner-common.sh
+
+# Get runner index from environment (defaults to 0 for single-runner instances)
+I="${RUNNER_INDEX:-0}"
+
+# Log the job start with a specific prefix for CloudWatch filtering
+# The LOG_PREFIX will be substituted during setup
+echo "[$(date)] Runner-$I: LOG_PREFIX_JOB_STARTED Runner-$I: ${GITHUB_JOB}"
+
+# Create a job tracking file to indicate this runner has an active job
+# Format: RUNID-JOBNAME-RUNNER.job
+mkdir -p $RUNNER_STATE_DIR/jobs
+echo '{"status":"running","runner":"'$I'"}' > $RUNNER_STATE_DIR/jobs/${GITHUB_RUN_ID}-${GITHUB_JOB}-$I.job
+
+# Update activity timestamps to reset the idle timer
+touch $RUNNER_STATE_DIR/last-activity $RUNNER_STATE_DIR/has-run-job
diff --git a/src/ec2_gha/scripts/runner-setup.sh b/src/ec2_gha/scripts/runner-setup.sh
new file mode 100755
index 0000000..26d14a1
--- /dev/null
+++ b/src/ec2_gha/scripts/runner-setup.sh
@@ -0,0 +1,410 @@
+#!/bin/bash
+set -e
+
+# This script is fetched and executed by the minimal userdata script
+# All variables are already exported by the userdata script
+
+# Enable debug tracing to a file for troubleshooting
+exec 2> >(tee -a /var/log/runner-debug.log >&2)
+
+# Conditionally enable debug mode (set -x) for tracing
+# Debug can be: true/True/trace (trace only), or a number (trace + sleep minutes)
+if [ "$debug" = "true" ] || [ "$debug" = "True" ] || [ "$debug" = "trace" ] || [[ "$debug" =~ ^[0-9]+$ ]]; then
+ set -x
+fi
+
+# Determine home directory early since it's needed by shared functions
+if [ -z "$homedir" ] || [ "$homedir" = "AUTO" ]; then
+ # Try to find the default non-root user's home directory
+ for user in ubuntu ec2-user centos admin debian fedora alpine arch; do
+ if id "$user" &>/dev/null; then
+ homedir="/home/$user"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Auto-detected homedir: $homedir" | tee -a /var/log/runner-setup.log
+ break
+ fi
+ done
+
+ # Fallback if no standard user found
+ if [ -z "$homedir" ] || [ "$homedir" = "AUTO" ]; then
+ homedir=$(getent passwd | awk -F: '$3 >= 1000 && $3 < 65534 && $6 ~ /^\/home\// {print $6}' | while read dir; do
+ if [ -d "$dir" ]; then
+ echo "$dir"
+ break
+ fi
+ done)
+ if [ -z "$homedir" ]; then
+ homedir="/home/ec2-user" # Ultimate fallback
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Using fallback homedir: $homedir" | tee -a /var/log/runner-setup.log
+ else
+ owner=$(stat -c "%U" "$homedir" 2>/dev/null || stat -f "%Su" "$homedir" 2>/dev/null)
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Detected homedir: $homedir (owner: $owner)" | tee -a /var/log/runner-setup.log
+ fi
+ fi
+else
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Using specified homedir: $homedir" | tee -a /var/log/runner-setup.log
+fi
+export homedir
+
+# Set common paths
+BIN_DIR=/usr/local/bin
+RUNNER_STATE_DIR=/var/run/github-runner
+mkdir -p $RUNNER_STATE_DIR
+
+# Fetch shared functions from GitHub
+echo "[$(date '+%Y-%m-%d %H:%M:%S')] Fetching shared functions from GitHub (SHA: ${action_sha})" | tee -a /var/log/runner-setup.log
+FUNCTIONS_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/templates/shared-functions.sh"
+if ! curl -fsSL "$FUNCTIONS_URL" -o /tmp/shared-functions.sh && ! wget -q "$FUNCTIONS_URL" -O /tmp/shared-functions.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download shared functions" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+fi
+
+# Write shared functions that will be used by multiple scripts
+cat > $BIN_DIR/runner-common.sh << EOSF
+# Auto-generated shared functions and variables
+# Set homedir for scripts that source this file
+homedir="$homedir"
+debug="$debug"
+RUNNER_STATE_DIR="$RUNNER_STATE_DIR"
+export homedir debug RUNNER_STATE_DIR
+
+EOSF
+
+# Append the downloaded shared functions
+cat /tmp/shared-functions.sh >> $BIN_DIR/runner-common.sh
+
+chmod +x $BIN_DIR/runner-common.sh
+source $BIN_DIR/runner-common.sh
+
+logger "EC2-GHA: Starting userdata script"
+trap 'logger "EC2-GHA: Script failed at line $LINENO with exit code $?"' ERR
+trap 'terminate_instance "Setup script failed with error on line $LINENO"' ERR
+# Handle watchdog termination signal
+trap 'if [ -f $RUNNER_STATE_DIR/watchdog-terminate ]; then terminate_instance "No runners registered within timeout"; else terminate_instance "Script terminated"; fi' TERM
+
+# Set up registration timeout failsafe - terminate if runner doesn't register in time
+REGISTRATION_TIMEOUT="$runner_registration_timeout"
+if ! [[ "$REGISTRATION_TIMEOUT" =~ ^[0-9]+$ ]]; then
+ REGISTRATION_TIMEOUT=300
+fi
+# Create a marker file for watchdog termination request
+touch $RUNNER_STATE_DIR/watchdog-active
+(
+ sleep $REGISTRATION_TIMEOUT
+ if [ ! -f $RUNNER_STATE_DIR/registered ]; then
+ touch $RUNNER_STATE_DIR/watchdog-terminate
+ kill -TERM $$ 2>/dev/null || true
+ fi
+ rm -f $RUNNER_STATE_DIR/watchdog-active
+) &
+REGISTRATION_WATCHDOG_PID=$!
+echo $REGISTRATION_WATCHDOG_PID > $RUNNER_STATE_DIR/watchdog.pid
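+# The watchdog only fires if the `registered` marker hasn't appeared within
+# $REGISTRATION_TIMEOUT seconds; once runners register, it is killed at the
+# end of this script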
+
+# Run any custom user data script provided by the user
+if [ -n "$userdata" ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Running custom userdata" | tee -a /var/log/runner-setup.log
+ eval "$userdata"
+fi
+
+exec >> /var/log/runner-setup.log 2>&1
+log "Starting runner setup"
+
+# Fetch instance metadata for labeling and logging
+INSTANCE_TYPE=$(get_metadata "instance-type")
+INSTANCE_ID=$(get_metadata "instance-id")
+REGION=$(get_metadata "placement/region")
+AZ=$(get_metadata "placement/availability-zone")
+log "Instance metadata: Type=${INSTANCE_TYPE} ID=${INSTANCE_ID} Region=${REGION} AZ=${AZ}"
+
+# Set up maximum lifetime timeout - instance will terminate after this time regardless of job status
+MAX_LIFETIME_MINUTES=$max_instance_lifetime
+log "Setting up maximum lifetime timeout: ${MAX_LIFETIME_MINUTES} minutes"
+# Use ; instead of && so shutdown runs even if echo fails (e.g., disk full)
+# Try multiple shutdown methods as fallbacks
+nohup bash -c "
+ sleep ${MAX_LIFETIME_MINUTES}m
+  echo \"[\$(date)] Maximum lifetime reached\" 2>/dev/null || true
+ # Try normal shutdown
+ shutdown -h now 2>/dev/null || {
+ # If shutdown fails, try halt
+ halt -f 2>/dev/null || {
+ # If halt fails, try sysrq if available (Linux only)
+ if [ -w /proc/sysrq-trigger ]; then
+ echo 1 > /proc/sys/kernel/sysrq 2>/dev/null
+ echo o > /proc/sysrq-trigger 2>/dev/null
+ fi
+ # Last resort: force immediate reboot
+ reboot -f 2>/dev/null || true
+ }
+ }
+" > /var/log/max-lifetime.log 2>&1 &
+
+# Configure CloudWatch Logs if a log group is specified
+if [ "$cloudwatch_logs_group" != "" ]; then
+ log "Installing CloudWatch agent"
+
+ # Detect architecture for CloudWatch agent
+ ARCH=$(uname -m)
+ if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
+ CW_ARCH="arm64"
+ else
+ CW_ARCH="amd64"
+ fi
+
+ if command -v dpkg >/dev/null 2>&1; then
+ wait_for_dpkg_lock
+ wget -q https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/${CW_ARCH}/latest/amazon-cloudwatch-agent.deb
+ dpkg -i -E ./amazon-cloudwatch-agent.deb
+ rm amazon-cloudwatch-agent.deb
+ elif command -v rpm >/dev/null 2>&1; then
+ # Note: For RPM-based systems, the path structure might differ
+ wget -q https://s3.amazonaws.com/amazoncloudwatch-agent/amazon_linux/${CW_ARCH}/latest/amazon-cloudwatch-agent.rpm
+ rpm -U ./amazon-cloudwatch-agent.rpm
+ rm amazon-cloudwatch-agent.rpm
+ fi
+
+ # Build CloudWatch config
+ cat > /opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json << EOF
+{
+ "agent": {
+ "run_as_user": "cwagent"
+ },
+ "logs": {
+ "logs_collected": {
+ "files": {
+ "collect_list": [
+ { "file_path": "/var/log/runner-setup.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/runner-setup" , "timezone": "UTC" },
+ { "file_path": "/var/log/runner-debug.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/runner-debug" , "timezone": "UTC" },
+ { "file_path": "/tmp/job-started-hook.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/job-started" , "timezone": "UTC" },
+ { "file_path": "/tmp/job-completed-hook.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/job-completed", "timezone": "UTC" },
+ { "file_path": "/tmp/termination-check.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/termination" , "timezone": "UTC" },
+ { "file_path": "/tmp/runner-*-config.log" , "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/runner-config", "timezone": "UTC" },
+ { "file_path": "$homedir/_diag/Runner_**.log", "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/runner-diag" , "timezone": "UTC" },
+ { "file_path": "$homedir/_diag/Worker_**.log", "log_group_name": "$cloudwatch_logs_group", "log_stream_name": "{instance_id}/worker-diag" , "timezone": "UTC" }
+ ]
+ }
+ }
+ }
+}
+EOF
+
+ if ! /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl -a fetch-config -m ec2 -c file:/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json -s; then
+ log_error "Failed to start CloudWatch agent"
+ terminate_instance "CloudWatch agent startup failed"
+ fi
+
+ log "CloudWatch agent started successfully"
+fi
+
+# Configure SSH access if public key provided (useful for debugging)
+if [ -n "$ssh_pubkey" ]; then
+ log "Configuring SSH access"
+ # Determine the default user based on the home directory owner
+ DEFAULT_USER=$(stat -c "%U" "$homedir" 2>/dev/null || echo "root")
+ mkdir -p "$homedir/.ssh"
+ chmod 700 "$homedir/.ssh"
+ echo "$ssh_pubkey" >> "$homedir/.ssh/authorized_keys"
+ chmod 600 "$homedir/.ssh/authorized_keys"
+ if [ "$DEFAULT_USER" != "root" ]; then
+ chown -R "$DEFAULT_USER:$DEFAULT_USER" "$homedir/.ssh"
+ fi
+ log "SSH key added for user $DEFAULT_USER"
+fi
+
+log "Working directory: $homedir"
+cd "$homedir"
+
+# Run any pre-runner script provided by the user
+if [ -n "$script" ]; then
+ echo "$script" > pre-runner-script.sh
+ log "Running pre-runner script"
+ source pre-runner-script.sh
+fi
+export RUNNER_ALLOW_RUNASROOT=1
+
+# Number of runners to configure on this instance
+RUNNERS_PER_INSTANCE=$runners_per_instance
+
+# Download GitHub Actions runner binary
+ARCH=$(uname -m)
+if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
+ RUNNER_URL=$(echo "$runner_release" | sed 's/x64/arm64/g')
+ log "ARM detected, using: $RUNNER_URL"
+else
+ RUNNER_URL="$runner_release"
+ log "x64 detected, using: $RUNNER_URL"
+fi
+
+if command -v curl >/dev/null 2>&1; then
+  curl -fL "$RUNNER_URL" -o /tmp/runner.tar.gz
+elif command -v wget >/dev/null 2>&1; then
+  wget -q "$RUNNER_URL" -O /tmp/runner.tar.gz
+else
+ log_error "Neither curl nor wget found. Cannot download runner."
+ terminate_instance "No download tool available"
+fi
+log "Downloaded runner binary"
+
+# Helper function to fetch scripts
+fetch_script() {
+ local script_name="$1"
+ local url="${BASE_URL}/${script_name}"
+ local dest="${BIN_DIR}/${script_name}"
+
+ if command -v curl >/dev/null 2>&1; then
+ curl -fsSL "$url" -o "$dest" || {
+ log_error "Failed to fetch $script_name"
+ terminate_instance "Failed to download $script_name"
+ }
+ elif command -v wget >/dev/null 2>&1; then
+ wget -q "$url" -O "$dest" || {
+ log_error "Failed to fetch $script_name"
+ terminate_instance "Failed to download $script_name"
+ }
+ else
+ log_error "Neither curl nor wget found. Cannot download scripts."
+ terminate_instance "No download tool available"
+ fi
+}
+
+# Fetch job tracking scripts from GitHub
+# These scripts are called by GitHub runner hooks
+log "Fetching runner hook scripts"
+BASE_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/scripts"
+
+fetch_script "job-started-hook.sh"
+fetch_script "job-completed-hook.sh"
+fetch_script "check-runner-termination.sh"
+
+# Replace log prefix placeholders with actual values
+sed -i "s/LOG_PREFIX_JOB_STARTED/${log_prefix_job_started}/g" $BIN_DIR/job-started-hook.sh
+sed -i "s/LOG_PREFIX_JOB_COMPLETED/${log_prefix_job_completed}/g" $BIN_DIR/job-completed-hook.sh
+
+chmod +x $BIN_DIR/job-started-hook.sh $BIN_DIR/job-completed-hook.sh $BIN_DIR/check-runner-termination.sh
+
+# Set up job tracking directory
+mkdir -p $RUNNER_STATE_DIR/jobs
+touch $RUNNER_STATE_DIR/last-activity
+
+# Set up periodic termination check using systemd
+cat > /etc/systemd/system/runner-termination-check.service << EOF
+[Unit]
+Description=Check GitHub runner termination conditions
+After=network.target
+[Service]
+Type=oneshot
+Environment="RUNNER_GRACE_PERIOD=$runner_grace_period"
+Environment="RUNNER_INITIAL_GRACE_PERIOD=$runner_initial_grace_period"
+Environment="RUNNER_POLL_INTERVAL=$runner_poll_interval"
+ExecStart=$BIN_DIR/check-runner-termination.sh
+EOF
+
+cat > /etc/systemd/system/runner-termination-check.timer << EOF
+[Unit]
+Description=Periodic GitHub runner termination check
+Requires=runner-termination-check.service
+[Timer]
+OnBootSec=60s
+OnUnitActiveSec=${runner_poll_interval}s
+[Install]
+WantedBy=timers.target
+EOF
+
+systemctl daemon-reload
+systemctl enable runner-termination-check.timer
+systemctl start runner-termination-check.timer
+
+# Build metadata labels (these will be added to the runner labels)
+METADATA_LABELS=",${INSTANCE_ID},${INSTANCE_TYPE}"
+# Add instance name as a label if provided
+if [ -n "$instance_name" ]; then
+ INSTANCE_NAME_LABEL=$(echo "$instance_name" | tr ' /' '-' | tr -cd '[:alnum:]-_#')
+ METADATA_LABELS="${METADATA_LABELS},${INSTANCE_NAME_LABEL}"
+fi
+
+log "Setting up $RUNNERS_PER_INSTANCE runner(s)"
+
+# Export functions for subprocesses (variables already exported from runner-common.sh)
+export -f configure_runner
+export -f log
+export -f log_error
+export -f get_metadata
+export -f flush_cloudwatch_logs
+export -f deregister_all_runners
+export -f debug_sleep_and_shutdown
+export -f wait_for_dpkg_lock
+
+# Parse space-delimited tokens and pipe-delimited labels
+IFS=' ' read -ra tokens <<< "$runner_tokens"
+IFS='|' read -ra labels <<< "$runner_labels"
+
+num_runners=${#tokens[@]}
+log "Configuring $num_runners runner(s) in parallel"
+
+# Start configuration for each runner in parallel
+pids=()
+idxs=()
+for i in ${!tokens[@]}; do
+  token=${tokens[$i]}
+  label=${labels[$i]:-}
+  if [ -z "$token" ]; then
+    log_error "No token for runner $i"
+    continue
+  fi
+  (
+    # Override ERR trap in subshell to prevent global side effects. Write a
+    # failure status inside the trap, since the trap exits the subshell before
+    # the success line below would run.
+    trap 'echo "Subshell error on line $LINENO" >&2; echo 1 > /tmp/runner-$i-status; exit 1' ERR
+    configure_runner $i "$token" "${label}$METADATA_LABELS" "$homedir" "$repo" "$INSTANCE_ID" "$runner_grace_period" "$runner_initial_grace_period"
+    echo 0 > /tmp/runner-$i-status
+  ) &
+  pids+=($!)
+  idxs+=($i)
+  log "Started configuration for runner $i (PID: ${pids[-1]})"
+done
+
+# Wait for all background jobs to complete
+log "Waiting for all runner configurations to complete..."
+failed=0
+succeeded=0
+for k in ${!pids[@]}; do
+  wait ${pids[$k]}
+  i=${idxs[$k]}  # runner index can differ from pid index if a token was empty
+  status=1       # treat a missing status file as failure
+  if [ -f /tmp/runner-$i-status ]; then
+    status=$(cat /tmp/runner-$i-status)
+    rm -f /tmp/runner-$i-status
+  fi
+  if [ "$status" != "0" ]; then
+    log_error "Runner $i configuration failed"
+    failed=$((failed + 1))
+  else
+    succeeded=$((succeeded + 1))
+  fi
+done
+
+# Allow partial success - only terminate if ALL runners failed
+if [ $succeeded -eq 0 ] && [ $failed -gt 0 ]; then
+ terminate_instance "All runners failed to register"
+elif [ $failed -gt 0 ]; then
+ log "WARNING: $failed runner(s) failed, but $succeeded succeeded. Continuing with partial capacity."
+fi
+
+if [ $succeeded -gt 0 ]; then
+ log "$succeeded runner(s) registered and started successfully"
+ touch $RUNNER_STATE_DIR/registered
+else
+ log_error "No runners registered successfully"
+ terminate_instance "No runners registered successfully"
+fi
+
+# Kill registration watchdog now that runners are registered
+if [ -f $RUNNER_STATE_DIR/watchdog.pid ]; then
+ WATCHDOG_PID=$(cat $RUNNER_STATE_DIR/watchdog.pid)
+ kill $WATCHDOG_PID 2>/dev/null || true
+ rm -f $RUNNER_STATE_DIR/watchdog.pid
+fi
+
+# Final setup - ensure runner directories are accessible for debugging
+touch $RUNNER_STATE_DIR/started
+chmod o+x $homedir
+for RUNNER_DIR in $homedir/runner-*; do
+ [ -d "$RUNNER_DIR/_diag" ] && chmod 755 "$RUNNER_DIR/_diag"
+done
+
+log "Runner setup complete"
diff --git a/src/ec2_gha/start.py b/src/ec2_gha/start.py
index f8d7b53..6774b45 100644
--- a/src/ec2_gha/start.py
+++ b/src/ec2_gha/start.py
@@ -3,6 +3,7 @@
from os import environ
from string import Template
import json
+import subprocess
import boto3
from botocore.exceptions import ClientError
@@ -11,6 +12,60 @@
from gha_runner.helper.workflow_cmds import output
from copy import deepcopy
+from ec2_gha.defaults import AUTO, RUNNER_REGISTRATION_TIMEOUT
+
+
+def resolve_ref_to_sha(ref: str) -> str:
+ """Resolve a Git ref (branch/tag/SHA) to a commit SHA using local git.
+
+ Parameters
+ ----------
+ ref : str
+ The Git ref to resolve (branch name, tag, or SHA)
+
+ Returns
+ -------
+ str
+ The commit SHA
+
+ Raises
+ ------
+ RuntimeError
+ If the ref cannot be resolved to a SHA
+ """
+ # Handle Docker container ownership issues by marking directory as safe
+ # This is needed when running in GitHub Actions Docker containers where
+ # the workspace is owned by a different user than the container user
+ subprocess.run(
+ ['git', 'config', '--global', '--add', 'safe.directory', '/github/workspace'],
+ capture_output=True,
+ text=True,
+ check=True # Fail if this doesn't work - we need it for the next command
+ )
+
+ try:
+ # Use git rev-parse to resolve the ref to a SHA
+ # This works for branches, tags, and SHAs (returns SHAs unchanged)
+ result = subprocess.run(
+ ['git', 'rev-parse', ref],
+ capture_output=True,
+ text=True,
+ check=True
+ )
+ sha = result.stdout.strip()
+ if sha:
+        # Only log when resolution actually changed the value, i.e. the input
+        # was not already a full SHA
+ if sha != ref:
+ print(f"Resolved action_ref '{ref}' to SHA: {sha}")
+ return sha
+ else:
+ raise RuntimeError(f"git rev-parse returned empty output for ref '{ref}'")
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(
+ f"Failed to resolve action_ref '{ref}' to SHA. "
+ f"Error: {e.stderr or str(e)}"
+ )
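+
+# Illustrative usage (hypothetical refs): resolve_ref_to_sha("main") returns the
+# commit SHA at the branch tip; a full 40-hex SHA is returned unchanged, since
+# `git rev-parse` passes object names through.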
+
@dataclass
class StartAWS(CreateCloudInstance):
@@ -18,8 +73,6 @@ class StartAWS(CreateCloudInstance):
Parameters
----------
- home_dir : str
- The home directory of the user.
image_id : str
The ID of the AMI to use.
instance_type : str
@@ -32,6 +85,8 @@ class StartAWS(CreateCloudInstance):
CloudWatch Logs group name for streaming runner logs. Defaults to an empty string.
gh_runner_tokens : list[str]
A list of GitHub runner tokens. Defaults to an empty list.
+ home_dir : str
+ The home directory of the user. If not provided, will be inferred from the AMI.
iam_instance_profile : str
The name of the IAM role to use. Defaults to an empty string.
key_name : str
@@ -40,12 +95,16 @@ class StartAWS(CreateCloudInstance):
A comma-separated list of labels to apply to the runner. Defaults to an empty string.
max_instance_lifetime : str
Maximum instance lifetime in minutes before automatic shutdown. Defaults to "360" (6 hours).
- root_device_size : int
+ root_device_size : str
The size of the root device. Defaults to 0 which uses the default.
runner_initial_grace_period : str
Grace period in seconds before terminating if no jobs have started. Defaults to "180".
runner_grace_period : str
- Grace period in seconds before terminating instance after last job completes. Defaults to "120".
+ Grace period in seconds before terminating instance after last job completes. Defaults to "60".
+ runner_poll_interval : str
+ How often (in seconds) to check termination conditions. Defaults to "10".
+ runners_per_instance : int
+ Number of runners to register per instance. Defaults to 1.
script : str
The script to run on the instance. Defaults to an empty string.
security_group_id : str
@@ -61,20 +120,25 @@ class StartAWS(CreateCloudInstance):
"""
- home_dir: str
image_id: str
instance_type: str
region_name: str
repo: str
cloudwatch_logs_group: str = ""
+ debug: str = ""
gh_runner_tokens: list[str] = field(default_factory=list)
+ grouped_runner_tokens: list[list[str]] = field(default_factory=list)
+ home_dir: str = ""
iam_instance_profile: str = ""
+ instance_name: str = ""
key_name: str = ""
labels: str = ""
max_instance_lifetime: str = "360"
- root_device_size: int = 0
- runner_grace_period: str = "120"
+ root_device_size: str = "0"
+ runner_grace_period: str = "60"
runner_initial_grace_period: str = "180"
+ runner_poll_interval: str = "10"
+ runners_per_instance: int = 1
runner_release: str = ""
script: str = ""
security_group_id: str = ""
@@ -83,7 +147,68 @@ class StartAWS(CreateCloudInstance):
tags: list[dict[str, str]] = field(default_factory=list)
userdata: str = ""
- def _build_aws_params(self, user_data_params: dict) -> dict:
+    def _get_template_vars(self, idx: int | None = None) -> dict:
+ """Build template variables for instance naming.
+
+ Parameters
+ ----------
+ idx : int | None
+ Instance index for multi-instance launches
+
+ Returns
+ -------
+ dict
+ Dictionary of template variables for string substitution
+ """
+        import re
+
+ template_vars = {}
+
+ # Get repository name (just the basename)
+ if environ.get("GITHUB_REPOSITORY"):
+ template_vars["repo"] = environ["GITHUB_REPOSITORY"].split("/")[-1]
+ else:
+ template_vars["repo"] = "unknown"
+
+ # Get workflow full name (e.g., "Test pip install")
+ template_vars["workflow"] = environ.get("GITHUB_WORKFLOW", "unknown")
+
+ # Get workflow filename stem and ref from GITHUB_WORKFLOW_REF
+ workflow_ref = environ.get("GITHUB_WORKFLOW_REF", "")
+ if workflow_ref:
+ # Extract filename and ref from path like "owner/repo/.github/workflows/test.yml@ref"
+            m = re.search(r'/(?P<name>[^/@]+)\.(yml|yaml)@(?P<ref>[^@]+)$', workflow_ref)
+ if m:
+ # Get the workflow filename stem (e.g., "install" from "install.yaml")
+ template_vars["name"] = m['name']
+
+ # Clean up the ref - remove "refs/heads/" or "refs/tags/" prefix
+ ref = m['ref']
+ if ref.startswith('refs/heads/'):
+ ref = ref[11:]
+ elif ref.startswith('refs/tags/'):
+ ref = ref[10:]
+ template_vars["ref"] = ref
+ else:
+ template_vars["name"] = "unknown"
+ template_vars["ref"] = "unknown"
+ else:
+ template_vars["name"] = "unknown"
+ template_vars["ref"] = "unknown"
+
+ # `$run` (number)
+ run_num = environ.get("GITHUB_RUN_NUMBER", "unknown")
+ template_vars["run"] = run_num
+ template_vars["run_number"] = run_num # Legacy alias
+
+ # Add instance index if provided (for multi-instance launches)
+ if idx is not None:
+ template_vars["idx"] = str(idx)
+
+ return template_vars
+
+    def _build_aws_params(self, user_data_params: dict, idx: int | None = None) -> dict:
"""Build the parameters for the AWS API call.
Parameters
@@ -116,53 +241,31 @@ def _build_aws_params(self, user_data_params: dict) -> dict:
# Add default tags if not already present
default_tags = []
existing_keys = {tag["Key"] for tag in self.tags}
- import os
# Add Name tag if not provided
if "Name" not in existing_keys:
- # Try to create a sensible default Name tag
- name_parts = []
-
- # Use repository basename if available
- if os.environ.get("GITHUB_REPOSITORY"):
- repo_basename = os.environ["GITHUB_REPOSITORY"].split("/")[-1]
- name_parts.append(repo_basename)
-
- # Add workflow name if available
- if os.environ.get("GITHUB_WORKFLOW"):
- # Try to extract just the filename without extension from workflow ref
- workflow_ref = os.environ.get("GITHUB_WORKFLOW_REF", "")
- if workflow_ref:
- # Extract filename from path like "owner/repo/.github/workflows/test.yml@ref"
- import re
- match = re.search(r'/([^/@]+)\.(yml|yaml)@', workflow_ref)
- if match:
- name_parts.append(match.group(1))
- else:
- name_parts.append(os.environ["GITHUB_WORKFLOW"])
- else:
- name_parts.append(os.environ["GITHUB_WORKFLOW"])
-
- # Add run number if available
- if os.environ.get("GITHUB_RUN_NUMBER"):
- name_parts.append(f"#{os.environ['GITHUB_RUN_NUMBER']}")
-
- # Create Name tag if we have any parts
- if name_parts:
- default_tags.append({"Key": "Name", "Value": "/".join(name_parts)})
+ # Get template variables
+ template_vars = self._get_template_vars(idx)
+
+ # Apply the instance name template
+            name_template = Template(self.instance_name)
+ name_value = name_template.safe_substitute(**template_vars)
+
+ default_tags.append({"Key": "Name", "Value": name_value})
# Add repository tag if available
- if "repository" not in existing_keys and os.environ.get("GITHUB_REPOSITORY"):
- default_tags.append({"Key": "repository", "Value": os.environ["GITHUB_REPOSITORY"]})
+ if "Repository" not in existing_keys and environ.get("GITHUB_REPOSITORY"):
+ default_tags.append({"Key": "Repository", "Value": environ["GITHUB_REPOSITORY"]})
# Add workflow tag if available
- if "workflow" not in existing_keys and os.environ.get("GITHUB_WORKFLOW"):
- default_tags.append({"Key": "workflow", "Value": os.environ["GITHUB_WORKFLOW"]})
+ if "Workflow" not in existing_keys and environ.get("GITHUB_WORKFLOW"):
+ default_tags.append({"Key": "Workflow", "Value": environ["GITHUB_WORKFLOW"]})
# Add run URL tag if available
- if "gha_url" not in existing_keys and os.environ.get("GITHUB_SERVER_URL") and os.environ.get("GITHUB_REPOSITORY") and os.environ.get("GITHUB_RUN_ID"):
- gha_url = f"{os.environ['GITHUB_SERVER_URL']}/{os.environ['GITHUB_REPOSITORY']}/actions/runs/{os.environ['GITHUB_RUN_ID']}"
- default_tags.append({"Key": "gha_url", "Value": gha_url})
+ if "URL" not in existing_keys and environ.get("GITHUB_SERVER_URL") and environ.get("GITHUB_REPOSITORY") and environ.get("GITHUB_RUN_ID"):
+ gha_url = f"{environ['GITHUB_SERVER_URL']}/{environ['GITHUB_REPOSITORY']}/actions/runs/{environ['GITHUB_RUN_ID']}"
+ default_tags.append({"Key": "URL", "Value": gha_url})
# Combine user tags with default tags
all_tags = self.tags + default_tags
@@ -187,6 +290,19 @@ def _build_user_data(self, **kwargs) -> str:
The user data script as a string.
"""
+ # Import log constants to inject into template
+ from ec2_gha.log_constants import (
+ LOG_PREFIX_JOB_STARTED,
+ LOG_PREFIX_JOB_COMPLETED,
+ )
+
+ # Add log constants to the kwargs
+ kwargs['log_prefix_job_started'] = LOG_PREFIX_JOB_STARTED
+ kwargs['log_prefix_job_completed'] = LOG_PREFIX_JOB_COMPLETED
+
+ # Ensure instance_name has a default value
+ kwargs.setdefault('instance_name', '')
+
template = importlib.resources.files("ec2_gha").joinpath("templates/user-script.sh.templ")
with template.open() as f:
template_content = f.read()
@@ -194,7 +310,14 @@ def _build_user_data(self, **kwargs) -> str:
try:
parsed = Template(template_content)
runner_script = parsed.substitute(**kwargs)
+
+ # Log the final size for informational purposes
+ script_size = len(runner_script)
+ print(f"UserData size: {script_size} bytes ({script_size/16384*100:.1f}% of 16KB limit)")
+
return runner_script
+ except KeyError as e:
+ raise ValueError(f"Missing required template parameter: {e}") from e
except Exception as e:
raise Exception("Error parsing user data template") from e
@@ -228,14 +351,29 @@ def _modify_root_disk_size(self, client, params: dict) -> dict:
block_devices = deepcopy(image_options["Images"][0]["BlockDeviceMappings"])
for idx, block_device in enumerate(block_devices):
if block_device["DeviceName"] == root_device_name:
- if self.root_device_size > 0:
- block_devices[idx]["Ebs"]["VolumeSize"] = self.root_device_size
+ size_str = self.root_device_size.strip()
+ if size_str.startswith('+'):
+ # +N means "AMI size + N GB"
+ # Useful for disk-full testing: +2 means AMI size + 2GB
+ current_size = block_device.get("Ebs", {}).get("VolumeSize", 8)
+ buffer_gb = int(size_str[1:])
+ new_size = current_size + buffer_gb
+ block_devices[idx]["Ebs"]["VolumeSize"] = new_size
params["BlockDeviceMappings"] = block_devices
+ print(f"Setting disk size to {new_size}GB (AMI default {current_size}GB + {buffer_gb}GB)")
+ elif size_str != "0":
+ # Explicit size in GB
+ new_size = int(size_str)
+ if new_size > 0:
+ block_devices[idx]["Ebs"]["VolumeSize"] = new_size
+ params["BlockDeviceMappings"] = block_devices
+ # else: size_str == "0" means use AMI default, do nothing
break
else:
raise e
return params
+
def create_instances(self) -> dict[str, str]:
"""Create instances on AWS.
@@ -250,8 +388,6 @@ def create_instances(self) -> dict[str, str]:
raise ValueError("No GitHub runner tokens provided, cannot create instances.")
if not self.runner_release:
raise ValueError("No runner release provided, cannot create instances.")
- if not self.home_dir:
- raise ValueError("No home directory provided, cannot create instances.")
if not self.image_id:
raise ValueError("No image ID provided, cannot create instances.")
if not self.instance_type:
@@ -259,36 +395,107 @@ def create_instances(self) -> dict[str, str]:
if not self.region_name:
raise ValueError("No region name provided, cannot create instances.")
ec2 = boto3.client("ec2", region_name=self.region_name)
+
+ # Use AUTO to let the instance detect its own home directory
+ if not self.home_dir:
+ self.home_dir = AUTO
id_dict = {}
- for token in self.gh_runner_tokens:
- label = gh.GitHubInstance.generate_random_label()
- # Combine user labels with the generated runner label
- labels = f"{self.labels},{label}" if self.labels else label
+ # Determine which tokens to use
+ tokens_to_use = self.grouped_runner_tokens if self.grouped_runner_tokens else [[t] for t in self.gh_runner_tokens]
+
+ # Determine default instance_name based on instance count
+ instance_count = len(tokens_to_use)
+ default_instance_name = "$repo/$name#$run"
+ if instance_count > 1:
+ default_instance_name = "$repo/$name#$run $idx"
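+        # e.g. (illustrative) repo "ec2-gha", workflow file "demos.yml", run 25:
+        # one instance -> "ec2-gha/demos#25"; two -> "ec2-gha/demos#25 0" and "ec2-gha/demos#25 1"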
+
+ for idx, instance_tokens in enumerate(tokens_to_use):
+ # Generate labels and tokens for all runners on this instance
+ runner_configs = []
+ for runner_idx, token in enumerate(instance_tokens):
+ label = gh.GitHubInstance.generate_random_label()
+ # Combine user labels with the generated runner label
+ labels = f"{self.labels},{label}" if self.labels else label
+ runner_configs.append({
+ "token": token,
+ "labels": labels,
+ "runner_idx": runner_idx
+ })
+
+ # Simplify runner configs to save template space
+ # Pass tokens as space-delimited, labels as pipe-delimited
+ runner_tokens = " ".join(config["token"] for config in runner_configs)
+ runner_labels = "|".join(config["labels"] for config in runner_configs)
+
+ # Generate instance name using template variables
+            template_vars = self._get_template_vars(idx)
+ # Use provided instance_name or the smart default
+ name_pattern = self.instance_name if self.instance_name else default_instance_name
+ name_template = Template(name_pattern)
+ instance_name_value = name_template.safe_substitute(**template_vars)
+
+ # Resolve action_ref to a SHA for security and consistency
+ action_ref = environ.get("INPUT_ACTION_REF")
+ if not action_ref:
+ raise ValueError("action_ref is required but was not provided. Check that runner.yml passes it correctly.")
+ action_sha = resolve_ref_to_sha(action_ref)
user_data_params = {
+ "action_sha": action_sha, # The resolved SHA
"cloudwatch_logs_group": self.cloudwatch_logs_group,
+ "debug": self.debug,
"github_workflow": environ.get("GITHUB_WORKFLOW", ""),
"github_run_id": environ.get("GITHUB_RUN_ID", ""),
"github_run_number": environ.get("GITHUB_RUN_NUMBER", ""),
"homedir": self.home_dir,
- "labels": labels,
+ "instance_name": instance_name_value, # Add the generated instance name
"max_instance_lifetime": self.max_instance_lifetime,
"repo": self.repo,
"runner_grace_period": self.runner_grace_period,
"runner_initial_grace_period": self.runner_initial_grace_period,
+ "runner_poll_interval": self.runner_poll_interval,
+ "runner_registration_timeout": environ.get("INPUT_RUNNER_REGISTRATION_TIMEOUT", "").strip() or RUNNER_REGISTRATION_TIMEOUT,
"runner_release": self.runner_release,
+ "runners_per_instance": str(self.runners_per_instance),
+ "runner_tokens": runner_tokens, # Space-delimited tokens
+ "runner_labels": runner_labels, # Pipe-delimited labels
"script": self.script,
"ssh_pubkey": self.ssh_pubkey,
- "token": token,
"userdata": self.userdata,
}
- params = self._build_aws_params(user_data_params)
- if self.root_device_size > 0:
+ params = self._build_aws_params(user_data_params, idx=idx)
+ if self.root_device_size != "0":
params = self._modify_root_disk_size(ec2, params)
- result = ec2.run_instances(**params)
+
+ # Check UserData size before calling AWS
+ user_data_size = len(params.get("UserData", ""))
+ if user_data_size > 16384:
+ raise ValueError(
+ f"UserData exceeds AWS limit: {user_data_size} bytes (limit: 16384 bytes, "
+ f"over by: {user_data_size - 16384} bytes). "
+ f"Template needs to be reduced by at least {user_data_size - 16384} bytes."
+ )
+
+ try:
+ result = ec2.run_instances(**params)
+ except Exception as e:
+ if "User data is limited to 16384 bytes" in str(e):
+ # This shouldn't happen if our check above works, but just in case
+ raise ValueError(
+ f"UserData exceeds AWS limit: {user_data_size} bytes (limit: 16384 bytes, "
+ f"over by: {user_data_size - 16384} bytes)"
+ ) from e
+ raise
instances = result["Instances"]
id = instances[0]["InstanceId"]
- id_dict[id] = label
+ # For multiple runners per instance, store all labels
+ if self.runners_per_instance > 1:
+ all_labels = [config["labels"] for config in runner_configs]
+ id_dict[id] = all_labels
+ else:
+ # For backward compatibility, store single label as string
+ id_dict[id] = runner_configs[0]["labels"] if runner_configs else ""
return id_dict
def wait_until_ready(self, ids: list[str], **kwargs):
@@ -313,6 +520,34 @@ def wait_until_ready(self, ids: list[str], **kwargs):
else:
waiter.wait(InstanceIds=ids)
+ def get_instance_details(self, ids: list[str]) -> dict[str, dict]:
+ """Get instance details including DNS names.
+
+ Parameters
+ ----------
+ ids : list[str]
+ A list of instance IDs to get details for.
+
+ Returns
+ -------
+ dict[str, dict]
+ A dictionary mapping instance IDs to their details.
+ """
+ ec2 = boto3.client("ec2", self.region_name)
+ response = ec2.describe_instances(InstanceIds=ids)
+
+ details = {}
+ for reservation in response['Reservations']:
+ for instance in reservation['Instances']:
+ details[instance['InstanceId']] = {
+ 'PublicDnsName': instance.get('PublicDnsName', ''),
+ 'PublicIpAddress': instance.get('PublicIpAddress', ''),
+ 'PrivateIpAddress': instance.get('PrivateIpAddress', ''),
+ 'InstanceType': instance.get('InstanceType', ''),
+ 'State': instance['State']['Name']
+ }
+ return details
+
def set_instance_mapping(self, mapping: dict[str, str]):
"""Set the instance mapping.
@@ -324,12 +559,37 @@ def set_instance_mapping(self, mapping: dict[str, str]):
A dictionary of instance IDs and labels.
"""
- github_labels = list(mapping.values())
- output("mapping", json.dumps(mapping))
- output("instances", json.dumps(github_labels))
+ # Build matrix objects for all runners
+ matrix_objects = []
+ idx = 0
+
+ for instance_idx, (instance_id, labels) in enumerate(mapping.items()):
+ if isinstance(labels, list):
+ # Multiple runners on this instance
+ for runner_idx, label in enumerate(labels):
+ matrix_objects.append({
+ "idx": idx,
+ "id": label,
+ "instance_id": instance_id,
+ "instance_idx": instance_idx,
+ "runner_idx": runner_idx
+ })
+ idx += 1
+ else:
+ # Single runner on this instance
+ matrix_objects.append({
+ "idx": idx,
+ "id": labels,
+ "instance_id": instance_id,
+ "instance_idx": instance_idx,
+ "runner_idx": 0
+ })
+ idx += 1
+
+ output("mtx", json.dumps(matrix_objects))
# For single instance use, output simplified values
- if len(mapping) == 1:
+ if len(mapping) == 1 and self.runners_per_instance == 1:
instance_id = list(mapping.keys())[0]
label = list(mapping.values())[0]
output("instance-id", instance_id)
diff --git a/src/ec2_gha/templates/shared-functions.sh b/src/ec2_gha/templates/shared-functions.sh
new file mode 100644
index 0000000..b7167fa
--- /dev/null
+++ b/src/ec2_gha/templates/shared-functions.sh
@@ -0,0 +1,199 @@
+#!/bin/bash
+# Shared functions for runner scripts
+# These functions are used by multiple scripts throughout the runner lifecycle
+
+# Logging functions
+log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a /var/log/runner-setup.log; }
+log_error() { log "ERROR: $1" >&2; }
+
+dn=/dev/null
+
+# Wait for dpkg lock to be released (for Debian/Ubuntu systems)
+wait_for_dpkg_lock() {
+ local t=120
+ local L=/var/lib/dpkg/lock
+ while fuser $L-frontend >$dn 2>&1 || fuser $L >$dn 2>&1; do
+ if [ $t -le 0 ]; then
+ log "WARNING: dpkg lock t, proceeding anyway"
+ break
+ fi
+ log "dpkg is locked, waiting... ($t seconds remaining)"
+ sleep 5
+ t=$((t - 5))
+ done
+}
+
+# Function to flush CloudWatch logs before shutdown
+flush_cloudwatch_logs() {
+ log "Stopping CloudWatch agent to flush logs"
+ if systemctl is-active --quiet amazon-cloudwatch-agent; then
+ systemctl stop amazon-cloudwatch-agent 2>$dn || /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl -a stop -m ec2 2>$dn || true
+ fi
+}
+
+# Get EC2 instance metadata (IMDSv2 compatible)
+get_metadata() {
+ local path="$1"
+ local token=$(curl -X PUT -H "X-aws-ec2-metadata-token-ttl-seconds: 300" http://169.254.169.254/latest/api/token 2>$dn || true)
+ if [ -n "$token" ]; then
+ curl -s -H "X-aws-ec2-metadata-token: $token" "http://169.254.169.254/latest/meta-data/$path" 2>$dn || echo "unknown"
+ else
+ curl -s "http://169.254.169.254/latest/meta-data/$path" 2>$dn || echo "unknown"
+ fi
+ return 0 # Always return success to avoid set -e issues
+}
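+# Illustrative usage: INSTANCE_ID=$(get_metadata "instance-id")  # yields "unknown" if IMDS is unreachable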
+
+# Function to deregister all runners
+deregister_all_runners() {
+ for RUNNER_DIR in $homedir/runner-*; do
+ if [ -d "$RUNNER_DIR" ] && [ -f "$RUNNER_DIR/config.sh" ]; then
+ log "Deregistering runner in $RUNNER_DIR"
+ cd "$RUNNER_DIR"
+ pkill -INT -f "$RUNNER_DIR/run.sh" 2>$dn || true
+ sleep 1
+ if [ -f "$RUNNER_DIR/.runner-token" ]; then
+ TOKEN=$(cat "$RUNNER_DIR/.runner-token")
+        RUNNER_ALLOW_RUNASROOT=1 ./config.sh remove --token "$TOKEN" 2>&1
+ log "Deregistration exit: $?"
+ fi
+ fi
+ done
+}
+
+# Function to handle debug mode sleep and shutdown
+debug_sleep_and_shutdown() {
+ # Check if debug is a number (sleep duration in minutes)
+ if [[ "$debug" =~ ^[0-9]+$ ]]; then
+ local sleep_minutes="$debug"
+ local sleep_seconds=$((sleep_minutes * 60))
+ log "Debug: Sleeping ${sleep_minutes} minutes before shutdown..." || true
+ # Detect the SSH user from the home directory
+ local ssh_user=$(basename "$homedir" 2>$dn || echo "ec2-user")
+    local public_ip=$(get_metadata "public-ipv4")
+ log "SSH into instance with: ssh ${ssh_user}@${public_ip}" || true
+ log "Then check: /var/log/runner-setup.log and /var/log/runner-debug.log" || true
+ sleep "$sleep_seconds"
+ log "Debug period ended, shutting down" || true
+ elif [ "$debug" = "true" ] || [ "$debug" = "True" ] || [ "$debug" = "trace" ]; then
+ # Just tracing enabled, no sleep
+ log "Shutting down immediately (debug tracing enabled but no sleep requested)" || true
+ else
+ log "Shutting down immediately (debug mode not enabled)" || true
+ fi
+
+ # Try multiple shutdown methods as fallbacks (important when disk is full)
+ shutdown -h now 2>/dev/null || {
+ # If shutdown fails, try halt
+ halt -f 2>/dev/null || {
+ # If halt fails, try sysrq if available (Linux only)
+ if [ -w /proc/sysrq-trigger ]; then
+ echo 1 > /proc/sys/kernel/sysrq 2>/dev/null
+ echo o > /proc/sysrq-trigger 2>/dev/null
+ fi
+ # Last resort: force immediate reboot
+ reboot -f 2>/dev/null || true
+ }
+ }
+}
+
+# Function to handle fatal errors and terminate the instance
+terminate_instance() {
+ local reason="$1"
+ local instance_id=$(get_metadata "instance-id")
+
+ # Log error prominently
+ echo "========================================" | tee -a /var/log/runner-setup.log
+ log "FATAL ERROR DETECTED"
+ log "Reason: $reason"
+ log "Instance: $instance_id"
+ log "Script location: $(pwd)"
+ log "User: $(whoami)"
+ log "Debug trace available in: /var/log/runner-debug.log"
+ echo "========================================" | tee -a /var/log/runner-setup.log
+
+ # Try to remove runner if it was partially configured
+ if [ -f "$homedir/config.sh" ] && [ -n "${RUNNER_TOKEN:-}" ]; then
+ cd "$homedir" && ./config.sh remove --token "${RUNNER_TOKEN}" || true
+ fi
+
+ flush_cloudwatch_logs
+ debug_sleep_and_shutdown
+ exit 1
+}
+
+# Function to configure a single GitHub Actions runner
+configure_runner() {
+ local idx=$1
+ local token=$2
+ local labels=$3
+ local homedir=$4
+ local repo=$5
+ local instance_id=$6
+ local runner_grace_period=$7
+ local runner_initial_grace_period=$8
+
+ log "Configuring runner $idx..."
+
+ # Create runner directory and extract runner binary
+ local runner_dir="$homedir/runner-$idx"
+ mkdir -p "$runner_dir"
+ cd "$runner_dir"
+ tar -xzf /tmp/runner.tar.gz
+
+ # Install dependencies if needed
+ if [ -f ./bin/installdependencies.sh ]; then
+ # Quick check for common AMIs with pre-installed deps
+ if command -v dpkg >$dn 2>&1 && dpkg -l libicu[0-9]* 2>$dn | grep -q ^ii; then
+ log "Dependencies exist, skipping install"
+ else
+ log "Installing dependencies..."
+ set +e
+ sudo ./bin/installdependencies.sh >$dn 2>&1
+ local deps_result=$?
+ set -e
+ if [ $deps_result -ne 0 ]; then
+ log "Dependencies script failed, installing manually..."
+ if command -v dnf >$dn 2>&1; then
+ sudo dnf install -y libicu lttng-ust >$dn 2>&1 || true
+ elif command -v yum >$dn 2>&1; then
+ sudo yum install -y libicu >$dn 2>&1 || true
+ elif command -v apt-get >$dn 2>&1; then
+ wait_for_dpkg_lock
+ sudo apt-get update >$dn 2>&1 || true
+ sudo apt-get install -y libicu-dev >$dn 2>&1 || true
+ fi
+ fi
+ fi
+ fi
+
+ # Save token for deregistration
+ echo "$token" > .runner-token
+
+ # Create env file with runner hooks
+ cat > .env << EOF
+ACTIONS_RUNNER_HOOK_JOB_STARTED=/usr/local/bin/job-started-hook.sh
+ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/usr/local/bin/job-completed-hook.sh
+RUNNER_HOME=$runner_dir
+RUNNER_INDEX=$idx
+RUNNER_GRACE_PERIOD=$runner_grace_period
+RUNNER_INITIAL_GRACE_PERIOD=$runner_initial_grace_period
+EOF
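+  # The runner reads .env at startup; the ACTIONS_RUNNER_HOOK_* variables point
+  # at scripts the runner invokes at the start and end of every job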
+
+ # Configure runner with GitHub
+ local runner_name="ec2-$instance_id-$idx"
+ RUNNER_ALLOW_RUNASROOT=1 ./config.sh --url "https://github.com/$repo" --token "$token" --labels "$labels" --name "$runner_name" --disableupdate --unattended 2>&1 | tee /tmp/runner-$idx-config.log
+
+ if grep -q "Runner successfully added" /tmp/runner-$idx-config.log; then
+ log "Runner $idx registered successfully"
+ else
+ log_error "Failed to register runner $idx"
+ return 1
+ fi
+
+ # Start runner in background
+ RUNNER_ALLOW_RUNASROOT=1 nohup ./run.sh > $dn 2>&1 &
+ local pid=$!
+ log "Started runner $idx in $runner_dir (PID: $pid)"
+
+ return 0
+}
diff --git a/src/ec2_gha/templates/user-script.sh.templ b/src/ec2_gha/templates/user-script.sh.templ
index 006c4e1..b360a55 100644
--- a/src/ec2_gha/templates/user-script.sh.templ
+++ b/src/ec2_gha/templates/user-script.sh.templ
@@ -1,351 +1,65 @@
#!/bin/bash
set -e
-$userdata
-
-# Set up maximum lifetime timeout - do this early to ensure cleanup
-MAX_LIFETIME_MINUTES=$max_instance_lifetime
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Setting up maximum lifetime timeout: $${MAX_LIFETIME_MINUTES} minutes"
-nohup bash -c "sleep $${MAX_LIFETIME_MINUTES}m && echo '[$$(date)] Maximum lifetime reached' && shutdown -h now" > /var/log/max-lifetime.log 2>&1 &
-
-# Configure CloudWatch Logs if enabled
-if [ "$cloudwatch_logs_group" != "" ]; then
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Installing CloudWatch agent"
- # Use a subshell to prevent CloudWatch failures from stopping the entire script
- (
-
- # Wait for dpkg lock to be released (up to 2 minutes)
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Waiting for dpkg lock to be released..."
- timeout=120
- while fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 || fuser /var/lib/dpkg/lock >/dev/null 2>&1; do
- if [ $$timeout -le 0 ]; then
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] WARNING: dpkg lock timeout, proceeding anyway"
- break
- fi
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] dpkg is locked, waiting... ($$timeout seconds remaining)"
- sleep 5
- timeout=$$((timeout - 5))
- done
-
- # Download and install CloudWatch agent
- wget -q https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/amd64/latest/amazon-cloudwatch-agent.deb
- dpkg -i -E ./amazon-cloudwatch-agent.deb
- rm amazon-cloudwatch-agent.deb
-
- # Configure CloudWatch agent
- cat > /opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json << 'EOF'
-{
- "agent": {
- "run_as_user": "cwagent"
- },
- "logs": {
- "logs_collected": {
- "files": {
- "collect_list": [
- {
- "file_path": "/var/log/runner-setup.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/runner-setup",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-started-hook.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/job-started",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-completed-hook.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/job-completed",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/termination-check.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/termination",
- "timezone": "UTC"
- },
- {
- "file_path": "$homedir/_diag/Runner_**.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/runner-diag",
- "timezone": "UTC"
- },
- {
- "file_path": "$homedir/_diag/Worker_**.log",
- "log_group_name": "$cloudwatch_logs_group",
- "log_stream_name": "{instance_id}/worker-diag",
- "timezone": "UTC"
- }
- ]
- }
- }
- }
-}
-EOF
-
- # Start CloudWatch agent
- /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl \
- -a fetch-config \
- -m ec2 \
- -c file:/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json \
- -s
-
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] CloudWatch agent started"
- ) || echo "[$$(date '+%Y-%m-%d %H:%M:%S')] WARNING: CloudWatch agent installation failed, continuing without it"
-fi
-
-# Configure SSH access if public key provided
-if [ -n "$ssh_pubkey" ]; then
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Configuring SSH access"
-
- # Determine the default user based on the home directory owner
- DEFAULT_USER=$$(stat -c "%U" "$homedir" 2>/dev/null || echo "root")
-
- # Create .ssh directory if it doesn't exist
- mkdir -p "$homedir/.ssh"
- chmod 700 "$homedir/.ssh"
-
- # Add the public key to authorized_keys
- echo "$ssh_pubkey" >> "$homedir/.ssh/authorized_keys"
- chmod 600 "$homedir/.ssh/authorized_keys"
-
- # Set proper ownership
- if [ "$$DEFAULT_USER" != "root" ]; then
- chown -R "$$DEFAULT_USER:$$DEFAULT_USER" "$homedir/.ssh"
- fi
-
- echo "[$$(date '+%Y-%m-%d %H:%M:%S')] SSH key added for user $$DEFAULT_USER"
-fi
-
-# Redirect runner setup logs to a file for CloudWatch
-exec >> /var/log/runner-setup.log 2>&1
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] GitHub runner setup starting"
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Working directory: $homedir"
-cd "$homedir"
-echo "$script" > pre-runner-script.sh
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Running pre-runner script"
-source pre-runner-script.sh
-export RUNNER_ALLOW_RUNASROOT=1
-# We will get the latest release from the GitHub API
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Downloading runner from: $runner_release"
-curl -L $runner_release -o runner.tar.gz
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Extracting runner"
-# `--no-overwrite-dir` is important, otherwise `$homedir` ends up `chown`'d to `1001:docker`, and `sshd` will refuse connection attempts to `$homedir`
-tar --no-overwrite-dir -xzf runner.tar.gz
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Setting up job tracking scripts"
-# Create minimal job tracking scripts inline
-cat > /usr/local/bin/job-started-hook.sh << 'EOFS'
-#!/bin/bash
-exec >> /tmp/job-started-hook.log 2>&1
-echo "[$$(date)] Job STARTED : $${GITHUB_WORKFLOW}/$${GITHUB_JOB} (Run: $${GITHUB_RUN_ID}/$${GITHUB_RUN_NUMBER}, Attempt: $${GITHUB_RUN_ATTEMPT})"
-echo " Repository: $${GITHUB_REPOSITORY}"
-echo " Runner: $${RUNNER_NAME}"
-JOB_TRACK_DIR="/var/run/github-runner-jobs"
-mkdir -p "$${JOB_TRACK_DIR}"
-echo "{\"job_id\":\"$${GITHUB_JOB}\",\"run_id\":\"$${GITHUB_RUN_ID}\",\"workflow\":\"$${GITHUB_WORKFLOW}\",\"status\":\"running\"}" > "$${JOB_TRACK_DIR}/$${GITHUB_RUN_ID}-$${GITHUB_JOB}.job"
-# Update activity timestamp
-touch /var/run/github-runner-last-activity
-EOFS
-
-cat > /usr/local/bin/job-completed-hook.sh << 'EOFC'
-#!/bin/bash
-exec >> /tmp/job-completed-hook.log 2>&1
-echo "[$$(date)] Job COMPLETED: $${GITHUB_WORKFLOW}/$${GITHUB_JOB} (Run: $${GITHUB_RUN_ID}/$${GITHUB_RUN_NUMBER}, Attempt: $${GITHUB_RUN_ATTEMPT})"
-echo " Repository: $${GITHUB_REPOSITORY}"
-echo " Runner: $${RUNNER_NAME}"
-JOB_TRACK_DIR="/var/run/github-runner-jobs"
-if [ -f "$${JOB_TRACK_DIR}/$${GITHUB_RUN_ID}-$${GITHUB_JOB}.job" ]; then
- sed -i 's/"status":"running"/"status":"completed"/' "$${JOB_TRACK_DIR}/$${GITHUB_RUN_ID}-$${GITHUB_JOB}.job"
-fi
-# Count remaining running jobs
-RUNNING_JOBS=$$(grep -l '"status":"running"' "$${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
-echo " Running jobs remaining: $${RUNNING_JOBS}"
-# Update activity timestamp
-touch /var/run/github-runner-last-activity
-EOFC
-
-cat > /usr/local/bin/check-runner-termination.sh << 'EOFT'
-#!/bin/bash
-exec >> /tmp/termination-check.log 2>&1
-echo "[$$(date)] Checking termination conditions"
-
-ACTIVITY_FILE="/var/run/github-runner-last-activity"
-GRACE_PERIOD="$${RUNNER_GRACE_PERIOD:-120}"
-INITIAL_GRACE_PERIOD="$${RUNNER_INITIAL_GRACE_PERIOD:-180}"
-JOB_TRACK_DIR="/var/run/github-runner-jobs"
-
-# Check if activity file exists
-if [ ! -f "$${ACTIVITY_FILE}" ]; then
- echo "[$$(date)] WARNING: No activity file found, creating it now"
- touch "$${ACTIVITY_FILE}"
-fi
-
-# Get last activity time and current time
-LAST_ACTIVITY=$$(stat -c %Y "$${ACTIVITY_FILE}" 2>/dev/null || echo 0)
-NOW=$$(date +%s)
-IDLE_TIME=$$((NOW - LAST_ACTIVITY))
-
-# Check if any jobs have ever run
-if ls "$${JOB_TRACK_DIR}"/*.job 2>/dev/null | grep -q .; then
- JOBS_HAVE_RUN=true
- CURRENT_GRACE_PERIOD="$${GRACE_PERIOD}"
-else
- JOBS_HAVE_RUN=false
- CURRENT_GRACE_PERIOD="$${INITIAL_GRACE_PERIOD}"
+# Essential variables from template substitution
+export debug="$debug"
+export homedir="$homedir"
+export repo="$repo"
+export runner_tokens="$runner_tokens"
+export runner_labels="$runner_labels"
+export cloudwatch_logs_group="$cloudwatch_logs_group"
+export runner_grace_period="$runner_grace_period"
+export runner_initial_grace_period="$runner_initial_grace_period"
+export runner_poll_interval="$runner_poll_interval"
+export runner_registration_timeout="$runner_registration_timeout"
+export max_instance_lifetime="$max_instance_lifetime"
+export runners_per_instance="$runners_per_instance"
+export runner_release="$runner_release"
+export ssh_pubkey="$ssh_pubkey"
+export instance_name="$instance_name"
+export action_sha="$action_sha"
+
+# Custom userdata from user (if any)
+export userdata="$userdata"
+export script="$script"
+
+# Log prefixes
+export log_prefix_job_started="$log_prefix_job_started"
+export log_prefix_job_completed="$log_prefix_job_completed"
+
+# Fetch and execute the main script from GitHub
+# action_sha has already been resolved from action_ref in Python for security and consistency
+SCRIPT_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/$${action_sha}/src/ec2_gha/scripts/runner-setup.sh"
+echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Fetching main script from: $$SCRIPT_URL (SHA: $${action_sha})" | tee -a /var/log/runner-setup.log
+
+# Try to download with retries
+for i in {1..5}; do
+  if curl -fsSL "$$SCRIPT_URL" -o /tmp/runner-setup.sh; then
+ echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $$i)" | tee -a /var/log/runner-setup.log
+ break
+ elif wget -q "$$SCRIPT_URL" -O /tmp/runner-setup.sh; then
+ echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $$i)" | tee -a /var/log/runner-setup.log
+ break
+ else
+ echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Failed to download script (attempt $$i), retrying..." | tee -a /var/log/runner-setup.log
+ sleep 2
+ fi
+
+ if [ $$i -eq 5 ]; then
+ echo "[$$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download runner setup script after 5 attempts" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+done
+
+# Verify we got something
+if [ ! -s /tmp/runner-setup.sh ]; then
+ echo "[$$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Downloaded script is empty" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
fi
-echo "[$$(date)] Last activity: $$(date -d @$${LAST_ACTIVITY} '+%Y-%m-%d %H:%M:%S')"
-echo "[$$(date)] Current time: $$(date '+%Y-%m-%d %H:%M:%S')"
-echo "[$$(date)] Idle time: $${IDLE_TIME} seconds (grace period: $${CURRENT_GRACE_PERIOD} seconds)"
-echo "[$$(date)] Jobs have run: $${JOBS_HAVE_RUN}"
-
-# Check for running jobs first
-RUNNING_JOBS=$$(grep -l '"status":"running"' "$${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
-echo "[$$(date)] Running jobs: $${RUNNING_JOBS}"
-
-# Show status of each job for debugging
-echo "[$$(date)] Current job files:"
-for job_file in "$${JOB_TRACK_DIR}"/*.job; do
- if [ -f "$${job_file}" ]; then
- job_status=$$(grep -o '"status":"[^"]*"' "$${job_file}" || echo "unknown")
- echo "[$$(date)] $$(basename "$${job_file}"): $${job_status}"
- fi
-done || echo "[$$(date)] No job files found"
-
-# Never terminate if jobs are running
-if [ "$${RUNNING_JOBS}" -gt 0 ]; then
- echo "[$$(date)] Jobs are still running, not checking idle time"
-elif [ "$${IDLE_TIME}" -gt "$${CURRENT_GRACE_PERIOD}" ]; then
- echo "[$$(date)] No running jobs and no activity for $${IDLE_TIME} seconds, proceeding with termination"
-
- # Try to remove runner from GitHub first
- if [ -f "$homedir/config.sh" ]; then
- echo "[$$(date)] Removing runner from GitHub"
- cd "$homedir"
- # Stop the runner service
- RUNNER_PID=$$(pgrep -f "Runner.Listener" | head -1)
- if [ -n "$${RUNNER_PID}" ]; then
- echo "[$$(date)] Stopping runner PID $${RUNNER_PID}"
- kill -INT "$${RUNNER_PID}" 2>/dev/null || true
- # Wait for it to stop
- for i in {1..10}; do
- if ! kill -0 "$${RUNNER_PID}" 2>/dev/null; then
- echo "[$$(date)] Runner stopped"
- break
- fi
- sleep 1
- done
- fi
-
- # Remove runner from GitHub
- # We need RUNNER_ALLOW_RUNASROOT=1 to remove as root, just like when we configured it
- if RUNNER_ALLOW_RUNASROOT=1 ./config.sh remove --token $token; then
- echo "[$$(date)] Runner removed from GitHub successfully"
- else
- echo "[$$(date)] Failed to remove runner from GitHub"
- fi
- fi
-
- # Flush CloudWatch logs before shutdown
- echo "[$$(date)] Flushing CloudWatch logs"
- sync
- sleep 5
-
- sudo shutdown -h now "Runner terminating after idle timeout"
-else
- echo "[$$(date)] Activity detected within $${CURRENT_GRACE_PERIOD} seconds, not terminating"
-fi
-EOFT
-
-chmod +x /usr/local/bin/job-started-hook.sh /usr/local/bin/job-completed-hook.sh /usr/local/bin/check-runner-termination.sh
-
-# Set up runner hooks
-echo "ACTIONS_RUNNER_HOOK_JOB_STARTED=/usr/local/bin/job-started-hook.sh" > .env
-echo "ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/usr/local/bin/job-completed-hook.sh" >> .env
-echo "RUNNER_HOME=$homedir" >> .env
-echo "RUNNER_GRACE_PERIOD=$runner_grace_period" >> .env
-
-# Set up job tracking directory
-mkdir -p /var/run/github-runner-jobs
-
-# Create initial activity timestamp
-touch /var/run/github-runner-last-activity
-
-# Set up periodic termination check using systemd
-cat > /etc/systemd/system/runner-termination-check.service << 'EOF'
-[Unit]
-Description=Check GitHub runner termination conditions
-After=network.target
-
-[Service]
-Type=oneshot
-ExecStart=/usr/local/bin/check-runner-termination.sh
-EOF
-
-cat > /etc/systemd/system/runner-termination-check.timer << 'EOF'
-[Unit]
-Description=Periodic GitHub runner termination check
-Requires=runner-termination-check.service
-
-[Timer]
-OnBootSec=60s
-OnUnitActiveSec=30s
-
-[Install]
-WantedBy=timers.target
-EOF
-
-# Enable and start the timer
-systemctl daemon-reload
-systemctl enable runner-termination-check.timer
-systemctl start runner-termination-check.timer
-
-# Get instance metadata for descriptive runner name
-INSTANCE_ID=$$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-id || echo "unknown")
-INSTANCE_TYPE=$$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-type || echo "unknown")
-
-# Create runner name with just the instance ID for uniqueness
-RUNNER_NAME="ec2-$${INSTANCE_ID}"
-
-# Build additional labels with metadata for easier correlation
-# These will be visible in the GitHub runner management UI
-METADATA_LABELS=""
-METADATA_LABELS="$${METADATA_LABELS},instance-id:$${INSTANCE_ID}"
-METADATA_LABELS="$${METADATA_LABELS},instance-type:$${INSTANCE_TYPE}"
-
-# Add GitHub workflow metadata passed from the action
-if [ -n "$github_workflow" ]; then
- # Replace spaces and special chars in workflow name for label compatibility
- WORKFLOW_LABEL=$$(echo "$github_workflow" | tr ' /' '-' | tr -cd '[:alnum:]-_')
- METADATA_LABELS="$${METADATA_LABELS},workflow:$${WORKFLOW_LABEL}"
-fi
-if [ -n "$github_run_id" ]; then
- METADATA_LABELS="$${METADATA_LABELS},run-id:$github_run_id"
-fi
-if [ -n "$github_run_number" ]; then
- METADATA_LABELS="$${METADATA_LABELS},run-number:$github_run_number"
-fi
-
-# Combine provided labels (user + runner-xxx) with instance metadata labels
-# The $labels variable already contains user labels and the critical runner-xxx label from Python
-ALL_LABELS="$labels$${METADATA_LABELS}"
-
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Configuring runner for repo: $repo"
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Runner name: $${RUNNER_NAME}"
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Labels: $${ALL_LABELS}"
-./config.sh --url https://github.com/$repo --token $token --labels "$${ALL_LABELS}" --name "$${RUNNER_NAME}" --disableupdate
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Starting runner"
-# Create marker file for cleanup service
-touch /var/run/github-runner-started
-# Ensure CloudWatch agent can read diagnostic logs
-# The cwagent user needs to traverse into $homedir to reach _diag
-# Make $homedir world-executable (but not readable) so cwagent can traverse it
-chmod o+x $homedir
-echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Made $homedir traversable for CloudWatch agent"
-# Create _diag directory if it doesn't exist
-mkdir -p $homedir/_diag
-# The _diag files are already world-readable by default, just ensure the directory is too
-chmod 755 $homedir/_diag
-./run.sh
+# Make it executable and run it
+chmod +x /tmp/runner-setup.sh
+echo "[$$(date '+%Y-%m-%d %H:%M:%S')] Executing runner setup script" | tee -a /var/log/runner-setup.log
+exec /tmp/runner-setup.sh
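The `$$` sequences in the template above are Python `string.Template` escapes: `$name` is a substitution placeholder and `$$` renders as a literal `$`, which is why the rendered snapshots below show `$(date ...)` where the template writes `$$(date ...)`. A minimal sketch of that substitution step (illustrative names, not the action's actual code):

```python
from string import Template

# `$repo` is a placeholder; `$$` survives substitution as a literal `$`,
# so shell command substitutions like `$(date)` pass through intact.
tpl = Template('export repo="$repo"\necho "[$$(date)] repo: $repo"')
print(tpl.substitute(repo="omsf-eco-infra/awsinfratesting"))
# export repo="omsf-eco-infra/awsinfratesting"
# echo "[$(date)] repo: omsf-eco-infra/awsinfratesting"
```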
diff --git a/tests/__snapshots__/test_start.ambr b/tests/__snapshots__/test_start.ambr
index 36cb358..b53a1fd 100644
--- a/tests/__snapshots__/test_start.ambr
+++ b/tests/__snapshots__/test_start.ambr
@@ -1,358 +1,287 @@
# serializer version: 1
+# name: test_build_aws_params
+ dict({
+ 'IamInstanceProfile': dict({
+ 'Name': 'test',
+ }),
+ 'ImageId': 'ami-0772db4c976d21e9b',
+ 'InstanceInitiatedShutdownBehavior': 'terminate',
+ 'InstanceType': 't2.micro',
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroupIds': list([
+ 'test',
+ ]),
+ 'SubnetId': 'test',
+ 'TagSpecifications': list([
+ dict({
+ 'ResourceType': 'instance',
+ 'Tags': list([
+ dict({
+ 'Key': 'Name',
+ 'Value': 'test',
+ }),
+ dict({
+ 'Key': 'Owner',
+ 'Value': 'test',
+ }),
+ dict({
+ 'Key': 'Repository',
+ 'Value': 'Open-Athena/ec2-gha',
+ }),
+ dict({
+ 'Key': 'Workflow',
+ 'Value': 'CI',
+ }),
+ dict({
+ 'Key': 'URL',
+ 'Value': 'https://github.com/Open-Athena/ec2-gha/actions/runs/16725250800',
+ }),
+ ]),
+ }),
+ ]),
+ 'UserData': '''
+ #!/bin/bash
+ set -e
+
+ # Essential variables from template substitution
+ export debug=""
+ export homedir="/home/ec2-user"
+ export repo="omsf-eco-infra/awsinfratesting"
+ export runner_tokens="test"
+ export runner_labels="label"
+ export cloudwatch_logs_group=""
+ export runner_grace_period="61"
+ export runner_initial_grace_period="181"
+ export runner_poll_interval="11"
+ export runner_registration_timeout="300"
+ export max_instance_lifetime="360"
+ export runners_per_instance="1"
+ export runner_release="test.tar.gz"
+ export ssh_pubkey="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host"
+ export instance_name=""
+ export action_sha="abc123def456789012345678901234567890abcd"
+
+ # Custom userdata from user (if any)
+ export userdata=""
+ export script="echo 'Hello, World!'"
+
+ # Log prefixes
+ export log_prefix_job_started="Job started:"
+ export log_prefix_job_completed="Job completed:"
+
+ # Fetch and execute the main script from GitHub
+ # action_sha has already been resolved from action_ref in Python for security and consistency
+ SCRIPT_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/scripts/runner-setup.sh"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Fetching main script from: $SCRIPT_URL (SHA: ${action_sha})" | tee -a /var/log/runner-setup.log
+
+ # Try to download with retries
+ for i in {1..5}; do
+ if curl -sSL "$SCRIPT_URL" -o /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ elif wget -q "$SCRIPT_URL" -O /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ else
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Failed to download script (attempt $i), retrying..." | tee -a /var/log/runner-setup.log
+ sleep 2
+ fi
+
+ if [ $i -eq 5 ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download runner setup script after 5 attempts" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+ done
+
+ # Verify we got something
+ if [ ! -s /tmp/runner-setup.sh ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Downloaded script is empty" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+
+ # Make it executable and run it
+ chmod +x /tmp/runner-setup.sh
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Executing runner setup script" | tee -a /var/log/runner-setup.log
+ exec /tmp/runner-setup.sh
+ ''',
+ })
+# ---
+# name: test_build_aws_params_with_idx
+ dict({
+ 'IamInstanceProfile': dict({
+ 'Name': 'test',
+ }),
+ 'ImageId': 'ami-0772db4c976d21e9b',
+ 'InstanceInitiatedShutdownBehavior': 'terminate',
+ 'InstanceType': 't2.micro',
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroupIds': list([
+ 'test',
+ ]),
+ 'SubnetId': 'test',
+ 'TagSpecifications': list([
+ dict({
+ 'ResourceType': 'instance',
+ 'Tags': list([
+ dict({
+ 'Key': 'Name',
+ 'Value': 'ec2-gha/test#42 0',
+ }),
+ dict({
+ 'Key': 'Repository',
+ 'Value': 'Open-Athena/ec2-gha',
+ }),
+ dict({
+ 'Key': 'Workflow',
+ 'Value': 'CI',
+ }),
+ dict({
+ 'Key': 'URL',
+ 'Value': 'https://github.com/Open-Athena/ec2-gha/actions/runs/16725250800',
+ }),
+ ]),
+ }),
+ ]),
+ 'UserData': '''
+ #!/bin/bash
+ set -e
+
+ # Essential variables from template substitution
+ export debug=""
+ export homedir="/home/ec2-user"
+ export repo="omsf-eco-infra/awsinfratesting"
+ export runner_tokens="test"
+ export runner_labels="label"
+ export cloudwatch_logs_group=""
+ export runner_grace_period="61"
+ export runner_initial_grace_period="181"
+ export runner_poll_interval="11"
+ export runner_registration_timeout="300"
+ export max_instance_lifetime="360"
+ export runners_per_instance="1"
+ export runner_release="test.tar.gz"
+ export ssh_pubkey="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host"
+ export instance_name=""
+ export action_sha="abc123def456789012345678901234567890abcd"
+
+ # Custom userdata from user (if any)
+ export userdata=""
+ export script="echo 'Hello, World!'"
+
+ # Log prefixes
+ export log_prefix_job_started="Job started:"
+ export log_prefix_job_completed="Job completed:"
+
+ # Fetch and execute the main script from GitHub
+ # action_sha has already been resolved from action_ref in Python for security and consistency
+ SCRIPT_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/scripts/runner-setup.sh"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Fetching main script from: $SCRIPT_URL (SHA: ${action_sha})" | tee -a /var/log/runner-setup.log
+
+ # Try to download with retries
+ for i in {1..5}; do
+ if curl -sSL "$SCRIPT_URL" -o /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ elif wget -q "$SCRIPT_URL" -O /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ else
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Failed to download script (attempt $i), retrying..." | tee -a /var/log/runner-setup.log
+ sleep 2
+ fi
+
+ if [ $i -eq 5 ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download runner setup script after 5 attempts" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+ done
+
+ # Verify we got something
+ if [ ! -s /tmp/runner-setup.sh ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Downloaded script is empty" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+
+ # Make it executable and run it
+ chmod +x /tmp/runner-setup.sh
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Executing runner setup script" | tee -a /var/log/runner-setup.log
+ exec /tmp/runner-setup.sh
+ ''',
+ })
+# ---
# name: test_build_user_data
'''
#!/bin/bash
set -e
- echo 'custom userdata'
-
- # Set up maximum lifetime timeout - do this early to ensure cleanup
- MAX_LIFETIME_MINUTES=360
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Setting up maximum lifetime timeout: ${MAX_LIFETIME_MINUTES} minutes"
- nohup bash -c "sleep ${MAX_LIFETIME_MINUTES}m && echo '[$(date)] Maximum lifetime reached' && shutdown -h now" > /var/log/max-lifetime.log 2>&1 &
-
- # Configure CloudWatch Logs if enabled
- if [ "" != "" ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Installing CloudWatch agent"
- # Use a subshell to prevent CloudWatch failures from stopping the entire script
- (
-
- # Wait for dpkg lock to be released (up to 2 minutes)
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Waiting for dpkg lock to be released..."
- timeout=120
- while fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 || fuser /var/lib/dpkg/lock >/dev/null 2>&1; do
- if [ $timeout -le 0 ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] WARNING: dpkg lock timeout, proceeding anyway"
- break
- fi
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] dpkg is locked, waiting... ($timeout seconds remaining)"
- sleep 5
- timeout=$((timeout - 5))
- done
-
- # Download and install CloudWatch agent
- wget -q https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/amd64/latest/amazon-cloudwatch-agent.deb
- dpkg -i -E ./amazon-cloudwatch-agent.deb
- rm amazon-cloudwatch-agent.deb
-
- # Configure CloudWatch agent
- cat > /opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json << 'EOF'
- {
- "agent": {
- "run_as_user": "cwagent"
- },
- "logs": {
- "logs_collected": {
- "files": {
- "collect_list": [
- {
- "file_path": "/var/log/runner-setup.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/runner-setup",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-started-hook.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/job-started",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-completed-hook.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/job-completed",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/termination-check.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/termination",
- "timezone": "UTC"
- },
- {
- "file_path": "/home/test-user/_diag/Runner_**.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/runner-diag",
- "timezone": "UTC"
- },
- {
- "file_path": "/home/test-user/_diag/Worker_**.log",
- "log_group_name": "",
- "log_stream_name": "{instance_id}/worker-diag",
- "timezone": "UTC"
- }
- ]
- }
- }
- }
- }
- EOF
-
- # Start CloudWatch agent
- /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl \
- -a fetch-config \
- -m ec2 \
- -c file:/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json \
- -s
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] CloudWatch agent started"
- ) || echo "[$(date '+%Y-%m-%d %H:%M:%S')] WARNING: CloudWatch agent installation failed, continuing without it"
- fi
-
- # Configure SSH access if public key provided
- if [ -n "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host" ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Configuring SSH access"
-
- # Determine the default user based on the home directory owner
- DEFAULT_USER=$(stat -c "%U" "/home/test-user" 2>/dev/null || echo "root")
-
- # Create .ssh directory if it doesn't exist
- mkdir -p "/home/test-user/.ssh"
- chmod 700 "/home/test-user/.ssh"
-
- # Add the public key to authorized_keys
- echo "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host" >> "/home/test-user/.ssh/authorized_keys"
- chmod 600 "/home/test-user/.ssh/authorized_keys"
-
- # Set proper ownership
- if [ "$DEFAULT_USER" != "root" ]; then
- chown -R "$DEFAULT_USER:$DEFAULT_USER" "/home/test-user/.ssh"
- fi
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] SSH key added for user $DEFAULT_USER"
- fi
-
- # Redirect runner setup logs to a file for CloudWatch
- exec >> /var/log/runner-setup.log 2>&1
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] GitHub runner setup starting"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Working directory: /home/test-user"
- cd "/home/test-user"
- echo "echo 'test script'" > pre-runner-script.sh
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Running pre-runner script"
- source pre-runner-script.sh
- export RUNNER_ALLOW_RUNASROOT=1
- # We will get the latest release from the GitHub API
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Downloading runner from: https://example.com/runner.tar.gz"
- curl -L https://example.com/runner.tar.gz -o runner.tar.gz
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Extracting runner"
- # `--no-overwrite-dir` is important, otherwise `/home/test-user` ends up `chown`'d to `1001:docker`, and `sshd` will refuse connection attempts to `/home/test-user`
- tar --no-overwrite-dir -xzf runner.tar.gz
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Setting up job tracking scripts"
- # Create minimal job tracking scripts inline
- cat > /usr/local/bin/job-started-hook.sh << 'EOFS'
- #!/bin/bash
- exec >> /tmp/job-started-hook.log 2>&1
- echo "[$(date)] Job STARTED : ${GITHUB_WORKFLOW}/${GITHUB_JOB} (Run: ${GITHUB_RUN_ID}/${GITHUB_RUN_NUMBER}, Attempt: ${GITHUB_RUN_ATTEMPT})"
- echo " Repository: ${GITHUB_REPOSITORY}"
- echo " Runner: ${RUNNER_NAME}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
- mkdir -p "${JOB_TRACK_DIR}"
- echo "{\"job_id\":\"${GITHUB_JOB}\",\"run_id\":\"${GITHUB_RUN_ID}\",\"workflow\":\"${GITHUB_WORKFLOW}\",\"status\":\"running\"}" > "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job"
- # Update activity timestamp
- touch /var/run/github-runner-last-activity
- EOFS
-
- cat > /usr/local/bin/job-completed-hook.sh << 'EOFC'
- #!/bin/bash
- exec >> /tmp/job-completed-hook.log 2>&1
- echo "[$(date)] Job COMPLETED: ${GITHUB_WORKFLOW}/${GITHUB_JOB} (Run: ${GITHUB_RUN_ID}/${GITHUB_RUN_NUMBER}, Attempt: ${GITHUB_RUN_ATTEMPT})"
- echo " Repository: ${GITHUB_REPOSITORY}"
- echo " Runner: ${RUNNER_NAME}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
- if [ -f "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job" ]; then
- sed -i 's/"status":"running"/"status":"completed"/' "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job"
- fi
- # Count remaining running jobs
- RUNNING_JOBS=$(grep -l '"status":"running"' "${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
- echo " Running jobs remaining: ${RUNNING_JOBS}"
- # Update activity timestamp
- touch /var/run/github-runner-last-activity
- EOFC
-
- cat > /usr/local/bin/check-runner-termination.sh << 'EOFT'
- #!/bin/bash
- exec >> /tmp/termination-check.log 2>&1
- echo "[$(date)] Checking termination conditions"
-
- ACTIVITY_FILE="/var/run/github-runner-last-activity"
- GRACE_PERIOD="${RUNNER_GRACE_PERIOD:-120}"
- INITIAL_GRACE_PERIOD="${RUNNER_INITIAL_GRACE_PERIOD:-180}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
-
- # Check if activity file exists
- if [ ! -f "${ACTIVITY_FILE}" ]; then
- echo "[$(date)] WARNING: No activity file found, creating it now"
- touch "${ACTIVITY_FILE}"
- fi
-
- # Get last activity time and current time
- LAST_ACTIVITY=$(stat -c %Y "${ACTIVITY_FILE}" 2>/dev/null || echo 0)
- NOW=$(date +%s)
- IDLE_TIME=$((NOW - LAST_ACTIVITY))
-
- # Check if any jobs have ever run
- if ls "${JOB_TRACK_DIR}"/*.job 2>/dev/null | grep -q .; then
- JOBS_HAVE_RUN=true
- CURRENT_GRACE_PERIOD="${GRACE_PERIOD}"
- else
- JOBS_HAVE_RUN=false
- CURRENT_GRACE_PERIOD="${INITIAL_GRACE_PERIOD}"
- fi
-
- echo "[$(date)] Last activity: $(date -d @${LAST_ACTIVITY} '+%Y-%m-%d %H:%M:%S')"
- echo "[$(date)] Current time: $(date '+%Y-%m-%d %H:%M:%S')"
- echo "[$(date)] Idle time: ${IDLE_TIME} seconds (grace period: ${CURRENT_GRACE_PERIOD} seconds)"
- echo "[$(date)] Jobs have run: ${JOBS_HAVE_RUN}"
-
- # Check for running jobs first
- RUNNING_JOBS=$(grep -l '"status":"running"' "${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
- echo "[$(date)] Running jobs: ${RUNNING_JOBS}"
-
- # Show status of each job for debugging
- echo "[$(date)] Current job files:"
- for job_file in "${JOB_TRACK_DIR}"/*.job; do
- if [ -f "${job_file}" ]; then
- job_status=$(grep -o '"status":"[^"]*"' "${job_file}" || echo "unknown")
- echo "[$(date)] $(basename "${job_file}"): ${job_status}"
- fi
- done || echo "[$(date)] No job files found"
-
- # Never terminate if jobs are running
- if [ "${RUNNING_JOBS}" -gt 0 ]; then
- echo "[$(date)] Jobs are still running, not checking idle time"
- elif [ "${IDLE_TIME}" -gt "${CURRENT_GRACE_PERIOD}" ]; then
- echo "[$(date)] No running jobs and no activity for ${IDLE_TIME} seconds, proceeding with termination"
-
- # Try to remove runner from GitHub first
- if [ -f "/home/test-user/config.sh" ]; then
- echo "[$(date)] Removing runner from GitHub"
- cd "/home/test-user"
- # Stop the runner service
- RUNNER_PID=$(pgrep -f "Runner.Listener" | head -1)
- if [ -n "${RUNNER_PID}" ]; then
- echo "[$(date)] Stopping runner PID ${RUNNER_PID}"
- kill -INT "${RUNNER_PID}" 2>/dev/null || true
- # Wait for it to stop
- for i in {1..10}; do
- if ! kill -0 "${RUNNER_PID}" 2>/dev/null; then
- echo "[$(date)] Runner stopped"
- break
- fi
- sleep 1
- done
- fi
-
- # Remove runner from GitHub
- # We need RUNNER_ALLOW_RUNASROOT=1 to remove as root, just like when we configured it
- if RUNNER_ALLOW_RUNASROOT=1 ./config.sh remove --token test-token-xyz; then
- echo "[$(date)] Runner removed from GitHub successfully"
- else
- echo "[$(date)] Failed to remove runner from GitHub"
- fi
- fi
-
- # Flush CloudWatch logs before shutdown
- echo "[$(date)] Flushing CloudWatch logs"
- sync
- sleep 5
-
- sudo shutdown -h now "Runner terminating after idle timeout"
- else
- echo "[$(date)] Activity detected within ${CURRENT_GRACE_PERIOD} seconds, not terminating"
- fi
- EOFT
-
- chmod +x /usr/local/bin/job-started-hook.sh /usr/local/bin/job-completed-hook.sh /usr/local/bin/check-runner-termination.sh
-
- # Set up runner hooks
- echo "ACTIONS_RUNNER_HOOK_JOB_STARTED=/usr/local/bin/job-started-hook.sh" > .env
- echo "ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/usr/local/bin/job-completed-hook.sh" >> .env
- echo "RUNNER_HOME=/home/test-user" >> .env
- echo "RUNNER_GRACE_PERIOD=60" >> .env
-
- # Set up job tracking directory
- mkdir -p /var/run/github-runner-jobs
-
- # Create initial activity timestamp
- touch /var/run/github-runner-last-activity
-
- # Set up periodic termination check using systemd
- cat > /etc/systemd/system/runner-termination-check.service << 'EOF'
- [Unit]
- Description=Check GitHub runner termination conditions
- After=network.target
-
- [Service]
- Type=oneshot
- ExecStart=/usr/local/bin/check-runner-termination.sh
- EOF
-
- cat > /etc/systemd/system/runner-termination-check.timer << 'EOF'
- [Unit]
- Description=Periodic GitHub runner termination check
- Requires=runner-termination-check.service
-
- [Timer]
- OnBootSec=60s
- OnUnitActiveSec=30s
-
- [Install]
- WantedBy=timers.target
- EOF
-
- # Enable and start the timer
- systemctl daemon-reload
- systemctl enable runner-termination-check.timer
- systemctl start runner-termination-check.timer
-
- # Get instance metadata for descriptive runner name
- INSTANCE_ID=$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-id || echo "unknown")
- INSTANCE_TYPE=$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-type || echo "unknown")
-
- # Create runner name with just the instance ID for uniqueness
- RUNNER_NAME="ec2-${INSTANCE_ID}"
-
- # Build additional labels with metadata for easier correlation
- # These will be visible in the GitHub runner management UI
- METADATA_LABELS=""
- METADATA_LABELS="${METADATA_LABELS},instance-id:${INSTANCE_ID}"
- METADATA_LABELS="${METADATA_LABELS},instance-type:${INSTANCE_TYPE}"
-
- # Add GitHub workflow metadata passed from the action
- if [ -n "test-workflow" ]; then
- # Replace spaces and special chars in workflow name for label compatibility
- WORKFLOW_LABEL=$(echo "test-workflow" | tr ' /' '-' | tr -cd '[:alnum:]-_')
- METADATA_LABELS="${METADATA_LABELS},workflow:${WORKFLOW_LABEL}"
- fi
- if [ -n "123456789" ]; then
- METADATA_LABELS="${METADATA_LABELS},run-id:123456789"
- fi
- if [ -n "42" ]; then
- METADATA_LABELS="${METADATA_LABELS},run-number:42"
+ # Essential variables from template substitution
+ export debug=""
+ export homedir="/home/ec2-user"
+ export repo="omsf-eco-infra/awsinfratesting"
+ export runner_tokens="test"
+ export runner_labels="label"
+ export cloudwatch_logs_group=""
+ export runner_grace_period="61"
+ export runner_initial_grace_period="181"
+ export runner_poll_interval="11"
+ export runner_registration_timeout="300"
+ export max_instance_lifetime="360"
+ export runners_per_instance="1"
+ export runner_release="test.tar.gz"
+ export ssh_pubkey="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host"
+ export instance_name=""
+ export action_sha="abc123def456789012345678901234567890abcd"
+
+ # Custom userdata from user (if any)
+ export userdata=""
+ export script="echo 'Hello, World!'"
+
+ # Log prefixes
+ export log_prefix_job_started="Job started:"
+ export log_prefix_job_completed="Job completed:"
+
+ # Fetch and execute the main script from GitHub
+ # action_sha has already been resolved from action_ref in Python for security and consistency
+ SCRIPT_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/scripts/runner-setup.sh"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Fetching main script from: $SCRIPT_URL (SHA: ${action_sha})" | tee -a /var/log/runner-setup.log
+
+ # Try to download with retries
+ for i in {1..5}; do
+ if curl -sSL "$SCRIPT_URL" -o /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ elif wget -q "$SCRIPT_URL" -O /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ else
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Failed to download script (attempt $i), retrying..." | tee -a /var/log/runner-setup.log
+ sleep 2
+ fi
+
+ if [ $i -eq 5 ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download runner setup script after 5 attempts" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+ done
+
+ # Verify we got something
+ if [ ! -s /tmp/runner-setup.sh ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Downloaded script is empty" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
fi
- # Combine provided labels (user + runner-xxx) with instance metadata labels
- # The test-label variable already contains user labels and the critical runner-xxx label from Python
- ALL_LABELS="test-label${METADATA_LABELS}"
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Configuring runner for repo: test-org/test-repo"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Runner name: ${RUNNER_NAME}"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Labels: ${ALL_LABELS}"
- ./config.sh --url https://github.com/test-org/test-repo --token test-token-xyz --labels "${ALL_LABELS}" --name "${RUNNER_NAME}" --disableupdate
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting runner"
- # Create marker file for cleanup service
- touch /var/run/github-runner-started
- # Ensure CloudWatch agent can read diagnostic logs
- # The cwagent user needs to traverse into /home/test-user to reach _diag
- # Make /home/test-user world-executable (but not readable) so cwagent can traverse it
- chmod o+x /home/test-user
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Made /home/test-user traversable for CloudWatch agent"
- # Create _diag directory if it doesn't exist
- mkdir -p /home/test-user/_diag
- # The _diag files are already world-readable by default, just ensure the directory is too
- chmod 755 /home/test-user/_diag
- ./run.sh
-
+ # Make it executable and run it
+ chmod +x /tmp/runner-setup.sh
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Executing runner setup script" | tee -a /var/log/runner-setup.log
+ exec /tmp/runner-setup.sh
'''
# ---
# name: test_build_user_data_with_cloudwatch
@@ -360,354 +289,67 @@
#!/bin/bash
set -e
-
-
- # Set up maximum lifetime timeout - do this early to ensure cleanup
- MAX_LIFETIME_MINUTES=360
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Setting up maximum lifetime timeout: ${MAX_LIFETIME_MINUTES} minutes"
- nohup bash -c "sleep ${MAX_LIFETIME_MINUTES}m && echo '[$(date)] Maximum lifetime reached' && shutdown -h now" > /var/log/max-lifetime.log 2>&1 &
-
- # Configure CloudWatch Logs if enabled
- if [ "/aws/ec2/github-runners" != "" ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Installing CloudWatch agent"
- # Use a subshell to prevent CloudWatch failures from stopping the entire script
- (
-
- # Wait for dpkg lock to be released (up to 2 minutes)
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Waiting for dpkg lock to be released..."
- timeout=120
- while fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 || fuser /var/lib/dpkg/lock >/dev/null 2>&1; do
- if [ $timeout -le 0 ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] WARNING: dpkg lock timeout, proceeding anyway"
- break
- fi
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] dpkg is locked, waiting... ($timeout seconds remaining)"
- sleep 5
- timeout=$((timeout - 5))
- done
-
- # Download and install CloudWatch agent
- wget -q https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/amd64/latest/amazon-cloudwatch-agent.deb
- dpkg -i -E ./amazon-cloudwatch-agent.deb
- rm amazon-cloudwatch-agent.deb
-
- # Configure CloudWatch agent
- cat > /opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json << 'EOF'
- {
- "agent": {
- "run_as_user": "cwagent"
- },
- "logs": {
- "logs_collected": {
- "files": {
- "collect_list": [
- {
- "file_path": "/var/log/runner-setup.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/runner-setup",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-started-hook.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/job-started",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/job-completed-hook.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/job-completed",
- "timezone": "UTC"
- },
- {
- "file_path": "/tmp/termination-check.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/termination",
- "timezone": "UTC"
- },
- {
- "file_path": "/home/test-user/_diag/Runner_**.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/runner-diag",
- "timezone": "UTC"
- },
- {
- "file_path": "/home/test-user/_diag/Worker_**.log",
- "log_group_name": "/aws/ec2/github-runners",
- "log_stream_name": "{instance_id}/worker-diag",
- "timezone": "UTC"
- }
- ]
- }
- }
- }
- }
- EOF
-
- # Start CloudWatch agent
- /opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl \
- -a fetch-config \
- -m ec2 \
- -c file:/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json \
- -s
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] CloudWatch agent started"
- ) || echo "[$(date '+%Y-%m-%d %H:%M:%S')] WARNING: CloudWatch agent installation failed, continuing without it"
- fi
-
- # Configure SSH access if public key provided
- if [ -n "" ]; then
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Configuring SSH access"
-
- # Determine the default user based on the home directory owner
- DEFAULT_USER=$(stat -c "%U" "/home/test-user" 2>/dev/null || echo "root")
-
- # Create .ssh directory if it doesn't exist
- mkdir -p "/home/test-user/.ssh"
- chmod 700 "/home/test-user/.ssh"
-
- # Add the public key to authorized_keys
- echo "" >> "/home/test-user/.ssh/authorized_keys"
- chmod 600 "/home/test-user/.ssh/authorized_keys"
-
- # Set proper ownership
- if [ "$DEFAULT_USER" != "root" ]; then
- chown -R "$DEFAULT_USER:$DEFAULT_USER" "/home/test-user/.ssh"
- fi
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] SSH key added for user $DEFAULT_USER"
- fi
-
- # Redirect runner setup logs to a file for CloudWatch
- exec >> /var/log/runner-setup.log 2>&1
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] GitHub runner setup starting"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Working directory: /home/test-user"
- cd "/home/test-user"
- echo "echo 'test script'" > pre-runner-script.sh
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Running pre-runner script"
- source pre-runner-script.sh
- export RUNNER_ALLOW_RUNASROOT=1
- # We will get the latest release from the GitHub API
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Downloading runner from: https://example.com/runner.tar.gz"
- curl -L https://example.com/runner.tar.gz -o runner.tar.gz
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Extracting runner"
- # `--no-overwrite-dir` is important, otherwise `/home/test-user` ends up `chown`'d to `1001:docker`, and `sshd` will refuse connection attempts to `/home/test-user`
- tar --no-overwrite-dir -xzf runner.tar.gz
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Setting up job tracking scripts"
- # Create minimal job tracking scripts inline
- cat > /usr/local/bin/job-started-hook.sh << 'EOFS'
- #!/bin/bash
- exec >> /tmp/job-started-hook.log 2>&1
- echo "[$(date)] Job STARTED : ${GITHUB_WORKFLOW}/${GITHUB_JOB} (Run: ${GITHUB_RUN_ID}/${GITHUB_RUN_NUMBER}, Attempt: ${GITHUB_RUN_ATTEMPT})"
- echo " Repository: ${GITHUB_REPOSITORY}"
- echo " Runner: ${RUNNER_NAME}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
- mkdir -p "${JOB_TRACK_DIR}"
- echo "{\"job_id\":\"${GITHUB_JOB}\",\"run_id\":\"${GITHUB_RUN_ID}\",\"workflow\":\"${GITHUB_WORKFLOW}\",\"status\":\"running\"}" > "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job"
- # Update activity timestamp
- touch /var/run/github-runner-last-activity
- EOFS
-
- cat > /usr/local/bin/job-completed-hook.sh << 'EOFC'
- #!/bin/bash
- exec >> /tmp/job-completed-hook.log 2>&1
- echo "[$(date)] Job COMPLETED: ${GITHUB_WORKFLOW}/${GITHUB_JOB} (Run: ${GITHUB_RUN_ID}/${GITHUB_RUN_NUMBER}, Attempt: ${GITHUB_RUN_ATTEMPT})"
- echo " Repository: ${GITHUB_REPOSITORY}"
- echo " Runner: ${RUNNER_NAME}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
- if [ -f "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job" ]; then
- sed -i 's/"status":"running"/"status":"completed"/' "${JOB_TRACK_DIR}/${GITHUB_RUN_ID}-${GITHUB_JOB}.job"
- fi
- # Count remaining running jobs
- RUNNING_JOBS=$(grep -l '"status":"running"' "${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
- echo " Running jobs remaining: ${RUNNING_JOBS}"
- # Update activity timestamp
- touch /var/run/github-runner-last-activity
- EOFC
-
- cat > /usr/local/bin/check-runner-termination.sh << 'EOFT'
- #!/bin/bash
- exec >> /tmp/termination-check.log 2>&1
- echo "[$(date)] Checking termination conditions"
-
- ACTIVITY_FILE="/var/run/github-runner-last-activity"
- GRACE_PERIOD="${RUNNER_GRACE_PERIOD:-120}"
- INITIAL_GRACE_PERIOD="${RUNNER_INITIAL_GRACE_PERIOD:-180}"
- JOB_TRACK_DIR="/var/run/github-runner-jobs"
-
- # Check if activity file exists
- if [ ! -f "${ACTIVITY_FILE}" ]; then
- echo "[$(date)] WARNING: No activity file found, creating it now"
- touch "${ACTIVITY_FILE}"
- fi
-
- # Get last activity time and current time
- LAST_ACTIVITY=$(stat -c %Y "${ACTIVITY_FILE}" 2>/dev/null || echo 0)
- NOW=$(date +%s)
- IDLE_TIME=$((NOW - LAST_ACTIVITY))
-
- # Check if any jobs have ever run
- if ls "${JOB_TRACK_DIR}"/*.job 2>/dev/null | grep -q .; then
- JOBS_HAVE_RUN=true
- CURRENT_GRACE_PERIOD="${GRACE_PERIOD}"
- else
- JOBS_HAVE_RUN=false
- CURRENT_GRACE_PERIOD="${INITIAL_GRACE_PERIOD}"
- fi
-
- echo "[$(date)] Last activity: $(date -d @${LAST_ACTIVITY} '+%Y-%m-%d %H:%M:%S')"
- echo "[$(date)] Current time: $(date '+%Y-%m-%d %H:%M:%S')"
- echo "[$(date)] Idle time: ${IDLE_TIME} seconds (grace period: ${CURRENT_GRACE_PERIOD} seconds)"
- echo "[$(date)] Jobs have run: ${JOBS_HAVE_RUN}"
-
- # Check for running jobs first
- RUNNING_JOBS=$(grep -l '"status":"running"' "${JOB_TRACK_DIR}"/*.job 2>/dev/null | wc -l || echo 0)
- echo "[$(date)] Running jobs: ${RUNNING_JOBS}"
-
- # Show status of each job for debugging
- echo "[$(date)] Current job files:"
- for job_file in "${JOB_TRACK_DIR}"/*.job; do
- if [ -f "${job_file}" ]; then
- job_status=$(grep -o '"status":"[^"]*"' "${job_file}" || echo "unknown")
- echo "[$(date)] $(basename "${job_file}"): ${job_status}"
- fi
- done || echo "[$(date)] No job files found"
-
- # Never terminate if jobs are running
- if [ "${RUNNING_JOBS}" -gt 0 ]; then
- echo "[$(date)] Jobs are still running, not checking idle time"
- elif [ "${IDLE_TIME}" -gt "${CURRENT_GRACE_PERIOD}" ]; then
- echo "[$(date)] No running jobs and no activity for ${IDLE_TIME} seconds, proceeding with termination"
-
- # Try to remove runner from GitHub first
- if [ -f "/home/test-user/config.sh" ]; then
- echo "[$(date)] Removing runner from GitHub"
- cd "/home/test-user"
- # Stop the runner service
- RUNNER_PID=$(pgrep -f "Runner.Listener" | head -1)
- if [ -n "${RUNNER_PID}" ]; then
- echo "[$(date)] Stopping runner PID ${RUNNER_PID}"
- kill -INT "${RUNNER_PID}" 2>/dev/null || true
- # Wait for it to stop
- for i in {1..10}; do
- if ! kill -0 "${RUNNER_PID}" 2>/dev/null; then
- echo "[$(date)] Runner stopped"
- break
- fi
- sleep 1
- done
- fi
-
- # Remove runner from GitHub
- # We need RUNNER_ALLOW_RUNASROOT=1 to remove as root, just like when we configured it
- if RUNNER_ALLOW_RUNASROOT=1 ./config.sh remove --token test-token-xyz; then
- echo "[$(date)] Runner removed from GitHub successfully"
- else
- echo "[$(date)] Failed to remove runner from GitHub"
- fi
- fi
-
- # Flush CloudWatch logs before shutdown
- echo "[$(date)] Flushing CloudWatch logs"
- sync
- sleep 5
-
- sudo shutdown -h now "Runner terminating after idle timeout"
- else
- echo "[$(date)] Activity detected within ${CURRENT_GRACE_PERIOD} seconds, not terminating"
+ # Essential variables from template substitution
+ export debug=""
+ export homedir="/home/ec2-user"
+ export repo="omsf-eco-infra/awsinfratesting"
+ export runner_tokens="test"
+ export runner_labels="label"
+ export cloudwatch_logs_group="/aws/ec2/github-runners"
+ export runner_grace_period="61"
+ export runner_initial_grace_period="181"
+ export runner_poll_interval="11"
+ export runner_registration_timeout="300"
+ export max_instance_lifetime="360"
+ export runners_per_instance="1"
+ export runner_release="test.tar.gz"
+ export ssh_pubkey=""
+ export instance_name=""
+ export action_sha="abc123def456789012345678901234567890abcd"
+
+ # Custom userdata from user (if any)
+ export userdata=""
+ export script="echo 'Hello, World!'"
+
+ # Log prefixes
+ export log_prefix_job_started="Job started:"
+ export log_prefix_job_completed="Job completed:"
+
+ # Fetch and execute the main script from GitHub
+ # action_sha has already been resolved from action_ref in Python for security and consistency
+ SCRIPT_URL="https://raw.githubusercontent.com/Open-Athena/ec2-gha/${action_sha}/src/ec2_gha/scripts/runner-setup.sh"
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Fetching main script from: $SCRIPT_URL (SHA: ${action_sha})" | tee -a /var/log/runner-setup.log
+
+ # Try to download with retries
+ for i in {1..5}; do
+ if curl -sSL "$SCRIPT_URL" -o /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ elif wget -q "$SCRIPT_URL" -O /tmp/runner-setup.sh; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Successfully downloaded runner setup script (attempt $i)" | tee -a /var/log/runner-setup.log
+ break
+ else
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Failed to download script (attempt $i), retrying..." | tee -a /var/log/runner-setup.log
+ sleep 2
+ fi
+
+ if [ $i -eq 5 ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Failed to download runner setup script after 5 attempts" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
+ fi
+ done
+
+ # Verify we got something
+ if [ ! -s /tmp/runner-setup.sh ]; then
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] ERROR: Downloaded script is empty" | tee -a /var/log/runner-setup.log
+ shutdown -h now
+ exit 1
fi
- EOFT
-
- chmod +x /usr/local/bin/job-started-hook.sh /usr/local/bin/job-completed-hook.sh /usr/local/bin/check-runner-termination.sh
-
- # Set up runner hooks
- echo "ACTIONS_RUNNER_HOOK_JOB_STARTED=/usr/local/bin/job-started-hook.sh" > .env
- echo "ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/usr/local/bin/job-completed-hook.sh" >> .env
- echo "RUNNER_HOME=/home/test-user" >> .env
- echo "RUNNER_GRACE_PERIOD=30" >> .env
-
- # Set up job tracking directory
- mkdir -p /var/run/github-runner-jobs
-
- # Create initial activity timestamp
- touch /var/run/github-runner-last-activity
-
- # Set up periodic termination check using systemd
- cat > /etc/systemd/system/runner-termination-check.service << 'EOF'
- [Unit]
- Description=Check GitHub runner termination conditions
- After=network.target
-
- [Service]
- Type=oneshot
- ExecStart=/usr/local/bin/check-runner-termination.sh
- EOF
-
- cat > /etc/systemd/system/runner-termination-check.timer << 'EOF'
- [Unit]
- Description=Periodic GitHub runner termination check
- Requires=runner-termination-check.service
-
- [Timer]
- OnBootSec=60s
- OnUnitActiveSec=30s
-
- [Install]
- WantedBy=timers.target
- EOF
-
- # Enable and start the timer
- systemctl daemon-reload
- systemctl enable runner-termination-check.timer
- systemctl start runner-termination-check.timer
-
- # Get instance metadata for descriptive runner name
- INSTANCE_ID=$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-id || echo "unknown")
- INSTANCE_TYPE=$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-type || echo "unknown")
-
- # Create runner name with just the instance ID for uniqueness
- RUNNER_NAME="ec2-${INSTANCE_ID}"
-
- # Build additional labels with metadata for easier correlation
- # These will be visible in the GitHub runner management UI
- METADATA_LABELS=""
- METADATA_LABELS="${METADATA_LABELS},instance-id:${INSTANCE_ID}"
- METADATA_LABELS="${METADATA_LABELS},instance-type:${INSTANCE_TYPE}"
-
- # Add GitHub workflow metadata passed from the action
- if [ -n "test-workflow" ]; then
- # Replace spaces and special chars in workflow name for label compatibility
- WORKFLOW_LABEL=$(echo "test-workflow" | tr ' /' '-' | tr -cd '[:alnum:]-_')
- METADATA_LABELS="${METADATA_LABELS},workflow:${WORKFLOW_LABEL}"
- fi
- if [ -n "123456789" ]; then
- METADATA_LABELS="${METADATA_LABELS},run-id:123456789"
- fi
- if [ -n "42" ]; then
- METADATA_LABELS="${METADATA_LABELS},run-number:42"
- fi
-
- # Combine provided labels (user + runner-xxx) with instance metadata labels
- # The test-label variable already contains user labels and the critical runner-xxx label from Python
- ALL_LABELS="test-label${METADATA_LABELS}"
-
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Configuring runner for repo: test-org/test-repo"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Runner name: ${RUNNER_NAME}"
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Labels: ${ALL_LABELS}"
- ./config.sh --url https://github.com/test-org/test-repo --token test-token-xyz --labels "${ALL_LABELS}" --name "${RUNNER_NAME}" --disableupdate
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting runner"
- # Create marker file for cleanup service
- touch /var/run/github-runner-started
- # Ensure CloudWatch agent can read diagnostic logs
- # The cwagent user needs to traverse into /home/test-user to reach _diag
- # Make /home/test-user world-executable (but not readable) so cwagent can traverse it
- chmod o+x /home/test-user
- echo "[$(date '+%Y-%m-%d %H:%M:%S')] Made /home/test-user traversable for CloudWatch agent"
- # Create _diag directory if it doesn't exist
- mkdir -p /home/test-user/_diag
- # The _diag files are already world-readable by default, just ensure the directory is too
- chmod 755 /home/test-user/_diag
- ./run.sh
+ # Make it executable and run it
+ chmod +x /tmp/runner-setup.sh
+ echo "[$(date '+%Y-%m-%d %H:%M:%S')] Executing runner setup script" | tee -a /var/log/runner-setup.log
+ exec /tmp/runner-setup.sh
'''
# ---
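The `# serializer version: 1` header and `.ambr` extension match syrupy's Amber snapshot format, and the tests below compare the generated user data against these stored snapshots. A minimal sketch of the pattern (the `snapshot` fixture and `--snapshot-update` flag are syrupy's; the test body is illustrative):

```python
# Run with `pytest`; after an intentional change to the generated output
# (as in this diff), regenerate stored values with `pytest --snapshot-update`.
def test_build_user_data(snapshot):  # `snapshot` fixture provided by syrupy
    user_data = "#!/bin/bash\nset -e\n"  # stand-in for the generated script
    # Compared against the value stored under tests/__snapshots__/*.ambr
    assert user_data == snapshot
```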
diff --git a/tests/test_start.py b/tests/test_start.py
index d9c1b3f..931738c 100644
--- a/tests/test_start.py
+++ b/tests/test_start.py
@@ -5,80 +5,96 @@
from moto import mock_aws
from ec2_gha.start import StartAWS
+from ec2_gha.defaults import AUTO
@pytest.fixture(scope="function")
-def aws():
+def base_aws_params():
+ """Base parameters for StartAWS initialization"""
+ return {
+ "gh_runner_tokens": ["testing"],
+ "home_dir": "/home/ec2-user",
+ "image_id": "ami-0772db4c976d21e9b",
+ "instance_type": "t2.micro",
+ "region_name": "us-east-1",
+ "repo": "omsf-eco-infra/awsinfratesting",
+ "runner_grace_period": "120",
+ "runner_release": "testing",
+ }
+
+
+@pytest.fixture(scope="function")
+def aws(base_aws_params, monkeypatch):
with mock_aws():
- params = {
- "gh_runner_tokens": ["testing"],
- "home_dir": "/home/ec2-user",
- "image_id": "ami-0772db4c976d21e9b",
- "instance_type": "t2.micro",
- "region_name": "us-east-1",
- "repo": "omsf-eco-infra/awsinfratesting",
- "runner_grace_period": "120",
- "runner_release": "testing",
- }
- yield StartAWS(**params)
+ monkeypatch.setenv("INPUT_ACTION_REF", "v2")
+ # Mock subprocess.run to handle both git config and git rev-parse
+ def mock_subprocess_run(cmd, *args, **kwargs):
+ if cmd[0] == 'git' and cmd[1] == 'config':
+ # git config command - just return success
+ mock_result = Mock()
+ mock_result.returncode = 0
+ mock_result.stdout = ""
+ mock_result.stderr = ""
+ return mock_result
+ elif cmd[0] == 'git' and cmd[1] == 'rev-parse':
+ # git rev-parse command - return mock SHA
+ mock_result = Mock()
+ mock_result.returncode = 0
+ mock_result.stdout = "abc123def456789012345678901234567890abcd\n"
+ mock_result.stderr = ""
+ return mock_result
+ else:
+ raise ValueError(f"Unexpected subprocess call: {cmd}")
+
+ with patch("ec2_gha.start.subprocess.run", side_effect=mock_subprocess_run):
+ yield StartAWS(**base_aws_params)
-def test_build_user_data(aws, snapshot):
- """Test that template parameters are correctly substituted using snapshot testing"""
- params = {
+@pytest.fixture(scope="function")
+def aws_params_user_data():
+ """User data params for AWS params tests"""
+ return {
+ "action_ref": "v2", # Test ref
+ "action_sha": "abc123def456789012345678901234567890abcd", # Mock SHA for testing
"cloudwatch_logs_group": "", # Empty = disabled
- "github_run_id": "123456789",
+ "debug": "", # Empty = disabled
+ "github_run_id": "16725250800",
"github_run_number": "42",
- "github_workflow": "test-workflow",
- "homedir": "/home/test-user",
- "labels": "test-label",
+ "github_workflow": "CI",
+ "homedir": "/home/ec2-user",
"max_instance_lifetime": "360",
- "repo": "test-org/test-repo",
- "runner_grace_period": "60",
- "runner_release": "https://example.com/runner.tar.gz",
- "script": "echo 'test script'",
+ "repo": "omsf-eco-infra/awsinfratesting",
+ "runner_grace_period": "61",
+ "runner_initial_grace_period": "181",
+ "runner_poll_interval": "11",
+ "runner_release": "test.tar.gz",
+ "runner_registration_timeout": "300",
+ "runners_per_instance": "1",
+ "runner_tokens": "test", # Space-delimited tokens
+ "runner_labels": "label", # Pipe-delimited labels
+ "script": "echo 'Hello, World!'",
"ssh_pubkey": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC test@host",
- "token": "test-token-xyz",
- "userdata": "echo 'custom userdata'",
+ "userdata": "",
}
- user_data = aws._build_user_data(**params)
- # Verify all substitutions happened (no template variables remain)
- template_vars = [ f'${k}' for k in params ]
- for var in template_vars:
- assert var not in user_data, f"Template variable {var} was not substituted"
- # Use snapshot to verify the entire output
+def test_build_user_data(aws, aws_params_user_data, snapshot):
+ """Test that template parameters are correctly substituted using snapshot testing"""
+ user_data = aws._build_user_data(**aws_params_user_data)
assert user_data == snapshot
-def test_build_user_data_with_cloudwatch(aws, snapshot):
+def test_build_user_data_with_cloudwatch(aws, aws_params_user_data, snapshot):
"""Test user data with CloudWatch Logs enabled using snapshot testing"""
- params = {
+ params = aws_params_user_data | {
"cloudwatch_logs_group": "/aws/ec2/github-runners",
- "github_run_id": "123456789",
- "github_run_number": "42",
- "github_workflow": "test-workflow",
- "homedir": "/home/test-user",
- "labels": "test-label",
- "max_instance_lifetime": "360",
- "repo": "test-org/test-repo",
- "runner_grace_period": "30",
- "runner_initial_grace_period": "120",
- "runner_release": "https://example.com/runner.tar.gz",
- "script": "echo 'test script'",
+ "runner_grace_period": "61",
+ "runner_initial_grace_period": "181",
+ "runner_poll_interval": "11",
"ssh_pubkey": "",
- "token": "test-token-xyz",
"userdata": "",
}
user_data = aws._build_user_data(**params)
-
- # Verify all substitutions happened (no template variables remain)
- template_vars = [ f'${k}' for k in params ]
- for var in template_vars:
- assert var not in user_data, f"Template variable {var} was not substituted"
-
- # Use snapshot to verify the entire output
assert user_data == snapshot
@@ -97,17 +113,13 @@ def test_build_user_data_missing_params(aws):
@pytest.fixture(scope="function")
-def complete_params():
- params = {
+def complete_params(base_aws_params):
+ """Extended parameters including AWS-specific configurations"""
+ return base_aws_params | {
"gh_runner_tokens": ["test"],
- "home_dir": "/home/ec2-user",
"iam_instance_profile": "test",
- "image_id": "ami-0772db4c976d21e9b",
- "instance_type": "t2.micro",
"labels": "",
- "region_name": "us-east-1",
- "repo": "omsf-eco-infra/awsinfratesting",
- "root_device_size": 100,
+ "root_device_size": "100",
"runner_release": "test.tar.gz",
"security_group_id": "test",
"subnet_id": "test",
@@ -116,58 +128,94 @@ def complete_params():
{"Key": "Owner", "Value": "test"},
],
}
- yield params
-@patch.dict('os.environ', {
- 'GITHUB_REPOSITORY': 'Open-Athena/ec2-gha',
- 'GITHUB_WORKFLOW': 'CI',
- 'GITHUB_SERVER_URL': 'https://github.com',
- 'GITHUB_RUN_ID': '16725250800'
-})
-def test_build_aws_params(complete_params):
- user_data_params = {
- "cloudwatch_logs_group": "",
- "github_run_id": "16725250800",
- "github_run_number": "1",
- "github_workflow": "CI",
- "homedir": "/home/ec2-user",
- "labels": "label",
- "max_instance_lifetime": "360",
- "repo": "omsf-eco-infra/awsinfratesting",
- "runner_grace_period": "120",
- "runner_initial_grace_period": "180",
- "runner_release": "test.tar.gz",
- "script": "echo 'Hello, World!'",
- "ssh_pubkey": "",
- "token": "test",
- "userdata": "",
+@pytest.fixture(scope="function")
+def github_env():
+ """Common GitHub environment variables for tests"""
+ return {
+ 'GITHUB_REPOSITORY': 'Open-Athena/ec2-gha',
+ 'GITHUB_WORKFLOW': 'CI',
+ 'GITHUB_WORKFLOW_REF': 'Open-Athena/ec2-gha/.github/workflows/test.yml@refs/heads/main',
+ 'GITHUB_RUN_NUMBER': '42',
+ 'GITHUB_SERVER_URL': 'https://github.com',
+ 'GITHUB_RUN_ID': '16725250800'
}
- aws = StartAWS(**complete_params)
- params = aws._build_aws_params(user_data_params)
-
- # Test structure without checking exact UserData content
- assert params["ImageId"] == "ami-0772db4c976d21e9b"
- assert params["InstanceType"] == "t2.micro"
- assert params["MinCount"] == 1
- assert params["MaxCount"] == 1
- assert params["SubnetId"] == "test"
- assert params["SecurityGroupIds"] == ["test"]
- assert params["IamInstanceProfile"] == {"Name": "test"}
- assert params["InstanceInitiatedShutdownBehavior"] == "terminate"
- assert "UserData" in params
- assert params["TagSpecifications"] == [
- {
- "ResourceType": "instance",
- "Tags": [
- {"Key": "Name", "Value": "test"},
- {"Key": "Owner", "Value": "test"},
- {"Key": "repository", "Value": "Open-Athena/ec2-gha"},
- {"Key": "workflow", "Value": "CI"},
- {"Key": "gha_url", "Value": "https://github.com/Open-Athena/ec2-gha/actions/runs/16725250800"},
- ],
+
+
+def test_build_aws_params_with_idx(complete_params, aws_params_user_data, github_env, snapshot):
+ """Test _build_aws_params with idx parameter for multi-instance scenarios"""
+ with patch.dict('os.environ', github_env):
+ user_data_params = aws_params_user_data
+ # Remove existing tags to test auto-generated Name tag
+ params_without_tags = complete_params.copy()
+ params_without_tags['tags'] = []
+ # Add instance_name template for testing
+ params_without_tags['instance_name'] = '$repo/$name#$run $idx'
+ aws = StartAWS(**params_without_tags)
+
+ params = aws._build_aws_params(user_data_params, idx=0)
+
+ # Use snapshot to verify the entire structure including UserData
+ assert params == snapshot
+
+
+def test_build_aws_params(complete_params, aws_params_user_data, github_env, snapshot):
+ """Test _build_aws_params without idx parameter"""
+ # Slightly modified github_env without WORKFLOW_REF
+ env = github_env.copy()
+ del env['GITHUB_WORKFLOW_REF']
+
+ with patch.dict('os.environ', env):
+ user_data_params = aws_params_user_data | {"github_run_number": "1"}
+ aws = StartAWS(**complete_params)
+ params = aws._build_aws_params(user_data_params)
+
+ # Use snapshot to verify the entire structure including UserData
+ assert params == snapshot
+
+
+def test_auto_home_dir(complete_params, monkeypatch):
+ """Test that home_dir is set to AUTO when not provided"""
+ params = complete_params.copy()
+ params['home_dir'] = ""
+ aws = StartAWS(**params)
+ aws.gh_runner_tokens = ["test-token"]
+ aws.runner_release = "https://example.com/runner.tar.gz"
+
+ monkeypatch.setenv("INPUT_ACTION_REF", "v2")
+
+ # Mock subprocess.run for git commands
+ def mock_subprocess_run(cmd, *args, **kwargs):
+ if cmd[0] == 'git' and cmd[1] == 'config':
+ mock_result = Mock()
+ mock_result.returncode = 0
+ return mock_result
+ elif cmd[0] == 'git' and cmd[1] == 'rev-parse':
+ mock_result = Mock()
+ mock_result.returncode = 0
+ mock_result.stdout = "abc123def456789012345678901234567890abcd\n"
+ return mock_result
+ else:
+ raise ValueError(f"Unexpected subprocess call: {cmd}")
+
+ with (
+ patch("boto3.client") as mock_client,
+ patch("ec2_gha.start.subprocess.run", side_effect=mock_subprocess_run)
+ ):
+ mock_ec2 = Mock()
+ mock_client.return_value = mock_ec2
+
+ # Mock the run_instances response
+ mock_ec2.run_instances.return_value = {
+ "Instances": [{"InstanceId": "i-123456"}]
}
- ]
+
+ result = aws.create_instances()
+
+ # Verify home_dir was set to AUTO
+ assert aws.home_dir == AUTO
+ assert "i-123456" in result
def test_modify_root_disk_size(complete_params):
@@ -205,6 +253,7 @@ def mock_describe_images(**kwargs):
error_response={"Error": {"Code": "DryRunOperation"}},
operation_name="DescribeImages"
)
+ # This is the second call without DryRun
return mock_image_data
mock_client.describe_images = mock_describe_images
@@ -252,9 +301,53 @@ def test_modify_root_disk_size_permission_error(complete_params):
assert 'AccessDenied' in str(exc_info.value)
+def test_modify_root_disk_size_plus_syntax(complete_params):
+ """Test the +N syntax for adding GB to AMI default size"""
+ mock_client = Mock()
+ complete_params["root_device_size"] = "+5"
+
+ # Mock image data with default size of 8GB
+ mock_image_data = {
+ "Images": [{
+ "RootDeviceName": "/dev/sda1",
+ "BlockDeviceMappings": [
+ {
+ "DeviceName": "/dev/sda1",
+ "Ebs": {
+ "DeleteOnTermination": True,
+ "VolumeSize": 8,
+ "VolumeType": "gp3"
+ }
+ }
+ ]
+ }]
+ }
+
+ def mock_describe_images(**kwargs):
+ if kwargs.get('DryRun', False):
+ raise ClientError(
+ error_response={"Error": {"Code": "DryRunOperation"}},
+ operation_name="DescribeImages"
+ )
+ return mock_image_data
+
+ mock_client.describe_images = mock_describe_images
+ aws = StartAWS(**complete_params)
+
+ # Test with +5 (should be 8 + 5 = 13)
+ result = aws._modify_root_disk_size(mock_client, {})
+ assert result["BlockDeviceMappings"][0]["Ebs"]["VolumeSize"] == 13
+
+ # Test with +2
+ complete_params["root_device_size"] = "+2"
+ aws = StartAWS(**complete_params)
+ result = aws._modify_root_disk_size(mock_client, {})
+ assert result["BlockDeviceMappings"][0]["Ebs"]["VolumeSize"] == 10
+
+
def test_modify_root_disk_size_no_change(complete_params):
mock_client = Mock()
- complete_params["root_device_size"] = 0
+ complete_params["root_device_size"] = "0"
mock_image_data = {
"Images": [{
@@ -312,12 +405,47 @@ def test_create_instances_missing_release(aws):
aws.create_instances()
-def test_create_instances_missing_home_dir(aws):
- aws.home_dir = ""
- with pytest.raises(
- ValueError, match="No home directory provided, cannot create instances."
- ):
- aws.create_instances()
+def test_create_instances_sets_auto_home_dir(base_aws_params, monkeypatch):
+ """Test that home_dir is set to AUTO when not provided"""
+ params = base_aws_params.copy()
+ params['home_dir'] = ""
+
+ monkeypatch.setenv("INPUT_ACTION_REF", "v2")
+
+ # Mock subprocess.run for git commands
+ def mock_subprocess_run(cmd, *args, **kwargs):
+ if cmd[0] == 'git' and cmd[1] == 'config':
+ mock_result = Mock()
+ mock_result.returncode = 0
+ return mock_result
+ elif cmd[0] == 'git' and cmd[1] == 'rev-parse':
+ mock_result = Mock()
+ mock_result.returncode = 0
+ mock_result.stdout = "abc123def456789012345678901234567890abcd\n"
+ return mock_result
+ else:
+ raise ValueError(f"Unexpected subprocess call: {cmd}")
+
+ with mock_aws():
+ aws = StartAWS(**params)
+ aws.gh_runner_tokens = ["test-token"]
+ aws.runner_release = "https://example.com/runner.tar.gz"
+
+ with patch("boto3.client") as mock_client, \
+ patch("ec2_gha.start.subprocess.run", side_effect=mock_subprocess_run):
+ mock_ec2 = Mock()
+ mock_client.return_value = mock_ec2
+
+ # Mock the run_instances response
+ mock_ec2.run_instances.return_value = {
+ "Instances": [{"InstanceId": "i-123456"}]
+ }
+
+ result = aws.create_instances()
+
+ # Verify home_dir was set to AUTO for runtime detection
+ assert aws.home_dir == AUTO
+ assert "i-123456" in result
def test_create_instances_missing_tokens(aws):
@@ -391,8 +519,8 @@ def test_set_instance_mapping(aws, monkeypatch):
with patch("builtins.open", mock_file):
aws.set_instance_mapping(mapping)
- # Should be called 4 times for single instance (mapping, instances, instance-id, label)
- assert mock_file.call_count == 4
+ # Should be called 3 times for a single instance (mtx, instance-id, label)
+ assert mock_file.call_count == 3
assert all(call[0][0] == "mock_output_file" for call in mock_file.call_args_list)
@@ -404,6 +532,6 @@ def test_set_instance_mapping_multiple(aws, monkeypatch):
with patch("builtins.open", mock_file):
aws.set_instance_mapping(mapping)
- # Should be called 2 times for multiple instances (mapping, instances only)
- assert mock_file.call_count == 2
+ # Should be called once for multiple instances (mtx only)
+ assert mock_file.call_count == 1
assert all(call[0][0] == "mock_output_file" for call in mock_file.call_args_list)
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..8851867
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,766 @@
+version = 1
+revision = 3
+requires-python = ">=3.10"
+
+[[package]]
+name = "boto3"
+version = "1.40.25"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore" },
+ { name = "jmespath" },
+ { name = "s3transfer" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2c/36/de7e622fd7907faec3823eaee7299b55130f577a4ba609717a290e9f3897/boto3-1.40.25.tar.gz", hash = "sha256:debfa4b2c67492d53629a52c999d71cddc31041a8b62ca1a8b1fb60fb0712ee1", size = 111534, upload-time = "2025-09-05T19:23:21.942Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/9a/6b280f01f5ec7e812ac8be9803bf52868b190e15c500bee3319d9d68eb34/boto3-1.40.25-py3-none-any.whl", hash = "sha256:d39bc3deb6780d910f00580837b720132055b0604769fd978780865ed3c019ea", size = 139325, upload-time = "2025-09-05T19:23:20.551Z" },
+]
+
+[[package]]
+name = "botocore"
+version = "1.40.25"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jmespath" },
+ { name = "python-dateutil" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1a/ba/7faa7e1061c2d2d60700815928ec0e5a7eeb83c5311126eccc6125e1797b/botocore-1.40.25.tar.gz", hash = "sha256:41fd186018a48dc517a4312a8d3085d548cb3fb1f463972134140bf7ee55a397", size = 14331329, upload-time = "2025-09-05T19:23:12.37Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/e5/4c32b35109bc3f8f8ebe3d78f952d2bf702bacce975a45997cc268c11860/botocore-1.40.25-py3-none-any.whl", hash = "sha256:5603ea9955cd31974446f0b5688911a5dad71fbdfbf7457944cda8a83fcf2a9e", size = 14003384, upload-time = "2025-09-05T19:23:09.731Z" },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.8.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" },
+ { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" },
+ { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" },
+ { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" },
+ { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" },
+ { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" },
+ { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" },
+ { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" },
+ { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" },
+ { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" },
+ { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" },
+ { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" },
+ { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" },
+ { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" },
+ { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" },
+ { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" },
+ { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" },
+ { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" },
+ { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" },
+ { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" },
+ { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" },
+ { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" },
+ { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" },
+ { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" },
+ { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" },
+ { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
+ { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
+ { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
+ { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
+ { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
+ { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
+ { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
+ { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
+ { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
+ { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
+ { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
+ { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
+ { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
+ { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
+ { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
+ { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
+ { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "coverage"
+version = "7.10.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90", size = 823736, upload-time = "2025-08-29T15:35:16.668Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/1d/2e64b43d978b5bd184e0756a41415597dfef30fcbd90b747474bd749d45f/coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356", size = 217025, upload-time = "2025-08-29T15:32:57.169Z" },
+ { url = "https://files.pythonhosted.org/packages/23/62/b1e0f513417c02cc10ef735c3ee5186df55f190f70498b3702d516aad06f/coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301", size = 217419, upload-time = "2025-08-29T15:32:59.908Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/16/b800640b7a43e7c538429e4d7223e0a94fd72453a1a048f70bf766f12e96/coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460", size = 244180, upload-time = "2025-08-29T15:33:01.608Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/6f/5e03631c3305cad187eaf76af0b559fff88af9a0b0c180d006fb02413d7a/coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd", size = 245992, upload-time = "2025-08-29T15:33:03.239Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/a1/f30ea0fb400b080730125b490771ec62b3375789f90af0bb68bfb8a921d7/coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb", size = 247851, upload-time = "2025-08-29T15:33:04.603Z" },
+ { url = "https://files.pythonhosted.org/packages/02/8e/cfa8fee8e8ef9a6bb76c7bef039f3302f44e615d2194161a21d3d83ac2e9/coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6", size = 245891, upload-time = "2025-08-29T15:33:06.176Z" },
+ { url = "https://files.pythonhosted.org/packages/93/a9/51be09b75c55c4f6c16d8d73a6a1d46ad764acca0eab48fa2ffaef5958fe/coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945", size = 243909, upload-time = "2025-08-29T15:33:07.74Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/a6/ba188b376529ce36483b2d585ca7bdac64aacbe5aa10da5978029a9c94db/coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e", size = 244786, upload-time = "2025-08-29T15:33:08.965Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/4c/37ed872374a21813e0d3215256180c9a382c3f5ced6f2e5da0102fc2fd3e/coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1", size = 219521, upload-time = "2025-08-29T15:33:10.599Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/36/9311352fdc551dec5b973b61f4e453227ce482985a9368305880af4f85dd/coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528", size = 220417, upload-time = "2025-08-29T15:33:11.907Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/16/2bea27e212c4980753d6d563a0803c150edeaaddb0771a50d2afc410a261/coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f", size = 217129, upload-time = "2025-08-29T15:33:13.575Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/51/e7159e068831ab37e31aac0969d47b8c5ee25b7d307b51e310ec34869315/coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc", size = 217532, upload-time = "2025-08-29T15:33:14.872Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/c0/246ccbea53d6099325d25cd208df94ea435cd55f0db38099dd721efc7a1f/coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a", size = 247931, upload-time = "2025-08-29T15:33:16.142Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/fb/7435ef8ab9b2594a6e3f58505cc30e98ae8b33265d844007737946c59389/coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a", size = 249864, upload-time = "2025-08-29T15:33:17.434Z" },
+ { url = "https://files.pythonhosted.org/packages/51/f8/d9d64e8da7bcddb094d511154824038833c81e3a039020a9d6539bf303e9/coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62", size = 251969, upload-time = "2025-08-29T15:33:18.822Z" },
+ { url = "https://files.pythonhosted.org/packages/43/28/c43ba0ef19f446d6463c751315140d8f2a521e04c3e79e5c5fe211bfa430/coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153", size = 249659, upload-time = "2025-08-29T15:33:20.407Z" },
+ { url = "https://files.pythonhosted.org/packages/79/3e/53635bd0b72beaacf265784508a0b386defc9ab7fad99ff95f79ce9db555/coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5", size = 247714, upload-time = "2025-08-29T15:33:21.751Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/55/0964aa87126624e8c159e32b0bc4e84edef78c89a1a4b924d28dd8265625/coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619", size = 248351, upload-time = "2025-08-29T15:33:23.105Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/ab/6cfa9dc518c6c8e14a691c54e53a9433ba67336c760607e299bfcf520cb1/coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba", size = 219562, upload-time = "2025-08-29T15:33:24.717Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/18/99b25346690cbc55922e7cfef06d755d4abee803ef335baff0014268eff4/coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e", size = 220453, upload-time = "2025-08-29T15:33:26.482Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/ed/81d86648a07ccb124a5cf1f1a7788712b8d7216b593562683cd5c9b0d2c1/coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c", size = 219127, upload-time = "2025-08-29T15:33:27.777Z" },
+ { url = "https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea", size = 217324, upload-time = "2025-08-29T15:33:29.06Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634", size = 217560, upload-time = "2025-08-29T15:33:30.748Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6", size = 249053, upload-time = "2025-08-29T15:33:32.041Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9", size = 251802, upload-time = "2025-08-29T15:33:33.625Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c", size = 252935, upload-time = "2025-08-29T15:33:34.909Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a", size = 250855, upload-time = "2025-08-29T15:33:36.922Z" },
+ { url = "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5", size = 248974, upload-time = "2025-08-29T15:33:38.175Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972", size = 250409, upload-time = "2025-08-29T15:33:39.447Z" },
+ { url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d", size = 219724, upload-time = "2025-08-29T15:33:41.172Z" },
+ { url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629", size = 220536, upload-time = "2025-08-29T15:33:42.524Z" },
+ { url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80", size = 219171, upload-time = "2025-08-29T15:33:43.974Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6", size = 217351, upload-time = "2025-08-29T15:33:45.438Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80", size = 217600, upload-time = "2025-08-29T15:33:47.269Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003", size = 248600, upload-time = "2025-08-29T15:33:48.953Z" },
+ { url = "https://files.pythonhosted.org/packages/73/dd/508420fb47d09d904d962f123221bc249f64b5e56aa93d5f5f7603be475f/coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27", size = 251206, upload-time = "2025-08-29T15:33:50.697Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/1f/9020135734184f439da85c70ea78194c2730e56c2d18aee6e8ff1719d50d/coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4", size = 252478, upload-time = "2025-08-29T15:33:52.303Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/a4/3d228f3942bb5a2051fde28c136eea23a761177dc4ff4ef54533164ce255/coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d", size = 250637, upload-time = "2025-08-29T15:33:53.67Z" },
+ { url = "https://files.pythonhosted.org/packages/36/e3/293dce8cdb9a83de971637afc59b7190faad60603b40e32635cbd15fbf61/coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc", size = 248529, upload-time = "2025-08-29T15:33:55.022Z" },
+ { url = "https://files.pythonhosted.org/packages/90/26/64eecfa214e80dd1d101e420cab2901827de0e49631d666543d0e53cf597/coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc", size = 250143, upload-time = "2025-08-29T15:33:56.386Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/70/bd80588338f65ea5b0d97e424b820fb4068b9cfb9597fbd91963086e004b/coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e", size = 219770, upload-time = "2025-08-29T15:33:58.063Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/14/0b831122305abcc1060c008f6c97bbdc0a913ab47d65070a01dc50293c2b/coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32", size = 220566, upload-time = "2025-08-29T15:33:59.766Z" },
+ { url = "https://files.pythonhosted.org/packages/83/c6/81a83778c1f83f1a4a168ed6673eeedc205afb562d8500175292ca64b94e/coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2", size = 219195, upload-time = "2025-08-29T15:34:01.191Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/1c/ccccf4bf116f9517275fa85047495515add43e41dfe8e0bef6e333c6b344/coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b", size = 218059, upload-time = "2025-08-29T15:34:02.91Z" },
+ { url = "https://files.pythonhosted.org/packages/92/97/8a3ceff833d27c7492af4f39d5da6761e9ff624831db9e9f25b3886ddbca/coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393", size = 218287, upload-time = "2025-08-29T15:34:05.106Z" },
+ { url = "https://files.pythonhosted.org/packages/92/d8/50b4a32580cf41ff0423777a2791aaf3269ab60c840b62009aec12d3970d/coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27", size = 259625, upload-time = "2025-08-29T15:34:06.575Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/7e/6a7df5a6fb440a0179d94a348eb6616ed4745e7df26bf2a02bc4db72c421/coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df", size = 261801, upload-time = "2025-08-29T15:34:08.006Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/4c/a270a414f4ed5d196b9d3d67922968e768cd971d1b251e1b4f75e9362f75/coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb", size = 264027, upload-time = "2025-08-29T15:34:09.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/8b/3210d663d594926c12f373c5370bf1e7c5c3a427519a8afa65b561b9a55c/coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282", size = 261576, upload-time = "2025-08-29T15:34:11.585Z" },
+ { url = "https://files.pythonhosted.org/packages/72/d0/e1961eff67e9e1dba3fc5eb7a4caf726b35a5b03776892da8d79ec895775/coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4", size = 259341, upload-time = "2025-08-29T15:34:13.159Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/06/d6478d152cd189b33eac691cba27a40704990ba95de49771285f34a5861e/coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21", size = 260468, upload-time = "2025-08-29T15:34:14.571Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/73/737440247c914a332f0b47f7598535b29965bf305e19bbc22d4c39615d2b/coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0", size = 220429, upload-time = "2025-08-29T15:34:16.394Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/76/b92d3214740f2357ef4a27c75a526eb6c28f79c402e9f20a922c295c05e2/coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5", size = 221493, upload-time = "2025-08-29T15:34:17.835Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/8e/6dcb29c599c8a1f654ec6cb68d76644fe635513af16e932d2d4ad1e5ac6e/coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b", size = 219757, upload-time = "2025-08-29T15:34:19.248Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/aa/76cf0b5ec00619ef208da4689281d48b57f2c7fde883d14bf9441b74d59f/coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e", size = 217331, upload-time = "2025-08-29T15:34:20.846Z" },
+ { url = "https://files.pythonhosted.org/packages/65/91/8e41b8c7c505d398d7730206f3cbb4a875a35ca1041efc518051bfce0f6b/coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb", size = 217607, upload-time = "2025-08-29T15:34:22.433Z" },
+ { url = "https://files.pythonhosted.org/packages/87/7f/f718e732a423d442e6616580a951b8d1ec3575ea48bcd0e2228386805e79/coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034", size = 248663, upload-time = "2025-08-29T15:34:24.425Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/52/c1106120e6d801ac03e12b5285e971e758e925b6f82ee9b86db3aa10045d/coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1", size = 251197, upload-time = "2025-08-29T15:34:25.906Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/ec/3a8645b1bb40e36acde9c0609f08942852a4af91a937fe2c129a38f2d3f5/coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a", size = 252551, upload-time = "2025-08-29T15:34:27.337Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/70/09ecb68eeb1155b28a1d16525fd3a9b65fbe75337311a99830df935d62b6/coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb", size = 250553, upload-time = "2025-08-29T15:34:29.065Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/80/47df374b893fa812e953b5bc93dcb1427a7b3d7a1a7d2db33043d17f74b9/coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d", size = 248486, upload-time = "2025-08-29T15:34:30.897Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/65/9f98640979ecee1b0d1a7164b589de720ddf8100d1747d9bbdb84be0c0fb/coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747", size = 249981, upload-time = "2025-08-29T15:34:32.365Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/55/eeb6603371e6629037f47bd25bef300387257ed53a3c5fdb159b7ac8c651/coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5", size = 220054, upload-time = "2025-08-29T15:34:34.124Z" },
+ { url = "https://files.pythonhosted.org/packages/15/d1/a0912b7611bc35412e919a2cd59ae98e7ea3b475e562668040a43fb27897/coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713", size = 220851, upload-time = "2025-08-29T15:34:35.651Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/2d/11880bb8ef80a45338e0b3e0725e4c2d73ffbb4822c29d987078224fd6a5/coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32", size = 219429, upload-time = "2025-08-29T15:34:37.16Z" },
+ { url = "https://files.pythonhosted.org/packages/83/c0/1f00caad775c03a700146f55536ecd097a881ff08d310a58b353a1421be0/coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65", size = 218080, upload-time = "2025-08-29T15:34:38.919Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/c4/b1c5d2bd7cc412cbeb035e257fd06ed4e3e139ac871d16a07434e145d18d/coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6", size = 218293, upload-time = "2025-08-29T15:34:40.425Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/07/4468d37c94724bf6ec354e4ec2f205fda194343e3e85fd2e59cec57e6a54/coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0", size = 259800, upload-time = "2025-08-29T15:34:41.996Z" },
+ { url = "https://files.pythonhosted.org/packages/82/d8/f8fb351be5fee31690cd8da768fd62f1cfab33c31d9f7baba6cd8960f6b8/coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e", size = 261965, upload-time = "2025-08-29T15:34:43.61Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/70/65d4d7cfc75c5c6eb2fed3ee5cdf420fd8ae09c4808723a89a81d5b1b9c3/coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5", size = 264220, upload-time = "2025-08-29T15:34:45.387Z" },
+ { url = "https://files.pythonhosted.org/packages/98/3c/069df106d19024324cde10e4ec379fe2fb978017d25e97ebee23002fbadf/coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7", size = 261660, upload-time = "2025-08-29T15:34:47.288Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/8a/2974d53904080c5dc91af798b3a54a4ccb99a45595cc0dcec6eb9616a57d/coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5", size = 259417, upload-time = "2025-08-29T15:34:48.779Z" },
+ { url = "https://files.pythonhosted.org/packages/30/38/9616a6b49c686394b318974d7f6e08f38b8af2270ce7488e879888d1e5db/coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0", size = 260567, upload-time = "2025-08-29T15:34:50.718Z" },
+ { url = "https://files.pythonhosted.org/packages/76/16/3ed2d6312b371a8cf804abf4e14895b70e4c3491c6e53536d63fd0958a8d/coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7", size = 220831, upload-time = "2025-08-29T15:34:52.653Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/e5/d38d0cb830abede2adb8b147770d2a3d0e7fecc7228245b9b1ae6c24930a/coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930", size = 221950, upload-time = "2025-08-29T15:34:54.212Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/51/e48e550f6279349895b0ffcd6d2a690e3131ba3a7f4eafccc141966d4dea/coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b", size = 219969, upload-time = "2025-08-29T15:34:55.83Z" },
+ { url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3", size = 208986, upload-time = "2025-08-29T15:35:14.506Z" },
+]
+
+[package.optional-dependencies]
+toml = [
+ { name = "tomli", marker = "python_full_version <= '3.11'" },
+]
+
+[[package]]
+name = "cryptography"
+version = "45.0.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" },
+ { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" },
+ { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" },
+ { url = "https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" },
+ { url = "https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" },
+ { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" },
+ { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" },
+ { url = "https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" },
+ { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" },
+ { url = "https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" },
+ { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" },
+ { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" },
+ { url = "https://files.pythonhosted.org/packages/13/3e/e42f1528ca1ea82256b835191eab1be014e0f9f934b60d98b0be8a38ed70/cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252", size = 3572442, upload-time = "2025-09-01T11:14:39.836Z" },
+ { url = "https://files.pythonhosted.org/packages/59/aa/e947693ab08674a2663ed2534cd8d345cf17bf6a1facf99273e8ec8986dc/cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083", size = 4142233, upload-time = "2025-09-01T11:14:41.305Z" },
+ { url = "https://files.pythonhosted.org/packages/24/06/09b6f6a2fc43474a32b8fe259038eef1500ee3d3c141599b57ac6c57612c/cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130", size = 4376202, upload-time = "2025-09-01T11:14:43.047Z" },
+ { url = "https://files.pythonhosted.org/packages/00/f2/c166af87e95ce6ae6d38471a7e039d3a0549c2d55d74e059680162052824/cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4", size = 4141900, upload-time = "2025-09-01T11:14:45.089Z" },
+ { url = "https://files.pythonhosted.org/packages/16/b9/e96e0b6cb86eae27ea51fa8a3151535a18e66fe7c451fa90f7f89c85f541/cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141", size = 4375562, upload-time = "2025-09-01T11:14:47.166Z" },
+ { url = "https://files.pythonhosted.org/packages/36/d0/36e8ee39274e9d77baf7d0dafda680cba6e52f3936b846f0d56d64fec915/cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7", size = 3322781, upload-time = "2025-09-01T11:14:48.747Z" },
+ { url = "https://files.pythonhosted.org/packages/99/4e/49199a4c82946938a3e05d2e8ad9482484ba48bbc1e809e3d506c686d051/cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde", size = 3584634, upload-time = "2025-09-01T11:14:50.593Z" },
+ { url = "https://files.pythonhosted.org/packages/16/ce/5f6ff59ea9c7779dba51b84871c19962529bdcc12e1a6ea172664916c550/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34", size = 4149533, upload-time = "2025-09-01T11:14:52.091Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/13/b3cfbd257ac96da4b88b46372e662009b7a16833bfc5da33bb97dd5631ae/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9", size = 4385557, upload-time = "2025-09-01T11:14:53.551Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/c5/8c59d6b7c7b439ba4fc8d0cab868027fd095f215031bc123c3a070962912/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae", size = 4149023, upload-time = "2025-09-01T11:14:55.022Z" },
+ { url = "https://files.pythonhosted.org/packages/55/32/05385c86d6ca9ab0b4d5bb442d2e3d85e727939a11f3e163fc776ce5eb40/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b", size = 4385722, upload-time = "2025-09-01T11:14:57.319Z" },
+ { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" },
+]
+
+[[package]]
+name = "ec2-gha"
+version = "1.0.0"
+source = { editable = "." }
+dependencies = [
+ { name = "boto3" },
+ { name = "gha-runner" },
+]
+
+[package.optional-dependencies]
+test = [
+ { name = "moto" },
+ { name = "pytest" },
+ { name = "pytest-cov" },
+ { name = "responses" },
+ { name = "ruff" },
+ { name = "syrupy" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "boto3" },
+ { name = "gha-runner", git = "https://github.com/Open-Athena/gha-runner.git?rev=v1" },
+ { name = "moto", extras = ["ec2"], marker = "extra == 'test'" },
+ { name = "pytest", marker = "extra == 'test'" },
+ { name = "pytest-cov", marker = "extra == 'test'" },
+ { name = "responses", marker = "extra == 'test'" },
+ { name = "ruff", marker = "extra == 'test'" },
+ { name = "syrupy", marker = "extra == 'test'" },
+]
+provides-extras = ["test"]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
+]
+
+[[package]]
+name = "gha-runner"
+version = "0.6.1.post19+ge8ef9e3"
+source = { git = "https://github.com/Open-Athena/gha-runner.git?rev=v1#e8ef9e362e00d5be5a58a9af01091d784611bc05" }
+dependencies = [
+ { name = "requests" },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" },
+ { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" },
+ { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" },
+ { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" },
+ { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" },
+ { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" },
+ { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" },
+ { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" },
+ { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" },
+ { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" },
+ { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
+ { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
+ { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
+ { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
+ { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
+ { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
+ { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
+ { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
+ { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
+]
+
+[[package]]
+name = "moto"
+version = "5.1.12"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "boto3" },
+ { name = "botocore" },
+ { name = "cryptography" },
+ { name = "jinja2" },
+ { name = "python-dateutil" },
+ { name = "requests" },
+ { name = "responses" },
+ { name = "werkzeug" },
+ { name = "xmltodict" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fe/71/805c0a0b30e362cd759206d4723bad800bc8c6c83a7edd05e55747d03959/moto-5.1.12.tar.gz", hash = "sha256:6eca3a020cb89c188b763610c27c969c32b832205712d3bdcb1a6031a4005187", size = 7185928, upload-time = "2025-09-07T19:38:37.412Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/92/cfb9a8be3d6070bef53afb92e03a5a7eb6da0127b29a8438fe00b12f5ed2/moto-5.1.12-py3-none-any.whl", hash = "sha256:c9f1119ab57819ce4b88f793f51c6ca0361b6932a90c59865fd71022acfc5582", size = 5313196, upload-time = "2025-09-07T19:38:34.78Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pytest"
+version = "8.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+ { name = "tomli", marker = "python_full_version < '3.11'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
+]
+
+[[package]]
+name = "pytest-cov"
+version = "6.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "coverage", extra = ["toml"] },
+ { name = "pluggy" },
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/30/4c/f883ab8f0daad69f47efdf95f55a66b51a8b939c430dadce0611508d9e99/pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2", size = 70398, upload-time = "2025-09-06T15:40:14.361Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" },
+ { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" },
+ { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" },
+ { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" },
+ { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" },
+ { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" },
+ { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" },
+ { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
+ { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
+ { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
+ { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
+ { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
+ { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
+[[package]]
+name = "responses"
+version = "0.25.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0e/95/89c054ad70bfef6da605338b009b2e283485835351a9935c7bfbfaca7ffc/responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4", size = 79320, upload-time = "2025-08-08T19:01:46.709Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1c/4c/cc276ce57e572c102d9542d383b2cfd551276581dc60004cb94fe8774c11/responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c", size = 34769, upload-time = "2025-08-08T19:01:45.018Z" },
+]
+
+[[package]]
+name = "ruff"
+version = "0.12.12"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" },
+ { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" },
+ { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" },
+ { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" },
+ { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" },
+ { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" },
+ { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" },
+ { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" },
+ { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" },
+ { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" },
+]
+
+[[package]]
+name = "s3transfer"
+version = "0.13.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "botocore" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/05/d52bf1e65044b4e5e27d4e63e8d1579dbdec54fce685908ae09bc3720030/s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf", size = 150589, upload-time = "2025-07-18T19:22:42.31Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724", size = 85308, upload-time = "2025-07-18T19:22:40.947Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
+[[package]]
+name = "syrupy"
+version = "4.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8c/f8/022d8704a3314f3e96dbd6bbd16ebe119ce30e35f41aabfa92345652fceb/syrupy-4.9.1.tar.gz", hash = "sha256:b7d0fcadad80a7d2f6c4c71917918e8ebe2483e8c703dfc8d49cdbb01081f9a4", size = 52492, upload-time = "2025-03-24T01:36:37.225Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/9d/aef9ec5fd5a4ee2f6a96032c4eda5888c5c7cec65cef6b28c4fc37671d88/syrupy-4.9.1-py3-none-any.whl", hash = "sha256:b94cc12ed0e5e75b448255430af642516842a2374a46936dd2650cfb6dd20eda", size = 52214, upload-time = "2025-03-24T01:36:35.278Z" },
+]
+
+[[package]]
+name = "tomli"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" },
+ { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" },
+ { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" },
+ { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" },
+ { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" },
+ { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" },
+ { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" },
+ { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" },
+ { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" },
+ { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" },
+ { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" },
+ { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
+]
+
+[[package]]
+name = "werkzeug"
+version = "3.1.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" },
+]
+
+[[package]]
+name = "xmltodict"
+version = "0.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/51/ee/b30fdb281b39da57053bd7012870989de6f066d6ef1476d78de8fc427324/xmltodict-0.15.0.tar.gz", hash = "sha256:c6d46b4e3413d1e4fc3e5016f0f1c7a5c10f8ce39efaa0cb099af986ecfc9a53", size = 60285, upload-time = "2025-09-05T00:35:45.947Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d6/56/507a207b96e3aa7365c28bb6702011e7c76c899c1737966b25852eaef3e8/xmltodict-0.15.0-py2.py3-none-any.whl", hash = "sha256:8887783bf1faba1754fc45fdf3fe03fbb3629c811ae57f91c018aace4c58d4ed", size = 10965, upload-time = "2025-09-05T00:35:44.583Z" },
+]