diff --git a/.dockerignore b/.dockerignore index 8d02ebe4a..d39e7fee4 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,3 +3,31 @@ submits/ submit_results/ .venv/ node_modules/ + +# Python +__pycache__/ +*.py[cod] +*.pyd +*.pyo +*.so +.pytest_cache/ +.mypy_cache/ +.ruff_cache/ +.coverage +htmlcov/ + +# Node +**/dist/ +**/.vite/ + +# VCS / tooling +.git/ + +# Local data (avoid baking into images) +kelvin_data/ +**/*.log + +# Editor +.vscode/ +.idea/ +.DS_Store diff --git a/.env.example b/.env.example index b0b85a140..6081fc184 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,5 @@ ### Kelvin +# ------------------------------------------------------------------------------ # !!! IMPORTANT: For Production deployments using Deployment Service, all file paths must be specified as absolute due to use of DooD (Docker out of Docker) @@ -12,6 +13,9 @@ KELVIN__TASKS_PATH=./tasks KELVIN__SUBMITS_PATH=./submits # Path where submit results will be stored KELVIN__SUBMIT_RESULTS_PATH=./submit_results +# (Optional) Internal URL used by workers. Defaults to https://nginx when running locally with Docker; +# otherwise defaults to the request URL in production or non-Docker local environments. +# API_INTERNAL_BASEURL=https://custom-internal-url ### Postgres DATABASE__HOST=127.0.0.1 @@ -40,9 +44,21 @@ OPENAI__API_KEY=your_openai_api_key_here OPENAI__API_URL=http://localhost:8080/v1 OPENAI__MODEL=openai/gpt-oss-120b +### Evaluator Workers +# ------------------------------------------------------------------------------ +# Number of worker processes +EVALUATOR_CPU_REPLICAS=32 +EVALUATOR_CUDA_REPLICAS=32 + +# Redis Connection for Evaluators +# - If running LOCALLY (same machine as app): Leave these commented out or set to 'redis' and '6379'. +# - If running DISTRIBUTED (on a different machine): Set these to the IP/Host and Port of the main server's Redis. 
+# EVALUATOR_REDIS__HOST=redis +# EVALUATOR_REDIS__PORT=6379 + + ### Deployment Service -# ID of the docker group on the host machine (get it via `getent group docker | cut -d: -f3`) -DOCKER_GROUP_ID=999 +# ------------------------------------------------------------------------------ SECURITY__WEBHOOK_SECRET=yoursecretvalue SECURITY__ALLOWED_HOSTS=["localhost", "127.0.0.1", "nginx", "kelvin.cs.vsb.cz"] diff --git a/.github/workflows/build-evaluator-images.yml b/.github/workflows/build-evaluator-images.yml new file mode 100644 index 000000000..ff4c7837a --- /dev/null +++ b/.github/workflows/build-evaluator-images.yml @@ -0,0 +1,47 @@ +name: Evaluator Docker Images + +on: + pull_request: + merge_group: + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'build' || github.sha }} + cancel-in-progress: ${{ github.event_name != 'merge_group' }} + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v6 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker + + - name: Build images + run: | + python3 evaluator/images/build.py + + # Summary job to enable easier handling of required status checks. + # On PRs, we need everything to be green, while deploy jobs are skipped. + # On master, we need everything to be green. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + conclusion-images: + needs: [ build ] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. 
+ if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + - name: Conclusion Images + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) + # were either successful or skipped. + jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 451d2dd95..33d9c277d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ concurrency: env: # Configure a constant location for the uv cache UV_CACHE_DIR: /tmp/.uv-cache - UV_VERSION: "0.9.20" + UV_VERSION: "0.10.0" jobs: @@ -95,6 +95,9 @@ jobs: test-deployment-service: runs-on: ubuntu-latest + defaults: + run: + working-directory: deployment_service/ steps: - name: Checkout sources @@ -114,32 +117,26 @@ jobs: working-directory: "deployment_service" - name: Install dependencies - working-directory: deployment_service/ run: | uv sync --frozen - name: Ruff Linter - working-directory: deployment_service/ run: uv run ruff check --output-format=github - name: Ruff Formatter if: success() || failure() - working-directory: deployment_service/ run: uv run ruff format --check - name: Check lockfile if: success() || failure() - working-directory: deployment_service/ run: uv lock --locked - name: MyPy if: success() || failure() - working-directory: deployment_service/ run: | uv run mypy --check . 
- name: Run tests - working-directory: deployment_service/ run: uv run pytest env: SECURITY__WEBHOOK_SECRET: "yoursecretvalue" @@ -166,10 +163,11 @@ jobs: - name: Build Kelvin Docker image uses: docker/build-push-action@v6 with: - cache-from: type=registry,ref=ghcr.io/mrlvsb/kelvin-ci-cache + target: runtime + cache-from: type=gha # Only write the cache in the master branch or workflow_dispatch builds # https://github.com/docker/build-push-action/issues/845#issuecomment-1512619265 - cache-to: ${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'type=registry,ref=ghcr.io/mrlvsb/kelvin-ci-cache,compression=zstd' || '' }} + cache-to: ${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'type=gha,mode=max' || '' }} tags: ghcr.io/mrlvsb/kelvin:latest,ghcr.io/mrlvsb/kelvin:${{ github.sha }} outputs: type=docker,dest=${{ runner.temp }}/kelvin.tar @@ -177,27 +175,45 @@ jobs: uses: docker/build-push-action@v6 with: context: "{{defaultContext}}:deployment_service" - cache-from: type=registry,ref=ghcr.io/mrlvsb/deployment-ci-cache + cache-from: type=gha # Only write the cache in the master branch or workflow_dispatch builds # https://github.com/docker/build-push-action/issues/845#issuecomment-1512619265 - cache-to: ${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'type=registry,ref=ghcr.io/mrlvsb/deployment-ci-cache,compression=zstd' || '' }} + cache-to: ${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'type=gha,mode=max' || '' }} tags: ghcr.io/mrlvsb/deployment:latest,ghcr.io/mrlvsb/deployment:${{ github.sha }} outputs: type=docker,dest=${{ runner.temp }}/deployment.tar - - name: Share built image + - name: Build Kelvin-Evaluator Docker image + uses: docker/build-push-action@v6 + with: + target: evaluator + cache-from: type=gha + # Only write the cache in the master branch or workflow_dispatch builds + # 
https://github.com/docker/build-push-action/issues/845#issuecomment-1512619265 + cache-to: ${{ (github.event_name == 'merge_group' || github.event_name == 'workflow_dispatch') && 'type=gha,mode=max' || '' }} + tags: ghcr.io/mrlvsb/kelvin-evaluator:latest,ghcr.io/mrlvsb/kelvin-evaluator:${{ github.sha }} + outputs: type=docker,dest=${{ runner.temp }}/kelvin-evaluator.tar + + - name: Share Kelvin image uses: actions/upload-artifact@v6 with: name: kelvin path: ${{ runner.temp }}/kelvin.tar retention-days: 1 - - name: Share built image + - name: Share Deployment_Service image uses: actions/upload-artifact@v6 with: name: deployment path: ${{ runner.temp }}/deployment.tar retention-days: 1 + - name: Share Kelvin-Evaluator image + uses: actions/upload-artifact@v6 + with: + name: kelvin-evaluator + path: ${{ runner.temp }}/kelvin-evaluator.tar + retention-days: 1 + build-docs: runs-on: ubuntu-latest steps: @@ -255,19 +271,25 @@ jobs: - name: Set up Docker uses: docker/setup-buildx-action@v3 - - name: Download built image + - name: Download Kelvin image uses: actions/download-artifact@v6 with: name: kelvin path: ${{ runner.temp }} - - name: Download Deployment_service image + - name: Download Deployment_Service image if: steps.changed-files-deployment.outputs.any_changed == 'true' uses: actions/download-artifact@v6 with: name: deployment path: ${{ runner.temp }} + - name: Download Kelvin-Evaluator image + uses: actions/download-artifact@v6 + with: + name: kelvin-evaluator + path: ${{ runner.temp }} + - name: Load image id: load_image run: | @@ -278,6 +300,12 @@ jobs: echo "$LOADED" SHA_TAG=$(echo "$LOADED" | grep -v ':latest' | awk '{print $3}') echo "app_image_tag=$SHA_TAG" >> $GITHUB_OUTPUT + + LOADED_EVAL=$(docker load --input ${{ runner.temp }}/kelvin-evaluator.tar) + echo "$LOADED_EVAL" + SHA_TAG_EVAL=$(echo "$LOADED_EVAL" | grep -v ':latest' | awk '{print $3}') + echo "evaluator_image_tag=$SHA_TAG_EVAL" >> $GITHUB_OUTPUT + if [ "${{ 
steps.changed-files-deployment.outputs.any_changed }}" = "true" ]; then docker load --input ${{ runner.temp }}/deployment.tar fi @@ -291,7 +319,9 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Push Docker image with SHA tag - run: docker push ${{ steps.load_image.outputs.app_image_tag }} + run: | + docker push ${{ steps.load_image.outputs.app_image_tag }} + docker push ${{ steps.load_image.outputs.evaluator_image_tag }} - name: Trigger on-prem deployment run: | @@ -302,12 +332,23 @@ jobs: --commit-sha ${{ github.sha }} \ --healthcheck-url https://kelvin.cs.vsb.cz/api/v2/health \ --url https://kelvin.cs.vsb.cz/deployment/ + + python3 deployment_service/deploy.py \ + --service-name evaluator_scheduler \ + --container-name kelvin_evaluator_scheduler \ + --image ${{ steps.load_image.outputs.evaluator_image_tag }} \ + --commit-sha ${{ github.sha }} \ + --url https://kelvin.cs.vsb.cz/deployment/ \ + --health-check-timeout 240 env: WEBHOOK_SECRET: ${{ secrets.WEBHOOK_SECRET }} - name: Push Kelvin Docker image with latest tag run: docker push ghcr.io/mrlvsb/kelvin:latest + - name: Push Kelvin Evaluator Docker image with latest tag + run: docker push ghcr.io/mrlvsb/kelvin-evaluator:latest + - name: Push Deployment_service Docker image with all tags if: steps.changed-files-deployment.outputs.any_changed == 'true' run: docker push --all-tags ghcr.io/mrlvsb/deployment @@ -318,6 +359,12 @@ jobs: package-type: 'container' min-versions-to-keep: 15 + - uses: actions/delete-package-versions@v5 + with: + package-name: 'kelvin-evaluator' + package-type: 'container' + min-versions-to-keep: 15 + - uses: actions/delete-package-versions@v5 if: steps.changed-files-deployment.outputs.any_changed == 'true' with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a8bb7af71..b39a1949c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v6.0.0 hooks: - id: 
check-yaml args: [--allow-multiple-documents] @@ -18,11 +18,8 @@ repos: - id: mixed-line-ending args: [ --fix=lf ] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.7 + rev: v0.15.0 hooks: - id: ruff-format - - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.7 - hooks: - - id: ruff + - id: ruff-check args: [ --fix ] diff --git a/Dockerfile b/Dockerfile index 88d6313d7..b8830760b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,13 @@ -FROM ghcr.io/astral-sh/uv:python3.12-bookworm AS build-backend +FROM python:3.12-slim-bookworm AS build-backend + +COPY --from=ghcr.io/astral-sh/uv:0.10.0 /uv /usr/local/bin/uv RUN export DEBIAN_FRONTEND=noninteractive && \ apt-get update && \ apt-get install -y \ -o APT::Install-Recommends=false \ -o APT::Install-Suggests=false \ + build-essential \ libsasl2-dev \ libgraphviz-dev @@ -26,14 +29,15 @@ RUN npm ci RUN npm run build -FROM python:3.12-bookworm AS runtime +FROM python:3.12-slim-bookworm AS runtime RUN export DEBIAN_FRONTEND=noninteractive && \ apt-get update && \ apt-get install -y \ -o APT::Install-Recommends=false \ -o APT::Install-Suggests=false \ - graphviz && \ + graphviz \ + libmagic1 && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* @@ -43,6 +47,8 @@ WORKDIR /app # We want to use ID 1000, to have the same ID as the default outside user # And we also want group 101, to provide share access to the Unix uWSGI # socket with the nginx image. +RUN getent group 101 >/dev/null || groupadd -g 101 webserver + RUN useradd --uid 1000 --gid 101 --shell /bin/false --system webserver RUN chown -R webserver . 
@@ -72,3 +78,45 @@ COPY --chown=webserver deploy/entrypoint.sh ./ STOPSIGNAL SIGINT ENTRYPOINT ["/app/entrypoint.sh"] + +FROM runtime AS evaluator + +USER root + +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ + ca-certificates \ + curl \ + procps && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +RUN mkdir -p /etc/apt/keyrings && \ + curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc +RUN chmod a+r /etc/apt/keyrings/docker.asc + +RUN echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ + tee /etc/apt/sources.list.d/docker.list > /dev/null + +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ + docker-ce docker-ce-cli containerd.io docker-compose-plugin && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +USER webserver + +COPY --chown=webserver evaluator/entrypoint.sh /app/evaluator-entrypoint.sh + +ENTRYPOINT ["/app/evaluator-entrypoint.sh"] +CMD ["rqworker", "default", "evaluator", "--with-scheduler"] +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD pgrep -f "rqworker" || exit 1 diff --git a/api/views/default.py b/api/views/default.py index ab25c48c4..9192cde26 100644 --- a/api/views/default.py +++ b/api/views/default.py @@ -706,7 +706,7 @@ def set_subject(task): else: return JsonResponse( { - "errors": [f'Invalid task type {data.get("type")}'], + "errors": [f"Invalid task type {data.get('type')}"], }, status=400, ) diff --git a/common/evaluate.py b/common/evaluate.py index 484420444..cc75b6967 100644 --- a/common/evaluate.py +++ b/common/evaluate.py @@ -8,6 +8,7 @@ import 
django_rq import requests import yaml +from django.conf import settings from django.core import signing from django.urls import reverse from django.utils import timezone @@ -101,10 +102,16 @@ def get_meta(login): def evaluate_job(submit_url, task_url, token, meta): logging.basicConfig(level=logging.DEBUG) s = requests.Session() + if settings.DEBUG: + s.verify = False logging.info(f"Evaluating {submit_url}") - with tempfile.TemporaryDirectory() as workdir: + # Create kelvin subdirectory in system temp (cross-platform) + kelvin_temp = os.path.join(tempfile.gettempdir(), "kelvin") + os.makedirs(kelvin_temp, exist_ok=True) + + with tempfile.TemporaryDirectory(dir=kelvin_temp) as workdir: os.chdir(workdir) def untar(url, dest): diff --git a/common/event_log.py b/common/event_log.py index 5b141eb4b..bc65cd2e1 100644 --- a/common/event_log.py +++ b/common/event_log.py @@ -81,7 +81,7 @@ class Action(models.TextChoices): created_at = models.DateTimeField(auto_now_add=True) def __str__(self): - return f"{self.action} ({self.user.username}) at {self.created_at.strftime("%d. %m. %y %H:%M:%S")} from {self.ip_address}" + return f"{self.action} ({self.user.username}) at {self.created_at.strftime('%d. %m. %y %H:%M:%S')} from {self.ip_address}" def deserialize(self) -> UserEventBase | None: shared = dict( diff --git a/common/utils.py b/common/utils.py index 8c05f0e47..0d2e10b82 100644 --- a/common/utils.py +++ b/common/utils.py @@ -8,6 +8,7 @@ import django.contrib.auth.models import requests +from django.conf import settings from django.http import HttpRequest from ipware import get_client_ip @@ -109,6 +110,13 @@ def download_source_to_path(source_url: str, destination_path: str) -> None: def build_absolute_uri(request, location): base_uri = os.getenv("API_INTERNAL_BASEURL", None) + + # If the URL is the default Docker-internal one, only use it in DEBUG mode. 
+ # This prevents Production from accidentally using the internal container hostname + # instead of the public domain, unless explicitly forced. + if base_uri == "https://nginx" and not settings.DEBUG: + base_uri = None + if base_uri: return "".join([base_uri, location]) return request.build_absolute_uri(location) diff --git a/deployment_service/app/config.py b/deployment_service/app/config.py index 8baac655a..979775124 100644 --- a/deployment_service/app/config.py +++ b/deployment_service/app/config.py @@ -41,6 +41,7 @@ class Settings(BaseSettings): docker: Docker debug: bool = False log_level: str = "INFO" + health_check_timeout: int = 90 model_config = SettingsConfigDict( extra="ignore", diff --git a/deployment_service/app/deployment.py b/deployment_service/app/deployment.py index c7541a535..c12331be5 100644 --- a/deployment_service/app/deployment.py +++ b/deployment_service/app/deployment.py @@ -15,7 +15,6 @@ from app.config import get_settings from app.models import ImageInfo -HEALTH_CHECK_TIMEOUT = 90 # seconds HEALTH_CHECK_INTERVAL = 5 # seconds IMAGE_PULL_TIMEOUT = 600 # 10 minutes @@ -66,11 +65,13 @@ def __init__( compose_path: Path, compose_env_file: Path | None, container_name: str, - healthcheck_url: str, + healthcheck_url: str | None, + health_check_timeout: int | None = None, ): self.service_name = service_name self.container_name = container_name self.healthcheck_url = healthcheck_url + self.health_check_timeout = health_check_timeout self.image_tag = image["tag"] self.commit_sha = commit_sha self.stable_compose_path = str(compose_path.resolve()) @@ -275,14 +276,12 @@ async def _swap_service( ) return True - async def _health_check(self) -> bool: - """Performs a health check by making HTTP requests to a specified URL.""" - self.logger.info(f"Performing health check on {self.healthcheck_url}...") - end_time = time.time() + HEALTH_CHECK_TIMEOUT + async def _health_check_http(self, end_time: float, healthcheck_url: str) -> bool: + 
self.logger.info(f"Performing HTTP health check on {healthcheck_url}...") async with httpx.AsyncClient(verify=not get_settings().debug) as client: while time.time() < end_time: try: - response = await client.get(self.healthcheck_url, timeout=2.0) + response = await client.get(healthcheck_url, timeout=2.0) self.logger.info(f"Health check response status: {response.status_code}") if response.status_code == 200: self.logger.info("Health check passed.") @@ -290,9 +289,56 @@ async def _health_check(self) -> bool: except httpx.RequestError as exc: self.logger.warning(f"Health check request failed: {exc}") await asyncio.sleep(HEALTH_CHECK_INTERVAL) - self.logger.error("Health check timed out.") + self.logger.error("HTTP health check timed out.") + return False + + async def _health_check_docker(self, end_time: float, container_name: str) -> bool: + self.logger.info(f"Performing Docker container health state check for {container_name}...") + while time.time() < end_time: + try: + container = self.client.containers.get(container_name) + container.reload() + + state = container.attrs.get("State", {}) + if "Health" not in state: + self.logger.error( + "Container has no HEALTHCHECK configured. " + "Please add a HEALTHCHECK to your Dockerfile or provide a --healthcheck-url." + ) + return False + + health_status = state.get("Health", {}).get("Status") + + self.logger.info(f"Container health status: {health_status}") + + if health_status == "healthy": + self.logger.info("Docker health check passed.") + return True + if health_status == "unhealthy": + self.logger.warning("Container is unhealthy.") + return False + except NotFound: + self.logger.warning("Container not found during health check.") + except Exception as e: + self.logger.warning(f"Error checking container health: {e}") + + await asyncio.sleep(HEALTH_CHECK_INTERVAL) + + self.logger.error("Docker health check timed out.") return False + async def _health_check(self) -> bool: + """Performs a health check. 
+ If healthcheck_url is provided, makes HTTP requests to it. + If healthcheck_url is None, checks the container's Docker health status. + """ + timeout = self.health_check_timeout or get_settings().health_check_timeout + end_time = time.time() + timeout + + if self.healthcheck_url: + return await self._health_check_http(end_time, self.healthcheck_url) + return await self._health_check_docker(end_time, self.container_name) + def _cleanup(self, old_image_id: str) -> None: """Removes the old Docker image after a successful deployment.""" if not old_image_id or old_image_id == self.image_tag: diff --git a/deployment_service/app/main.py b/deployment_service/app/main.py index f3e59da9a..39ebb7d10 100644 --- a/deployment_service/app/main.py +++ b/deployment_service/app/main.py @@ -127,7 +127,8 @@ async def deploy(request: DeploymentRequest, response: Response): compose_path=get_settings().docker.compose_file_path, compose_env_file=get_settings().docker.compose_env_file, container_name=request.container_name, - healthcheck_url=str(request.healthcheck_url), + healthcheck_url=str(request.healthcheck_url) if request.healthcheck_url else None, + health_check_timeout=request.health_check_timeout, ) try: logs = await manager.run() diff --git a/deployment_service/app/models.py b/deployment_service/app/models.py index 25fc573ba..b586f457e 100644 --- a/deployment_service/app/models.py +++ b/deployment_service/app/models.py @@ -49,15 +49,22 @@ class DeploymentRequest(BaseModel): healthcheck_url: Annotated[ AnyHttpUrl | None, Field( - default="https://kelvin.cs.vsb.cz/api/v2/health", - description="The URL to check the health of the service.", + default=None, + description="The URL to check the health of the service. If not provided, the container's internal health status is checked.", examples=["https://kelvin.cs.vsb.cz/api/v2/health"], ), ] + health_check_timeout: int | None = Field( + default=None, + description="Optional timeout for the health check in seconds. 
Defaults to global setting if not provided.", + examples=[120], + ) @field_validator("healthcheck_url", mode="after") @classmethod - def validate_healthcheck_url(cls, value: AnyHttpUrl) -> AnyHttpUrl: + def validate_healthcheck_url(cls, value: AnyHttpUrl | None) -> AnyHttpUrl | None: + if value is None: + return value host = value.host if host not in get_settings().security.allowed_hosts: raise ValueError( diff --git a/deployment_service/deploy.py b/deployment_service/deploy.py index 29c969bfd..b4ee38a3e 100755 --- a/deployment_service/deploy.py +++ b/deployment_service/deploy.py @@ -10,18 +10,18 @@ import urllib.request -def format_for_github_summary(status_code, response_json): +def format_for_github_summary(status_code, response_json, service_name): logs = response_json.get("logs", []) error_message = response_json.get("error") if not (200 <= status_code < 300): - title = f"## ❌ Deployment Failed (Status: {status_code})" + title = f"## ❌ Deployment Failed: {service_name} (Status: {status_code})" if not error_message: summary_lines = [f"**Error:** `{response_json.get('detail', 'Unknown error')}`"] else: summary_lines = [f"**Error:** `{error_message}`"] else: - title = f"## ✅ Deployment Succeeded (Status: {status_code})" + title = f"## ✅ Deployment Succeeded: {service_name} (Status: {status_code})" summary_lines = ["The deployment process completed successfully."] summary_lines.append("\n
\n\nView full deployment logs\n\n```text") @@ -63,8 +63,14 @@ def main(): ) parser.add_argument( "--healthcheck-url", - default="https://kelvin.cs.vsb.cz/api/v2/health", - help="The full URL for the application's health check endpoint. (e.g., 'https://nginx/api/v2/health')", + default=None, + help="The full URL for the application's health check endpoint. (e.g., 'https://nginx/api/v2/health'). If not provided, the container's health status will be checked.", + ) + parser.add_argument( + "--health-check-timeout", + type=int, + default=None, + help="Optional timeout for the health check in seconds. Overrides the server-side default.", ) parser.add_argument( @@ -97,6 +103,7 @@ def main(): "image": args.image, "commit_sha": args.commit_sha, "healthcheck_url": args.healthcheck_url, + "health_check_timeout": args.health_check_timeout, } message_data = json.dumps(message_dict).encode("utf-8") signature = hmac.new(secret.encode("utf-8"), message_data, hashlib.sha256).hexdigest() @@ -128,7 +135,7 @@ def main(): "error": f"Invalid JSON response from server (Status: {status_code}).", } - summary_content = format_for_github_summary(status_code, response_json) + summary_content = format_for_github_summary(status_code, response_json, args.service_name) if is_github_env: summary_file_path = str(os.getenv("GITHUB_STEP_SUMMARY")) with open(summary_file_path, "a", encoding="utf-8") as f: diff --git a/deployment_service/tests/test_deployment.py b/deployment_service/tests/test_deployment.py index 7c930f987..8da940ae3 100644 --- a/deployment_service/tests/test_deployment.py +++ b/deployment_service/tests/test_deployment.py @@ -110,10 +110,44 @@ def fake_time(): result = await manager_instance._health_check() assert result is False - assert "Health check timed out" in manager_instance.logs[-1] + assert "HTTP health check timed out." 
in manager_instance.logs[-1] assert mock_sleep.called +@pytest.mark.asyncio +async def test_health_check_docker_healthy(manager_instance): + manager_instance.healthcheck_url = None + mock_container = MagicMock() + mock_container.attrs = {"State": {"Health": {"Status": "healthy"}}} + manager_instance.client.containers.get.return_value = mock_container + + result = await manager_instance._health_check() + assert result is True + assert "Docker health check passed." in manager_instance.logs[-1] + + +@pytest.mark.asyncio +@patch("asyncio.sleep", new_callable=AsyncMock) +async def test_health_check_docker_timeout(mock_sleep, manager_instance): + manager_instance.healthcheck_url = None + # Fake time that increases every call + current_time = 0 + + def fake_time(): + nonlocal current_time + current_time += 1 + return current_time + + with patch("time.time", side_effect=fake_time): + mock_container = MagicMock() + mock_container.attrs = {"State": {"Health": {"Status": "starting"}}} + manager_instance.client.containers.get.return_value = mock_container + + result = await manager_instance._health_check() + assert result is False + assert "Docker health check timed out." 
in manager_instance.logs[-1] + + @pytest.mark.asyncio async def test_swap_service_critical_error(manager_instance): manager_instance._run_command = AsyncMock(side_effect=[True, False]) diff --git a/docker-compose.yml b/docker-compose.yml index 931f61675..13e2f3925 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,7 +12,7 @@ services: dockerfile: Dockerfile target: runtime image: "ghcr.io/mrlvsb/kelvin:${APP_IMAGE_TAG:-latest}" # Interpolation for Deployment Service - pull_policy: always + pull_policy: never restart: unless-stopped environment: # Hardcode the DB and Redis hostnames and ports, since they necessarily @@ -26,6 +26,9 @@ services: - DATABASE__USERNAME=${DATABASE__USERNAME} - DATABASE__PASSWORD=${DATABASE__PASSWORD} - KELVIN__HOST_URL=${KELVIN__HOST_URL} + # - Defaults to 'https://nginx' for local docker development (to fix loopback ref to 127.0.0.1) + # - IGNORED by app if value is 'https://nginx' AND DEBUG=False + - API_INTERNAL_BASEURL=${API_INTERNAL_BASEURL:-https://nginx} volumes: - app_static:/app/static - app_socket:/socket @@ -76,6 +79,77 @@ services: - app_static:/app/static:ro - app_socket:/socket + evaluator_scheduler: + container_name: kelvin_evaluator_scheduler + depends_on: + - redis + - app + profiles: [ prod ] + build: + context: . + dockerfile: Dockerfile + target: evaluator + image: "ghcr.io/mrlvsb/kelvin-evaluator:${EVALUATOR_IMAGE_TAG:-latest}" + pull_policy: never + restart: unless-stopped + command: "rqworker default evaluator --with-scheduler" + environment: + - REDIS__HOST=redis + - REDIS__PORT=6379 + - DOCKER_HOST=docker_proxy:2375 + volumes: + # Mount /tmp/kelvin so Docker-in-Docker can access temporary evaluation directories + - /tmp/kelvin:/tmp/kelvin + + evaluator_cpu: + container_name: kelvin_evaluator_cpu + profiles: [ evaluator-cpu ] + build: + context: . 
+ dockerfile: Dockerfile + target: evaluator + image: "ghcr.io/mrlvsb/kelvin-evaluator:${EVALUATOR_IMAGE_TAG:-latest}" + pull_policy: always + restart: unless-stopped + command: "rqworker-pool default evaluator --num-workers ${EVALUATOR_CPU_REPLICAS:-32}" + environment: + # Option to specify Redis host and port to connect from other machines where evaluator runs, + # fallbacks to 'redis' hostname and default port, which means same machine + - REDIS__HOST=${EVALUATOR_REDIS__HOST:-redis} + - REDIS__PORT=${EVALUATOR_REDIS__PORT:-6379} + - DOCKER_HOST=docker_proxy:2375 + volumes: + # Mount /tmp/kelvin so Docker-in-Docker can access temporary evaluation directories + - /tmp/kelvin:/tmp/kelvin + + evaluator_cuda: + container_name: kelvin_evaluator_cuda + profiles: [ evaluator-cuda ] + build: + context: . + dockerfile: Dockerfile + target: evaluator + image: "ghcr.io/mrlvsb/kelvin-evaluator:${EVALUATOR_IMAGE_TAG:-latest}" + pull_policy: always + restart: unless-stopped + command: "rqworker-pool cuda --num-workers ${EVALUATOR_CUDA_REPLICAS:-32}" + environment: + # Option to specify Redis host and port to connect from other machines where evaluator runs, + # fallbacks to 'redis' hostname and default port, which means same machine + - REDIS__HOST=${EVALUATOR_REDIS__HOST:-redis} + - REDIS__PORT=${EVALUATOR_REDIS__PORT:-6379} + - DOCKER_HOST=docker_proxy:2375 + volumes: + # Mount /tmp/kelvin so Docker-in-Docker can access temporary evaluation directories + - /tmp/kelvin:/tmp/kelvin + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: [ gpu ] + deployment: container_name: kelvin_deployment_service profiles: [ prod ] @@ -86,8 +160,6 @@ services: image: "ghcr.io/mrlvsb/deployment:latest" pull_policy: always restart: unless-stopped - group_add: - - "${DOCKER_GROUP_ID:?DOCKER_GROUP_ID is not set}" environment: # Hardcode the docker-compose.yml path inside the container to match the repository volume mount path (must be in sync with 
volume that mounts the repo) - DOCKER__COMPOSE_FILE_PATH=/kelvin/docker-compose.yml @@ -95,9 +167,20 @@ - DEBUG=${DEBUG:-false} - SECURITY__WEBHOOK_SECRET=${SECURITY__WEBHOOK_SECRET} - SECURITY__ALLOWED_HOSTS=${SECURITY__ALLOWED_HOSTS} + - DOCKER_HOST=docker_proxy:2375 volumes: # Mount the Kelvin repo to /kelvin inside the container, must be in sync with DOCKER__COMPOSE_FILE_PATH - ${REPO__DIRECTORY_PATH}:/kelvin + + # proxy from TCP to unix socket, so we don't need to run web as root with different uid + # or solve issues with socket permissions + docker_proxy: + container_name: kelvin_docker_proxy + profiles: [ prod ] + image: alpine/socat + command: tcp-listen:2375,fork,reuseaddr unix-connect:/var/run/docker.sock + user: root + volumes: - /var/run/docker.sock:/var/run/docker.sock volumes: diff --git a/docs/docs/01-intro/01-installation.mdx b/docs/docs/01-intro/01-installation.mdx index b8f1996fb..c8b38ef0d 100644 --- a/docs/docs/01-intro/01-installation.mdx +++ b/docs/docs/01-intro/01-installation.mdx @@ -9,7 +9,7 @@ Kelvin relies on several runtime services, so some setup is required before use. 
```bash apt-get install libsasl2-dev libgraphviz-dev graphviz gcc libxml2-dev libxslt1-dev libffi-dev \ - libz-dev python3-pip curl pre-commit + libz-dev curl ``` ## Install Python dependencies @@ -115,12 +115,18 @@ And then you should be able to find the application on [`http://localhost:8000`] --- -### Git pre-commit hooks +### Git pre-commit (prek) hooks -To ensure that the code is formatted correctly and linted, you can install pre-commit hooks: +To ensure that the code is formatted correctly and linted, you can install pre-commit (prek) hooks: ```bash -pre-commit install +curl --proto '=https' --tlsv1.2 -LsSf https://github.com/j178/prek/releases/download/v0.3.2/prek-installer.sh | sh +``` + +and then enable hooks with their installation: + +```bash +prek install ``` ### Deploying workers @@ -146,5 +152,5 @@ uv run build.py Then you can start a worker with the following command (in kelvin root folder): ```bash -uv run manage.py rqworker default evaluator summary --with-scheduler +uv run manage.py rqworker default evaluator --with-scheduler ``` diff --git a/docs/docs/02-developers-guide/02-deployment.mdx b/docs/docs/02-developers-guide/02-deployment.mdx index 940141d06..9cb8153ef 100644 --- a/docs/docs/02-developers-guide/02-deployment.mdx +++ b/docs/docs/02-developers-guide/02-deployment.mdx @@ -129,8 +129,10 @@ the specified commit_sha into it using git show. This isolates the new configura ### 4. Health Check -The manager performs an active health check by repeatedly sending HTTP GET requests to a specified `healthcheck_url`. -It continuously polls this endpoint until it receives a 200 OK status code, which indicates the service is ready. If the health check times out, it triggers a rollback. +The manager performs an active health check by repeatedly sending HTTP GET requests to a specified `healthcheck_url` or by using the Docker healthcheck (if the `healthcheck_url` is not specified). 
+It continuously polls this endpoint until it receives a 200 OK status code or docker container status changes to `healthy`, which indicates the service is ready. + +The timeout for this check defaults to **90 seconds** (configurable via `health_check_timeout` in `config.py`). It can also be overridden per-request by providing a `health_check_timeout` in the deployment payload. If the health check times out, it triggers a rollback. ### 5. Rollback (on Failure) diff --git a/docs/docs/02-developers-guide/03-evaluator-images.mdx b/docs/docs/02-developers-guide/03-evaluator-images.mdx new file mode 100644 index 000000000..6e8040d33 --- /dev/null +++ b/docs/docs/02-developers-guide/03-evaluator-images.mdx @@ -0,0 +1,61 @@ +--- +sidebar_position: 3 +title: Evaluator Images +--- + +Kelvin uses a set of Docker images to evaluate student submissions safely and consistently. +These images are built in a hierarchical manner to minimize duplication and ensure consistency. + +## Image Structure + +All evaluator images are located in `evaluator/images/`. The structure is designed to allow dependency inheritance between images. + +- **`base`**: The base image (`kelvin/base`) that all other images typically inherit from. It contains common tools and configurations (like `locale` settings, common libraries). +- **Language-specific images**: Images like `gcc`, `java`, `pythonrun`, etc. These inherit from `kelvin/base` (or other kelvin images) and install specific compilers or interpreters. + +## Dependency Management + +The build system automatically detects dependencies between images by parsing their `Dockerfile` `FROM` instructions. +For example, if `evaluator/images/gcc/Dockerfile` contains `FROM kelvin/base`, the build system ensures `kelvin/base` is built before `kelvin/gcc`. + +## Building Images + +To build the images, use the provided `build.py` script located in `evaluator/images/`. 
+ +```bash +cd evaluator/images +uv run build.py +``` + +### Build Options + +- **`--dry-run`**: Print the `docker build` commands that would be executed without running them. +- **`--list-order`**: Print the calculated build order (batches of images that can be built in parallel). + +```bash +uv run build.py --dry-run +``` + +## Adding a New Runtime Environment + +To add support for a new language or tool: + +1. **Create a new directory** in `evaluator/images/` (e.g., `evaluator/images/rust`). +2. **Create a `Dockerfile`** in that directory. +3. **Inherit from `kelvin/base`** (or another appropriate parent). + + Example `Dockerfile`: + + ```dockerfile + FROM kelvin/base + + RUN apt-get update && apt-get install -y rustc + ``` + +4. **Run the build script**. It will automatically detect the new image and its dependency. + +```bash +uv run build.py +``` + +The image will be named `kelvin/` (e.g., `kelvin/rust`). diff --git a/docs/docs/03-teachers-guide/02-task-configuration/03-tests.mdx b/docs/docs/03-teachers-guide/02-task-configuration/03-tests.mdx index 920fe61ca..b6664d5e5 100644 --- a/docs/docs/03-teachers-guide/02-task-configuration/03-tests.mdx +++ b/docs/docs/03-teachers-guide/02-task-configuration/03-tests.mdx @@ -15,6 +15,14 @@ pipeline: - type: tests ``` +You can optionally specify a custom Docker image to run the tests in: + +```yaml +pipeline: + - type: tests + image: kelvin/custom-image +``` + ::: Static tests can be defined by files in the task directory. @@ -26,7 +34,7 @@ It is recommended to prepend the test number to each test because tests are orde This test will execute the student program and checks if it prints `2020` in the standard output. -``` +```plaintext # 01_year.out 2020 ``` @@ -35,12 +43,12 @@ This test will execute the student program and checks if it prints `2020` in the The standard input is passed to the program and then the student's result on the output is compared to the expected stdout result. 
-``` +```plaintext # 02_sum.in 1 2 3 4 ``` -``` +```plaintext # 02_sum.out 10 ``` @@ -49,7 +57,7 @@ The standard input is passed to the program and then the student's result on the Checks if the student's program created file `result.txt` with the expected content. -``` +```plaintext # 03_nums.file_out.result.txt 1 2 3 4 5 6 7 8 9 10 ``` @@ -59,7 +67,7 @@ Checks if the student's program created file `result.txt` with the expected cont Provides the input file `data.txt` to student's program. It can be combined with stdout or file comparing. -``` +```plaintext # 04_nums.file_in.data.txt 1 2 3 4 5 6 7 8 9 10 ``` diff --git a/docs/docs/03-teachers-guide/02-task-configuration/05-pipeline.mdx b/docs/docs/03-teachers-guide/02-task-configuration/05-pipeline.mdx index 42b5ac8df..e15d5450e 100644 --- a/docs/docs/03-teachers-guide/02-task-configuration/05-pipeline.mdx +++ b/docs/docs/03-teachers-guide/02-task-configuration/05-pipeline.mdx @@ -196,6 +196,16 @@ pipeline: Commands prefixed with **#** are not shown on the result page. ::: +### i) Tests + +Action for running predefined input/output tests. See [Tests configuration](./03-tests.mdx) for more details. + +```yaml +pipeline: + - type: tests + image: kelvin/run # Docker image to use for running tests. Default: kelvin/run +``` + ## Docker Own private actions can be implemented in any language in a [docker container](https://github.com/mrlvsb/kelvin/tree/master/evaluator/images) and published to the official docker hub. diff --git a/evaluator/entrypoint.sh b/evaluator/entrypoint.sh new file mode 100755 index 000000000..0f4231b69 --- /dev/null +++ b/evaluator/entrypoint.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -eux + +# Run the image builder to ensure all required images are present +echo "Building evaluator images..." 
+python /app/evaluator/images/build.py + +# Execute the passed command +exec python manage.py "$@" diff --git a/evaluator/images/base/Dockerfile b/evaluator/images/base/Dockerfile index cc5eb6011..6faa41b43 100644 --- a/evaluator/images/base/Dockerfile +++ b/evaluator/images/base/Dockerfile @@ -1,30 +1,27 @@ FROM ubuntu:24.04 -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ +ENV LANG=en_US.UTF-8 +ENV LANGUAGE=en_US:en +ENV LC_ALL=en_US.UTF-8 + +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ + build-essential \ locales=2.39-0ubuntu8 \ gcc=4:13.2.0-7ubuntu1 \ g++=4:13.2.0-7ubuntu1 \ gdb=15.0.50.20240403-0ubuntu1 \ nasm=2.16.01-1build1 \ python3=3.12.3-0ubuntu2.1 \ + python3-pip=24.0+dfsg-1ubuntu1 \ + python3-wheel=0.42.0-2 \ cmake=3.28.3-1build7 && \ - rm -rf /var/lib/apt/lists/* - -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ - python3-pip && \ - rm -rf /var/lib/apt/lists/* - -# For HTML sanitization -RUN python3 -m pip install --break-system-packages bleach==5.0.1 + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen -ENV LANG=en_US.UTF-8 -ENV LANGUAGE=en_US:en -ENV LC_ALL=en_US.UTF-8 -# Workaround for https://github.com/dotnet/sdk/issues/31457 -# It is included here, because it has to be present not only for building .NET projects, -# but also for running them (e.g. in the `run` image). 
-ENV DOTNET_EnableWriteXorExecute=0 +RUN python3 -m pip install --break-system-packages bleach==6.3.0 diff --git a/evaluator/images/build.py b/evaluator/images/build.py index 34b822bfd..d397d2383 100755 --- a/evaluator/images/build.py +++ b/evaluator/images/build.py @@ -1,55 +1,125 @@ #!/usr/bin/env python3 -import os +import argparse import hashlib +import os import subprocess +import logging +import shlex + from pathlib import Path +from typing import Dict, List, Set, Generator + +logging.basicConfig(level=logging.INFO, format="%(message)s") + +BASE_PATH = os.path.dirname(os.path.realpath(__file__)) + + +class ImageBuilder: + def __init__(self, base_path: str): + self.base_path = base_path + self.deps: Dict[str, List[str]] = {} + self.images: Dict[str, str] = {} # name -> path + self._scan_images() + + def _scan_images(self): + """Finds all Dockerfiles and builds the dependency graph.""" + for path in Path(self.base_path).rglob("Dockerfile"): + name = "kelvin/" + os.path.basename(os.path.dirname(path)) + self.images[name] = str(path) + + parents = set() + with open(path, "r") as f: + for line in f: + parts = line.strip().split() + if parts and parts[0].upper() == "FROM": + # Handle multi-stage builds + image_name = parts[1] + if image_name.startswith("kelvin/") and image_name.endswith(":latest"): + image_name = image_name[:-7] + parents.add(image_name) + + if not parents: + logging.warning(f"Image {name} has no FROM instruction") + continue + + if name not in self.deps: + self.deps[name] = [] + + for parent in parents: + self.deps[name].append(parent) + + def _build_deps(self, deps: Dict[str, List[str]]) -> List[Set[str]]: + """Original dependency resolution algorithm.""" + d = dict((k, set(deps[k])) for k in deps) + r = [] + while d: + # values not in keys (items without dep) + t = set(i for v in d.values() for i in v) - set(d.keys()) + # and keys without value (items without dep) + t.update(k for k, v in d.items() if not v) + # can be done right away + 
r.append(t) + # and cleaned up + d = dict(((k, v - t) for k, v in d.items() if v)) + return r + + def get_build_order(self) -> Generator[List[str], None, None]: + """Returns batches of images that can be built in parallel, in dependency order.""" + build_groups = self._build_deps(self.deps) + for group in build_groups: + # Filter solely for kelvin images as we do not need to build external deps + kelvin_images = sorted([img for img in group if img.startswith("kelvin/")]) + if kelvin_images: + yield kelvin_images + def list_order(self): + for batch in self.get_build_order(): + print(batch) -def build_deps(arg): - d = dict((k, set(arg[k])) for k in arg) - r = [] - while d: - # values not in keys (items without dep) - t = set(i for v in d.values() for i in v) - set(d.keys()) - # and keys without value (items without dep) - t.update(k for k, v in d.items() if not v) - # can be done right away - r.append(t) - # and cleaned up - d = dict(((k, v - t) for k, v in d.items() if v)) - return r - - -deps = {} -base_path = os.path.dirname(os.path.realpath(__file__)) -for path in Path(base_path).rglob("Dockerfile"): - name = "kelvin/" + os.path.basename(os.path.dirname(path)) - - parent = None - with open(path) as f: - for line in f: - parts = line.strip().split(" ") - if parts[0].upper() == "FROM": - parent = parts[1] - if not parent: - print(f"Image {name} has no FROM") - exit(1) - - if name not in deps: - deps[name] = [] - - deps[name].append(parent) - -for group in build_deps(deps): - for image in group: + def _build_single_image(self, image: str, dry_run: bool): if image.startswith("kelvin/"): name = image.split("/")[1] - print(f"============ {name} ============") - - with open(os.path.join(base_path, name, "Dockerfile"), "rb") as f: - hash = hashlib.md5(f.read()).hexdigest() - image_name = f"{image}:{hash}" - cmd = ["docker", "build", "-t", image_name, "-t", image, "."] - p = subprocess.check_call( - cmd, cwd=os.path.join(os.path.dirname(os.path.realpath(__file__)), name) 
- ) + logging.info(f"============ {name} ============") + + image_path = self.images[image] + image_dir = os.path.dirname(image_path) + + with open(image_path, "rb") as f: + file_hash = hashlib.md5(f.read()).hexdigest() + + image_name_hash = f"{image}:{file_hash}" + + # Use simple docker build (or buildx which defaults to docker driver if not setup otherwise) + # We remove --load because default driver loads automatically. + # We remove --build-context because default driver shares daemon state. + cmd = ["docker", "build", "-t", image_name_hash, "-t", f"{image}:latest", "."] + + if dry_run: + logging.info(f"cd {image_dir} && {shlex.join(cmd)}") + else: + subprocess.check_call(cmd, cwd=image_dir) + + def build(self, dry_run: bool = False): + for batch in self.get_build_order(): + for image in batch: + self._build_single_image(image, dry_run) + + +def main(): + parser = argparse.ArgumentParser(description="Manage Kelvin Docker images") + + parser.add_argument("--list-order", action="store_true", help="Show the build dependency order") + parser.add_argument("--dry-run", action="store_true", help="Print commands without executing") + + args = parser.parse_args() + + builder = ImageBuilder(BASE_PATH) + + if args.list_order: + builder.list_order() + else: + builder.build(dry_run=args.dry_run) + + +if __name__ == "__main__": + main() diff --git a/evaluator/images/cargo/Dockerfile b/evaluator/images/cargo/Dockerfile index 965a6e3b5..df8fab9ca 100644 --- a/evaluator/images/cargo/Dockerfile +++ b/evaluator/images/cargo/Dockerfile @@ -1,12 +1,16 @@ FROM rust:1.90.0 -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ python3-pip && \ - rm -rf /var/lib/apt/lists/* + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* # For HTML sanitization -RUN python3 -m pip install 
--break-system-packages bleach==5.0.1 +RUN python3 -m pip install --break-system-packages bleach==6.3.0 RUN rustup component add clippy diff --git a/evaluator/images/cargo/entry.py b/evaluator/images/cargo/entry.py index 3590d8fff..4aafd226b 100755 --- a/evaluator/images/cargo/entry.py +++ b/evaluator/images/cargo/entry.py @@ -180,7 +180,7 @@ def run_cargo(command: str, args: List[str]) -> BuildResult: artifacts = output.binary_artifacts if len(artifacts) > 1: stdout += f""" -Warning: multiple binary artifacts built ({', '.join([artifact.name for artifact in artifacts])}). +Warning: multiple binary artifacts built ({", ".join([artifact.name for artifact in artifacts])}). Using the first one for further commands. """ if len(artifacts) > 0: diff --git a/evaluator/images/clang-tidy/Dockerfile b/evaluator/images/clang-tidy/Dockerfile index 50deca9d0..9b6745183 100644 --- a/evaluator/images/clang-tidy/Dockerfile +++ b/evaluator/images/clang-tidy/Dockerfile @@ -1,5 +1,15 @@ -FROM alpine:edge -RUN apk update && apk add clang-extra-tools libc-dev linux-headers python3 py3-yaml libstdc++ g++ +FROM kelvin/gcc + +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ + clang-tidy \ + python3-yaml && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + ADD analyze.py build_urls.py / RUN /build_urls.py CMD /analyze.py diff --git a/evaluator/images/dotnet/Dockerfile b/evaluator/images/dotnet/Dockerfile index fa741ff63..dd285c69e 100644 --- a/evaluator/images/dotnet/Dockerfile +++ b/evaluator/images/dotnet/Dockerfile @@ -1,19 +1,25 @@ -FROM kelvin/base +FROM kelvin/base:latest -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ +ENV DOTNET_EnableWriteXorExecute=0 + +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ 
software-properties-common RUN add-apt-repository ppa:dotnet/backports -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ dotnet-sdk-9.0 \ - aspnetcore-runtime-9.0 \ - python3-pip && \ - rm -rf /var/lib/apt/lists/* - -RUN python3 -m pip install --break-system-packages bleach==5.0.1 + aspnetcore-runtime-9.0 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* ADD entry.py / CMD /entry.py diff --git a/evaluator/images/gcc/Dockerfile b/evaluator/images/gcc/Dockerfile index 0c044e5c7..79cd6cc06 100644 --- a/evaluator/images/gcc/Dockerfile +++ b/evaluator/images/gcc/Dockerfile @@ -1,4 +1,5 @@ FROM kelvin/base + ADD wrapper /wrapper/gcc ADD wrapper /wrapper/cc ADD wrapper /wrapper/g++ diff --git a/evaluator/images/gcc/entry.py b/evaluator/images/gcc/entry.py index b0a4844ef..744a38597 100755 --- a/evaluator/images/gcc/entry.py +++ b/evaluator/images/gcc/entry.py @@ -53,7 +53,7 @@ def compile(makeflags: str, cmakeflags: str, html_output: io.StringIO): "CXXFLAGS": flags, "LDFLAGS": ldflags, "CLICOLOR_FORCE": "1", - "PATH": f'/wrapper:{os.getenv("PATH")}', + "PATH": f"/wrapper:{os.getenv('PATH')}", "CMAKE_EXPORT_COMPILE_COMMANDS": "ON", } diff --git a/evaluator/images/java/Dockerfile b/evaluator/images/java/Dockerfile index 97295493e..c4aa97aef 100644 --- a/evaluator/images/java/Dockerfile +++ b/evaluator/images/java/Dockerfile @@ -1,21 +1,29 @@ FROM kelvin/base -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ openjdk-21-jdk \ - python3-pip \ wget && \ + apt-get clean && \ rm -rf /var/lib/apt/lists/* -#RUN wget 
https://dlcdn.apache.org/maven/maven-3/3.9.9/binaries/apache-maven-3.9.9-bin.tar.gz -P /tmp + RUN wget https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.tar.gz -P /tmp + RUN tar xf /tmp/apache-maven-*-bin.tar.gz -C /opt + RUN ln -s /opt/apache-maven-3.9.9/ /opt/maven -RUN echo 'export JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))\n\ -export M2_HOME=/opt/maven\n\ -export MAVEN_HOME=/opt/maven\n\ -export PATH=$M2_HOME/bin:$PATH' > /etc/profile.d/maven.sh -RUN chmod +x /etc/profile.d/maven.sh -RUN /usr/bin/python3 -m pip install --break-system-packages bleach==5.0.1 + +# To find out exactly folder path, run the docker image and enter +# `update-alternatives --config java` or `dirname $(dirname $(readlink -f $(which java)))` command +# We support only amd64 architecture, so the path is hardcoded to java-21-openjdk-amd64, but it can be changed if needed +ENV JAVA_HOME="/usr/lib/jvm/java-21-openjdk-amd64" +ENV M2_HOME="/opt/maven" +ENV MAVEN_HOME="/opt/maven" +ENV PATH="$M2_HOME/bin:$PATH" + RUN mkdir /.m2 RUN chmod u+rwx,g+rwx,o+rwx /.m2 diff --git a/evaluator/images/java/entry.py b/evaluator/images/java/entry.py index 6638fd33c..26007d9a9 100755 --- a/evaluator/images/java/entry.py +++ b/evaluator/images/java/entry.py @@ -126,10 +126,6 @@ def build_java_project(run_tests: bool) -> BuildResult: if len(executable_class_names) == 1: main_script_lines = [ "#!/bin/bash", - f"JAVA_HOME={os.environ['JAVA_HOME']}", - f"MAVEN_HOME={os.environ['MAVEN_HOME']}", - f"M2_HOME={os.environ['M2_HOME']}", - f"PATH={os.environ['PATH']}", f"mvn --quiet exec:java -Dexec.mainClass={executable_class_names[0]}", ] script_name = "main" @@ -170,25 +166,6 @@ def build_java_project(run_tests: bool) -> BuildResult: ) -def get_java_home(): - try: - java_bin_path = subprocess.check_output("which java", shell=True, text=True).strip() - java_real_path = subprocess.check_output( - f"readlink -f {java_bin_path}", shell=True, text=True - ).strip() - 
java_home = os.path.dirname(os.path.dirname(java_real_path)) - return java_home - except subprocess.CalledProcessError: - # Zde můžete logovat chybu nebo vrátit výchozí hodnotu - return None - - -# set environment variables -os.environ["JAVA_HOME"] = get_java_home() or "/usr/lib/jvm/default-java" -os.environ["M2_HOME"] = "/opt/maven" -os.environ["MAVEN_HOME"] = "/opt/maven" -os.environ["PATH"] = f"{os.environ['M2_HOME']}/bin:{os.environ['PATH']}" - run_tests = os.getenv("PIPE_UNITTESTS", False) result = build_java_project(run_tests) diff --git a/evaluator/images/pythonrun/Dockerfile b/evaluator/images/pythonrun/Dockerfile index 910f6a45c..8e0a8a1d8 100644 --- a/evaluator/images/pythonrun/Dockerfile +++ b/evaluator/images/pythonrun/Dockerfile @@ -1,6 +1,3 @@ -FROM kelvin/run -RUN apt-get update && \ - apt-get install -y --no-install-recommends python3-pip python3-wheel && \ - rm -rf /var/lib/apt/lists/* +FROM kelvin/base -RUN pip3 install --break-system-packages pytest +RUN pip3 install --break-system-packages pytest==9.0.2 flake8==7.3.0 diff --git a/evaluator/images/run/Dockerfile b/evaluator/images/run/Dockerfile index 923fe4f31..f5f4f3306 100644 --- a/evaluator/images/run/Dockerfile +++ b/evaluator/images/run/Dockerfile @@ -1,9 +1,16 @@ FROM kelvin/base -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y asciinema imagemagick webp python3-magic \ - openjdk-21-jdk && \ - rm -rf /var/lib/apt/lists/* +RUN export DEBIAN_FRONTEND=noninteractive && \ + apt-get update && \ + apt-get install -y \ + -o APT::Install-Recommends=false \ + -o APT::Install-Suggests=false \ + asciinema \ + imagemagick \ + webp \ + python3-magic && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* ADD entry.py / CMD /entry.py diff --git a/evaluator/pipelines.py b/evaluator/pipelines.py index e9cc24603..e43f01d50 100644 --- a/evaluator/pipelines.py +++ b/evaluator/pipelines.py @@ -86,7 +86,7 @@ def fmt_value(v): "-v", evaluation.submit_path + 
":/work", "--ulimit", - f'fsize={limits["fsize"]}:{limits["fsize"]}', + f"fsize={limits['fsize']}:{limits['fsize']}", "-m", str(limits["memory"]), "--memory-swap", @@ -267,19 +267,28 @@ def to_file(input): class TestsPipe: - def __init__(self, executable="./main", limits=None, timeout=5, before=None, **kwargs): + def __init__( + self, + executable="./main", + limits=None, + timeout=5, + before=None, + image="kelvin/run", + **kwargs, + ): super().__init__(**kwargs) self.executable = [executable] if isinstance(executable, str) else executable self.limits = limits self.timeout = timeout self.before = [] if not before else before + self.image = image def run(self, evaluation): results = [] result_dir = os.path.join(evaluation.result_path, self.id) os.mkdir(result_dir) - image = prepare_container(docker_image("kelvin/run"), self.before) + image = prepare_container(docker_image(self.image), self.before) container = ( subprocess.check_output( create_docker_cmd( diff --git a/evaluator/testsets.py b/evaluator/testsets.py index ca31f3823..5763851f7 100644 --- a/evaluator/testsets.py +++ b/evaluator/testsets.py @@ -233,7 +233,7 @@ def parse_conf_pipeline(self, conf): self.pipeline.append(pipe) except Exception as e: - self.add_warning(f'pipe {item["type"]}: {e}\n{traceback.format_exc()}') + self.add_warning(f"pipe {item['type']}: {e}\n{traceback.format_exc()}") def parse_conf_tests(self, conf): allowed_keys = ["name", "title", "exit_code", "args", "files"] diff --git a/frontend/src/PipelineValidation.js b/frontend/src/PipelineValidation.js index be666fa9f..67e6f55cc 100644 --- a/frontend/src/PipelineValidation.js +++ b/frontend/src/PipelineValidation.js @@ -567,7 +567,11 @@ const rules = new DictRule({ ], tests: [ new DockerPipeRule({ - executable: new UnionRule(new ValueRule(), new ArrayRule()) + executable: new UnionRule(new ValueRule(), new ArrayRule()), + image: [ + new ValueRule(), + 'Docker image to use for running tests. 
Default: kelvin/run' + ] }), 'Run input/output/files tests on compiled program.' ], diff --git a/pyproject.toml b/pyproject.toml index 958a510db..1d40f0cfc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,12 +49,12 @@ dependencies = [ "py7zr==1.0.0", ] -[tool.uv] -dev-dependencies = [ - "pre-commit>=3.8.0", +[dependency-groups] +dev = [ "ruff==0.6.3", "pyrefly==0.18.0", "django-debug-toolbar>=6.0.0", + "prek>=0.3.2", ] [tool.ruff] diff --git a/uv.lock b/uv.lock index 2926ee45f..f31c0078d 100644 --- a/uv.lock +++ b/uv.lock @@ -144,15 +144,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/63/e285470a4880a4f36edabe4810057bd4b562c6ddcc165eacf9c3c7210b40/cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235", size = 181956, upload-time = "2023-09-28T18:01:24.971Z" }, ] -[[package]] -name = "cfgv" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, -] - [[package]] name = "charset-normalizer" version = "3.3.2" @@ -275,15 +266,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", size = 9561, upload-time = "2023-05-27T16:07:09.379Z" }, ] -[[package]] -name = "distlib" -version = "0.3.8" 
-source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c4/91/e2df406fb4efacdf46871c25cde65d3c6ee5e173b7e5a4547a47bae91920/distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64", size = 609931, upload-time = "2023-12-12T07:14:03.091Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/41/9307e4f5f9976bc8b7fea0b66367734e8faf3ec84bc0d412d8cfabbb66cd/distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", size = 468850, upload-time = "2023-12-12T07:13:59.966Z" }, -] - [[package]] name = "distro" version = "1.9.0" @@ -455,15 +437,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] -[[package]] -name = "filelock" -version = "3.16.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" }, -] - [[package]] name = "h11" version = "0.16.0" @@ -510,15 +483,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = 
"sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] -[[package]] -name = "identify" -version = "2.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097, upload-time = "2024-09-14T23:50:32.513Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972, upload-time = "2024-09-14T23:50:30.747Z" }, -] - [[package]] name = "idna" version = "3.6" @@ -659,7 +623,7 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "django-debug-toolbar" }, - { name = "pre-commit" }, + { name = "prek" }, { name = "pyrefly" }, { name = "ruff" }, ] @@ -712,7 +676,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "django-debug-toolbar", specifier = ">=6.0.0" }, - { name = "pre-commit", specifier = ">=3.8.0" }, + { name = "prek", specifier = ">=0.3.2" }, { name = "pyrefly", specifier = "==0.18.0" }, { name = "ruff", specifier = "==0.6.3" }, ] @@ -825,15 +789,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9b/cd/dc52755d30ba41c60243235460961fc28022e5b6731f16c268667625baea/networkx-2.5-py3-none-any.whl", hash = "sha256:8c5812e9f798d37c50570d15c4a69d5710a18d77bafc903ee9c5fba7454c616c", size = 1615413, upload-time = "2020-08-22T20:38:37.263Z" }, ] -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = 
"sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, -] - [[package]] name = "numpy" version = "1.26.4" @@ -927,15 +882,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/61/6cff8a8dbbac3d7fb7adb435b60737a7d0b0849f53e3af38f2c94d988da6/pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48", size = 2229322, upload-time = "2024-01-02T09:15:57.475Z" }, ] -[[package]] -name = "platformdirs" -version = "4.3.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" }, -] - [[package]] name = "plum-dispatch" version = "2.5.8" @@ -951,19 +897,27 @@ wheels = [ ] [[package]] -name = "pre-commit" -version = "3.8.0" +name = "prek" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv" }, - { name = "identify" }, - { name = "nodeenv" }, - { name = "pyyaml" }, - { name = "virtualenv" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/64/10/97ee2fa54dff1e9da9badbc5e35d0bbaef0776271ea5907eccf64140f72f/pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af", size = 177815, upload-time = "2024-07-28T19:59:01.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/f5/ee52def928dd1355c20bcfcf765e1e61434635c33f3075e848e7b83a157b/prek-0.3.2.tar.gz", hash = "sha256:dce0074ff1a21290748ca567b4bda7553ee305a8c7b14d737e6c58364a499364", size = 334229, upload-time = "2026-02-06T13:49:47.539Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/92/caae8c86e94681b42c246f0bca35c059a2f0529e5b92619f6aba4cf7e7b6/pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f", size = 204643, upload-time = "2024-07-28T19:58:59.335Z" }, + { url = "https://files.pythonhosted.org/packages/76/69/70a5fc881290a63910494df2677c0fb241d27cfaa435bbcd0de5cd2e2443/prek-0.3.2-py3-none-linux_armv6l.whl", hash = "sha256:4f352f9c3fc98aeed4c8b2ec4dbf16fc386e45eea163c44d67e5571489bd8e6f", size = 4614960, upload-time = "2026-02-06T13:50:05.818Z" }, + { url = "https://files.pythonhosted.org/packages/c0/15/a82d5d32a2207ccae5d86ea9e44f2b93531ed000faf83a253e8d1108e026/prek-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a000cfbc3a6ec7d424f8be3c3e69ccd595448197f92daac8652382d0acc2593", size = 4622889, upload-time = "2026-02-06T13:49:53.662Z" }, + { url = "https://files.pythonhosted.org/packages/89/75/ea833b58a12741397017baef9b66a6e443bfa8286ecbd645d14111446280/prek-0.3.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5436bdc2702cbd7bcf9e355564ae66f8131211e65fefae54665a94a07c3d450a", size = 4239653, upload-time = "2026-02-06T13:50:02.88Z" }, + { url = "https://files.pythonhosted.org/packages/10/b4/d9c3885987afac6e20df4cb7db14e3b0d5a08a77ae4916488254ebac4d0b/prek-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = 
"sha256:0161b5f584f9e7f416d6cf40a17b98f17953050ff8d8350ec60f20fe966b86b6", size = 4595101, upload-time = "2026-02-06T13:49:49.813Z" }, + { url = "https://files.pythonhosted.org/packages/21/a6/1a06473ed83dbc898de22838abdb13954e2583ce229f857f61828384634c/prek-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e641e8533bca38797eebb49aa89ed0e8db0e61225943b27008c257e3af4d631", size = 4521978, upload-time = "2026-02-06T13:49:41.266Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5e/c38390d5612e6d86b32151c1d2fdab74a57913473193591f0eb00c894c21/prek-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfca1810d49d3f9ef37599c958c4e716bc19a1d78a7e88cbdcb332e0b008994f", size = 4829108, upload-time = "2026-02-06T13:49:44.598Z" }, + { url = "https://files.pythonhosted.org/packages/80/a6/cecce2ab623747ff65ed990bb0d95fa38449ee19b348234862acf9392fff/prek-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d69d754299a95a85dc20196f633232f306bee7e7c8cba61791f49ce70404ec", size = 5357520, upload-time = "2026-02-06T13:49:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/a5/18/d6bcb29501514023c76d55d5cd03bdbc037737c8de8b6bc41cdebfb1682c/prek-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:539dcb90ad9b20837968539855df6a29493b328a1ae87641560768eed4f313b0", size = 4852635, upload-time = "2026-02-06T13:49:58.347Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0a/ae46f34ba27ba87aea5c9ad4ac9cd3e07e014fd5079ae079c84198f62118/prek-0.3.2-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1998db3d0cbe243984736c82232be51318f9192e2433919a6b1c5790f600b5fd", size = 4599484, upload-time = "2026-02-06T13:49:43.296Z" }, + { url = "https://files.pythonhosted.org/packages/1a/a9/73bfb5b3f7c3583f9b0d431924873928705cdef6abb3d0461c37254a681b/prek-0.3.2-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:07ab237a5415a3e8c0db54de9d63899bcd947624bdd8820d26f12e65f8d19eb7", size = 4657694, upload-time = "2026-02-06T13:50:01.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/0994bc176e1a80110fad3babce2c98b0ac4007630774c9e18fc200a34781/prek-0.3.2-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:0ced19701d69c14a08125f14a5dd03945982edf59e793c73a95caf4697a7ac30", size = 4509337, upload-time = "2026-02-06T13:49:54.891Z" }, + { url = "https://files.pythonhosted.org/packages/f9/13/e73f85f65ba8f626468e5d1694ab3763111513da08e0074517f40238c061/prek-0.3.2-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ffb28189f976fa111e770ee94e4f298add307714568fb7d610c8a7095cb1ce59", size = 4697350, upload-time = "2026-02-06T13:50:04.526Z" }, + { url = "https://files.pythonhosted.org/packages/14/47/98c46dcd580305b9960252a4eb966f1a7b1035c55c363f378d85662ba400/prek-0.3.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f63134b3eea14421789a7335d86f99aee277cb520427196f2923b9260c60e5c5", size = 4955860, upload-time = "2026-02-06T13:49:56.581Z" }, + { url = "https://files.pythonhosted.org/packages/73/42/1bb4bba3ff47897df11e9dfd774027cdfa135482c961a54e079af0faf45a/prek-0.3.2-py3-none-win32.whl", hash = "sha256:58c806bd1344becd480ef5a5ba348846cc000af0e1fbe854fef91181a2e06461", size = 4267619, upload-time = "2026-02-06T13:49:39.503Z" }, + { url = "https://files.pythonhosted.org/packages/97/11/6665f47a7c350d83de17403c90bbf7a762ef50876ece456a86f64f46fbfb/prek-0.3.2-py3-none-win_amd64.whl", hash = "sha256:70114b48e9eb8048b2c11b4c7715ce618529c6af71acc84dd8877871a2ef71a6", size = 4624324, upload-time = "2026-02-06T13:49:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/22/e7/740997ca82574d03426f897fd88afe3fc8a7306b8c7ea342a8bc1c538488/prek-0.3.2-py3-none-win_arm64.whl", hash = "sha256:9144d176d0daa2469a25c303ef6f6fa95a8df015eb275232f5cb53551ecefef0", size = 4336008, upload-time = "2026-02-06T13:49:52.27Z" }, ] [[package]] @@ -1577,20 +1531,6 @@ version = "2.0.24" source 
= { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1b/ed/136698c76722268569eac4e48ab90f3ced8b8035e414a8290cb935c40c16/uwsgi-2.0.24.tar.gz", hash = "sha256:77b6dd5cd633f4ae87ee393f7701f617736815499407376e78f3d16467523afe", size = 810559, upload-time = "2024-02-08T16:43:45.204Z" } -[[package]] -name = "virtualenv" -version = "20.26.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "distlib" }, - { name = "filelock" }, - { name = "platformdirs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/4c/66ce54c8736ff164e85117ca36b02a1e14c042a6963f85eeda82664fda4e/virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4", size = 9371932, upload-time = "2024-09-17T21:48:54.006Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/1d/e1a44fdd6d30829ba21fc58b5d98a67e7aae8f4165f11d091e53aec12560/virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6", size = 5999288, upload-time = "2024-09-17T21:48:51.283Z" }, -] - [[package]] name = "wrapt" version = "1.16.0" diff --git a/web/views/student.py b/web/views/student.py index b5c203678..37ac9a6af 100644 --- a/web/views/student.py +++ b/web/views/student.py @@ -192,8 +192,8 @@ def student_index(request: HttpRequest) -> HttpResponse: ): semesters.append( { - "label": f'{year}/{year + 1} {"winter" if winter else "summer"}', - "value": f'{year}{"W" if winter else "S"}', + "label": f"{year}/{year + 1} {'winter' if winter else 'summer'}", + "value": f"{year}{'W' if winter else 'S'}", } )