diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 3ea5b1a..20681f6 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -59,6 +59,9 @@ RUN git config --system init.defaultBranch main RUN git config --system core.autocrlf input RUN git config --system color.ui auto +# Ensure vscode user owns the workspace directory +RUN chown -R $USERNAME:$USERNAME /workspace + # Configure zsh USER $USERNAME ENV HOME=/home/$USERNAME @@ -71,10 +74,14 @@ RUN curl https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install. # Set Locale for Functional Autocompletion in zsh RUN sudo locale-gen en_US.UTF-8 +# Install python dev dependencies +COPY pyproject.toml /workspace/pyproject.toml +RUN pip install --upgrade pip +RUN pip install -e ".[dev]" + # Install Backend Dependencies COPY backend/requirements.txt /workspace/backend/requirements.txt WORKDIR /workspace/backend -RUN pip install --upgrade pip RUN pip install -r requirements.txt # Expose application ports diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 9471d47..ac40d87 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -19,8 +19,7 @@ "ms-python.vscode-pylance", "charliermarsh.ruff", "gruntfuggly.todo-tree", - "ms-azuretools.vscode-docker", - "ms-python.isort" + "ms-azuretools.vscode-docker" ], "settings": { "terminal.integrated.defaultProfile.linux": "zsh", @@ -75,7 +74,10 @@ "python.defaultInterpreterPath": "/usr/local/bin/python", "[python]": { "editor.defaultFormatter": "charliermarsh.ruff", - "editor.tabSize": 4 + "editor.tabSize": 4, + "editor.codeActionsOnSave": { + "source.organizeImports": "never" // Let Ruff handle import sorting + } }, "python.analysis.extraPaths": ["backend"], "python.testing.pytestEnabled": true, @@ -87,11 +89,7 @@ "reportImportCycles": "error" }, "python.analysis.typeCheckingMode": "basic", - "python.analysis.autoImportCompletions": true, - "ruff.enable": true, - 
"ruff.organizeImports": true, - "ruff.fixAll": true, - "ruff.lineLength": 100 + "python.analysis.autoImportCompletions": true } } } diff --git a/.devcontainer/post_create.sh b/.devcontainer/post_create.sh index c1f5433..540c81c 100755 --- a/.devcontainer/post_create.sh +++ b/.devcontainer/post_create.sh @@ -1,8 +1,20 @@ #!/bin/bash +set -e -cd ../frontend +# Expected to run from .devcontainer/ + +echo "================== Installing pre-commit hooks =================" +cd .. +pre-commit install +pre-commit install-hooks + +echo "" +echo "=============== Installing frontend dependencies ===============" +cd ./frontend npm i --verbose +echo "" +echo "==================== Setting up the database ===================" cd ../backend python -m script.create_db python -m script.create_test_db diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 893fc1e..bdf1ac9 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,21 +1,23 @@ --- name: Feature request about: Suggest an idea for this project -title: '' -labels: '' -assignees: '' - +title: "" +labels: "" +assignees: "" --- ### Motivation + Why are we doing this? What problem does it solve? ### Deliverables + - Specific thing to implement/fix - Another deliverable - etc. 
### Important Notes + - Dependencies (e.g., Depends on #XX) - Implementation constraints or preferences - Any other context or gotchas diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..de1c155 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +repos: + # General purpose + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + + # Python + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.14.10 + hooks: + - id: ruff-check # Linting + args: [--fix] # Auto-fix issues + - id: ruff-format # Formatting + + # JavaScript/TypeScript and other Prettier compatible files + - repo: local + hooks: + - id: prettier + name: prettier + # Runs prettier on staged, removing the `frontend/` prefix to work with relative filepaths + entry: bash -c 'cd frontend && npx prettier --write "${@/#frontend\//}"' -- + language: system + files: ^frontend/.*\.(js|jsx|ts|tsx|json|css|md)$ + pass_filenames: true diff --git a/README.md b/README.md index 1b610ae..3f7223a 100644 --- a/README.md +++ b/README.md @@ -87,15 +87,17 @@ Or you can do the actions manually. Then, ## Running The App -*If you haven't run in a day or more, run `python -m script.reset_dev` from the `/backend` directory to ensure all mock data is updated to be centered around today's date* +_If you haven't run in a day or more, run `python -m script.reset_dev` from the `/backend` directory to ensure all mock data is updated to be centered around today's date_ ### VSCode Debugger (Recommended) + Navigate to the "Debug and Run" tab on the VSCode side bar. 
At the top of the side bar, next to the green play button, select the desired module to run + - **Backend**: Starts the FastAPI backend on http://localhost:8000 - **Purge & Frontend**: Starts the Next.js frontend on http://localhost:3000 - - *The "Purge" part of this is referring to the task that kills any `next dev` processes in order to address a devcontainer issue. Note that this prevents you from running multiple of these debug sessions concurrently. If mulitple are needed, refer to the manual instructions below* - - _The "Purge" part of this is referring to the task that kills any `next dev` processes in order to address a devcontainer issue. Note that this prevents you from running multiple of these debug sessions concurrently. If multiple are needed, refer to the manual instructions below_ - **Full Stack**: Starts both of the above in separate terminals Then simply press the green play button diff --git a/backend/.env.template b/backend/.env.template index 71a9cc8..822632e 100644 --- a/backend/.env.template +++ b/backend/.env.template @@ -4,4 +4,4 @@ POSTGRES_PASSWORD=admin POSTGRES_HOST=db POSTGRES_PORT=5432 HOST=localhost -GOOGLE_MAPS_API_KEY=DA_KEEYYY #replace with your actual API key \ No newline at end of file +GOOGLE_MAPS_API_KEY=DA_KEEYYY #replace with your actual API key diff --git a/backend/script/create_db.py b/backend/script/create_db.py index cf1babe..51868a8 100644 --- a/backend/script/create_db.py +++ b/backend/script/create_db.py @@ -1,6 +1,6 @@ +from sqlalchemy import create_engine, text from src.core.config import env from src.core.database import server_url -from sqlalchemy import create_engine, text engine = create_engine(server_url(sync=True), isolation_level="AUTOCOMMIT") diff --git a/backend/script/create_test_db.py b/backend/script/create_test_db.py index 2846ff3..2ced15a 100644 --- a/backend/script/create_test_db.py +++ b/backend/script/create_test_db.py @@ -1,13 +1,11 @@ -from src.core.database import server_url from sqlalchemy
import create_engine, text +from src.core.database import server_url engine = create_engine(server_url(sync=True), isolation_level="AUTOCOMMIT") with engine.connect() as connection: # Check if test database already exists - result = connection.execute( - text("SELECT 1 FROM pg_database WHERE datname = 'ocsl_test'") - ) + result = connection.execute(text("SELECT 1 FROM pg_database WHERE datname = 'ocsl_test'")) if result.fetchone(): print("Test database 'ocsl_test' already exists") else: diff --git a/backend/script/delete_db.py b/backend/script/delete_db.py index 1314359..ca93d23 100644 --- a/backend/script/delete_db.py +++ b/backend/script/delete_db.py @@ -1,6 +1,6 @@ +from sqlalchemy import create_engine, text from src.core.config import env from src.core.database import server_url -from sqlalchemy import create_engine, text engine = create_engine(server_url(sync=True), isolation_level="AUTOCOMMIT") diff --git a/backend/script/reset_dev.py b/backend/script/reset_dev.py index a12ab0a..4fa80ee 100644 --- a/backend/script/reset_dev.py +++ b/backend/script/reset_dev.py @@ -8,7 +8,7 @@ import asyncio import json import re -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from pathlib import Path import src.modules # Ensure all modules are imported so their entities are registered # noqa: F401 @@ -29,7 +29,7 @@ def parse_date(date_str: str | None) -> datetime | None: if not date_str or date_str == "null": return None - now = datetime.now(timezone.utc) + now = datetime.now(UTC) if date_str.startswith("NOW"): match = re.match(r"NOW([+-])(\d+)([hdwmy])", date_str) @@ -55,7 +55,7 @@ def parse_date(date_str: str | None) -> datetime | None: # Parse ISO format string and ensure it's timezone-aware dt = datetime.fromisoformat(date_str) if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) + dt = dt.replace(tzinfo=UTC) return dt @@ -80,7 +80,6 @@ async def reset_dev(): async with AsyncSessionLocal() as session: with open( 
str(Path(__file__).parent.parent.parent / "frontend" / "shared" / "mock_data.json"), - "r", ) as f: data = json.load(f) @@ -144,7 +143,9 @@ async def reset_dev(): for party_data in data["parties"]: party_datetime = parse_date(party_data["party_datetime"]) - assert party_datetime is not None, f"party_datetime required for party {party_data['id']}" + assert party_datetime is not None, ( + f"party_datetime required for party {party_data['id']}" + ) party = PartyEntity( party_datetime=party_datetime, diff --git a/backend/src/core/authentication.py b/backend/src/core/authentication.py index 650da3a..13162f4 100644 --- a/backend/src/core/authentication.py +++ b/backend/src/core/authentication.py @@ -13,8 +13,8 @@ class HTTPBearer401(HTTPBearer): async def __call__(self, request: Request): try: return await super().__call__(request) - except Exception: - raise CredentialsException() + except Exception as e: + raise CredentialsException() from e bearer_scheme = HTTPBearer401() diff --git a/backend/src/core/database.py b/backend/src/core/database.py index cca425c..a0120b9 100644 --- a/backend/src/core/database.py +++ b/backend/src/core/database.py @@ -1,4 +1,4 @@ -from typing import AsyncGenerator +from collections.abc import AsyncGenerator from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import DeclarativeBase diff --git a/backend/src/core/models.py b/backend/src/core/models.py index 5aaea33..4242452 100644 --- a/backend/src/core/models.py +++ b/backend/src/core/models.py @@ -1,11 +1,7 @@ -from typing import Generic, TypeVar - from pydantic import BaseModel -T = TypeVar("T") - -class PaginatedResponse(BaseModel, Generic[T]): +class PaginatedResponse[T](BaseModel): items: list[T] total_records: int page_size: int diff --git a/backend/src/modules/__init__.py b/backend/src/modules/__init__.py index 349be19..34688ed 100644 --- a/backend/src/modules/__init__.py +++ b/backend/src/modules/__init__.py @@ -25,10 +25,10 @@ 
from .student.student_entity import StudentEntity __all__ = [ - "StudentEntity", "AccountEntity", - "PoliceEntity", - "PartyEntity", - "LocationEntity", "ComplaintEntity", + "LocationEntity", + "PartyEntity", + "PoliceEntity", + "StudentEntity", ] diff --git a/backend/src/modules/account/account_service.py b/backend/src/modules/account/account_service.py index 5440875..199ff5e 100644 --- a/backend/src/modules/account/account_service.py +++ b/backend/src/modules/account/account_service.py @@ -89,9 +89,9 @@ async def create_account(self, data: AccountData) -> AccountDto: try: self.session.add(new_account) await self.session.commit() - except IntegrityError: + except IntegrityError as e: # handle race condition where another session inserted the same email - raise AccountConflictException(data.email) + raise AccountConflictException(data.email) from e await self.session.refresh(new_account) return new_account.to_dto() @@ -117,8 +117,8 @@ async def update_account(self, account_id: int, data: AccountData) -> AccountDto try: self.session.add(account_entity) await self.session.commit() - except IntegrityError: - raise AccountConflictException(data.email) + except IntegrityError as e: + raise AccountConflictException(data.email) from e await self.session.refresh(account_entity) return account_entity.to_dto() diff --git a/backend/src/modules/complaint/complaint_service.py b/backend/src/modules/complaint/complaint_service.py index c452061..1969b48 100644 --- a/backend/src/modules/complaint/complaint_service.py +++ b/backend/src/modules/complaint/complaint_service.py @@ -57,7 +57,7 @@ async def create_complaint(self, location_id: int, data: ComplaintData) -> Compl except IntegrityError as e: # Foreign key constraint violation indicates location doesn't exist if "locations" in str(e).lower() or "foreign key" in str(e).lower(): - raise LocationNotFoundException(location_id) + raise LocationNotFoundException(location_id) from e raise await self.session.refresh(new_complaint) 
return new_complaint.to_dto() @@ -78,7 +78,7 @@ async def update_complaint( except IntegrityError as e: # Foreign key constraint violation indicates location doesn't exist if "locations" in str(e).lower() or "foreign key" in str(e).lower(): - raise LocationNotFoundException(location_id) + raise LocationNotFoundException(location_id) from e raise await self.session.refresh(complaint_entity) return complaint_entity.to_dto() diff --git a/backend/src/modules/location/location_entity.py b/backend/src/modules/location/location_entity.py index 2958e98..f2cc497 100644 --- a/backend/src/modules/location/location_entity.py +++ b/backend/src/modules/location/location_entity.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import Self from sqlalchemy import DECIMAL, DateTime, Index, Integer, String @@ -61,7 +61,7 @@ def to_dto(self) -> LocationDto: hold_exp = self.hold_expiration if hold_exp is not None and hold_exp.tzinfo is None: - hold_exp = hold_exp.replace(tzinfo=timezone.utc) + hold_exp = hold_exp.replace(tzinfo=UTC) return LocationDto( id=self.id, diff --git a/backend/src/modules/location/location_router.py b/backend/src/modules/location/location_router.py index 4a4272d..3391f86 100644 --- a/backend/src/modules/location/location_router.py +++ b/backend/src/modules/location/location_router.py @@ -45,13 +45,13 @@ async def autocomplete_address( raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=str(e), - ) - except Exception: + ) from e + except Exception as e: # Log error in production raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to fetch address suggestions. 
Please try again later.", - ) + ) from e @location_router.get( @@ -78,12 +78,12 @@ async def get_place_details( raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=str(e), - ) - except Exception: + ) from e + except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to fetch place details. Please try again later.", - ) + ) from e @location_router.get("/", response_model=PaginatedLocationResponse) diff --git a/backend/src/modules/location/location_service.py b/backend/src/modules/location/location_service.py index e060e70..88b58f2 100644 --- a/backend/src/modules/location/location_service.py +++ b/backend/src/modules/location/location_service.py @@ -1,5 +1,5 @@ import asyncio -from datetime import datetime, timezone +from datetime import UTC, datetime import googlemaps from fastapi import Depends @@ -60,7 +60,8 @@ def __init__(self, location_id: int, hold_expiration: datetime): class CountLimitExceededException(BadRequestException): def __init__(self, location_id: int, count_type: str): super().__init__( - f"Cannot increment {count_type} for location {location_id}: maximum count of {MAX_COUNT} reached" + f"Cannot increment {count_type} for location {location_id}: " + f"maximum count of {MAX_COUNT} reached" ) @@ -95,9 +96,7 @@ async def _get_location_entity_by_place_id(self, google_place_id: str) -> Locati def assert_valid_location_hold(self, location: LocationDto) -> None: """Validate that location does not have an active hold.""" - if location.hold_expiration is not None and location.hold_expiration > datetime.now( - timezone.utc - ): + if location.hold_expiration is not None and location.hold_expiration > datetime.now(UTC): raise LocationHoldActiveException(location.id, location.hold_expiration) async def get_locations(self) -> list[LocationDto]: @@ -126,9 +125,9 @@ async def create_location(self, data: LocationData) -> LocationDto: try: self.session.add(new_location) await self.session.commit() 
- except IntegrityError: + except IntegrityError as e: # handle race condition where another session inserted the same google_place_id - raise LocationConflictException(data.google_place_id) + raise LocationConflictException(data.google_place_id) from e await self.session.refresh(new_location) return new_location.to_dto() @@ -153,9 +152,11 @@ async def get_or_create_location(self, place_id: str) -> LocationDto: async def update_location(self, location_id: int, data: LocationData) -> LocationDto: location_entity = await self._get_location_entity_by_id(location_id) - if data.google_place_id != location_entity.google_place_id: - if await self._get_location_entity_by_place_id(data.google_place_id): - raise LocationConflictException(data.google_place_id) + if ( + data.google_place_id != location_entity.google_place_id + and await self._get_location_entity_by_place_id(data.google_place_id) + ): + raise LocationConflictException(data.google_place_id) for key, value in data.model_dump().items(): if key == "id": @@ -166,8 +167,8 @@ async def update_location(self, location_id: int, data: LocationData) -> Locatio try: self.session.add(location_entity) await self.session.commit() - except IntegrityError: - raise LocationConflictException(data.google_place_id) + except IntegrityError as e: + raise LocationConflictException(data.google_place_id) from e await self.session.refresh(location_entity) return location_entity.to_dto() @@ -226,15 +227,15 @@ async def autocomplete_address(self, input_text: str) -> list[AutocompleteResult except GoogleMapsAPIException: raise except googlemaps.exceptions.ApiError as e: - raise GoogleMapsAPIException(f"API error ({e.status}): {str(e)}") + raise GoogleMapsAPIException(f"API error ({e.status}): {e!s}") from e except googlemaps.exceptions.Timeout as e: - raise GoogleMapsAPIException(f"Request timed out: {str(e)}") + raise GoogleMapsAPIException(f"Request timed out: {e!s}") from e except googlemaps.exceptions.HTTPError as e: - raise 
GoogleMapsAPIException(f"HTTP error: {str(e)}") + raise GoogleMapsAPIException(f"HTTP error: {e!s}") from e except googlemaps.exceptions.TransportError as e: - raise GoogleMapsAPIException(f"Transport error: {str(e)}") + raise GoogleMapsAPIException(f"Transport error: {e!s}") from e except Exception as e: - raise GoogleMapsAPIException(f"Failed to autocomplete address: {str(e)}") + raise GoogleMapsAPIException(f"Failed to autocomplete address: {e!s}") from e async def get_place_details(self, place_id: str) -> AddressData: """ @@ -314,16 +315,16 @@ async def get_place_details(self, place_id: str) -> AddressData: except googlemaps.exceptions.ApiError as e: # Map Google Maps API error statuses to appropriate exceptions if e.status == "NOT_FOUND": - raise PlaceNotFoundException(place_id) + raise PlaceNotFoundException(place_id) from e elif e.status == "INVALID_REQUEST": - raise InvalidPlaceIdException(place_id) + raise InvalidPlaceIdException(place_id) from e else: - raise GoogleMapsAPIException(f"API error ({e.status}): {str(e)}") + raise GoogleMapsAPIException(f"API error ({e.status}): {e!s}") from e except googlemaps.exceptions.Timeout as e: - raise GoogleMapsAPIException(f"Request timed out: {str(e)}") + raise GoogleMapsAPIException(f"Request timed out: {e!s}") from e except googlemaps.exceptions.HTTPError as e: - raise GoogleMapsAPIException(f"HTTP error: {str(e)}") + raise GoogleMapsAPIException(f"HTTP error: {e!s}") from e except googlemaps.exceptions.TransportError as e: - raise GoogleMapsAPIException(f"Transport error: {str(e)}") + raise GoogleMapsAPIException(f"Transport error: {e!s}") from e except Exception as e: - raise GoogleMapsAPIException(f"Failed to get place details: {str(e)}") + raise GoogleMapsAPIException(f"Failed to get place details: {e!s}") from e diff --git a/backend/src/modules/party/party_entity.py b/backend/src/modules/party/party_entity.py index a9a5621..f4926b4 100644 --- a/backend/src/modules/party/party_entity.py +++ 
b/backend/src/modules/party/party_entity.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import TYPE_CHECKING, Self from sqlalchemy import DateTime, Enum, ForeignKey, Integer, String, select @@ -61,7 +61,7 @@ def to_dto(self) -> PartyDto: # Ensure party_datetime is timezone-aware party_dt = self.party_datetime if party_dt.tzinfo is None: - party_dt = party_dt.replace(tzinfo=timezone.utc) + party_dt = party_dt.replace(tzinfo=UTC) return PartyDto( id=self.id, diff --git a/backend/src/modules/party/party_model.py b/backend/src/modules/party/party_model.py index 51788fc..a08f9be 100644 --- a/backend/src/modules/party/party_model.py +++ b/backend/src/modules/party/party_model.py @@ -1,4 +1,4 @@ -from typing import Annotated, Literal, Union +from typing import Annotated, Literal from pydantic import AwareDatetime, BaseModel, EmailStr, Field from src.core.models import PaginatedResponse @@ -59,9 +59,7 @@ class AdminCreatePartyDto(BaseModel): # Discriminated union for party creation/update requests -CreatePartyDto = Annotated[ - Union[StudentCreatePartyDto, AdminCreatePartyDto], Field(discriminator="type") -] +CreatePartyDto = Annotated[StudentCreatePartyDto | AdminCreatePartyDto, Field(discriminator="type")] PaginatedPartiesResponse = PaginatedResponse[PartyDto] diff --git a/backend/src/modules/party/party_router.py b/backend/src/modules/party/party_router.py index 71b8c7e..3f7a8e9 100644 --- a/backend/src/modules/party/party_router.py +++ b/backend/src/modules/party/party_router.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from fastapi import APIRouter, Depends, Query from fastapi.responses import Response @@ -40,9 +40,11 @@ async def create_party( - Students: provide type="student", party_datetime, place_id, and contact_two (ContactDTO) - contact_one is auto-filled from the authenticated student - - Admins: provide type="admin", party_datetime, place_id, 
contact_one_email, and contact_two (ContactDTO) + - Admins: provide type="admin", party_datetime, place_id, contact_one_email, and + contact_two (ContactDTO) - contact_one_email identifies the first contact by email - - contact_two is a ContactDTO with email, first_name, last_name, phone_number, and contact_preference + - contact_two is a ContactDTO with email, first_name, last_name, phone_number, and + contact_preference The location will be automatically created if it doesn't exist in the database. If contact_two's email doesn't exist in the system, a new student account will be created. @@ -147,12 +149,12 @@ async def get_parties_nearby( """ # Parse date strings to datetime objects try: - start_datetime = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=timezone.utc) - end_datetime = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc) + start_datetime = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=UTC) + end_datetime = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC) # Set end_datetime to end of day (23:59:59) end_datetime = end_datetime.replace(hour=23, minute=59, second=59) except ValueError as e: - raise UnprocessableEntityException(f"Invalid date format. Expected YYYY-MM-DD: {str(e)}") + raise UnprocessableEntityException(f"Invalid date format. Expected YYYY-MM-DD: {e}") from e # Validate that start_date is not greater than end_date if start_datetime > end_datetime: @@ -201,8 +203,10 @@ async def get_parties_csv( end_datetime = datetime.strptime(end_date, "%Y-%m-%d") end_datetime = end_datetime.replace(hour=23, minute=59, second=59, microsecond=999999) - except ValueError: - raise UnprocessableEntityException("Invalid date format. Use YYYY-MM-DD format for dates.") + except ValueError as e: + raise UnprocessableEntityException( + "Invalid date format. Use YYYY-MM-DD format for dates." 
+ ) from e # Validate that start_date is not greater than end_date if start_datetime > end_datetime: @@ -230,9 +234,11 @@ async def update_party( - Students: provide type="student", party_datetime, place_id, and contact_two (ContactDTO) - contact_one is auto-filled from the authenticated student - - Admins: provide type="admin", party_datetime, place_id, contact_one_email, and contact_two (ContactDTO) + - Admins: provide type="admin", party_datetime, place_id, contact_one_email, and + contact_two (ContactDTO) - contact_one_email identifies the first contact by email - - contact_two is a ContactDTO with email, first_name, last_name, phone_number, and contact_preference + - contact_two is a ContactDTO with email, first_name, last_name, phone_number, and + contact_preference The location will be automatically created if it doesn't exist in the database. If contact_two's email doesn't exist in the system, a new student account will be created. diff --git a/backend/src/modules/party/party_service.py b/backend/src/modules/party/party_service.py index 285bad1..cf5d952 100644 --- a/backend/src/modules/party/party_service.py +++ b/backend/src/modules/party/party_service.py @@ -1,8 +1,7 @@ import csv import io import math -from datetime import datetime, timedelta, timezone -from typing import List +from datetime import UTC, datetime, timedelta from fastapi import Depends from sqlalchemy import select @@ -74,12 +73,12 @@ async def _get_party_entity_by_id(self, party_id: int) -> PartyEntity: def _calculate_business_days_ahead(self, target_date: datetime) -> int: """Calculate the number of business days between now and target date.""" # Ensure both datetimes are timezone-aware (use UTC) - current_date = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0) + current_date = datetime.now(UTC).replace(hour=0, minute=0, second=0, microsecond=0) # If target_date is naive, make it UTC-aware; otherwise keep its timezone if target_date.tzinfo is None: 
target_date_only = target_date.replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc + hour=0, minute=0, second=0, microsecond=0, tzinfo=UTC ) else: target_date_only = target_date.replace(hour=0, minute=0, second=0, microsecond=0) @@ -109,18 +108,16 @@ async def _validate_party_smart_attendance(self, student_id: int) -> None: raise PartySmartNotCompletedException(student_id) # Calculate the most recent August 1st - now = datetime.now(timezone.utc) + now = datetime.now(UTC) current_year = now.year # August 1st of the current year (UTC) - august_first_this_year = datetime(current_year, 8, 1, 0, 0, 0, tzinfo=timezone.utc) + august_first_this_year = datetime(current_year, 8, 1, 0, 0, 0, tzinfo=UTC) # If today is before August 1st, use last year's August 1st # Otherwise, use this year's August 1st if now < august_first_this_year: - most_recent_august_first = datetime( - current_year - 1, 8, 1, 0, 0, 0, tzinfo=timezone.utc - ) + most_recent_august_first = datetime(current_year - 1, 8, 1, 0, 0, 0, tzinfo=UTC) else: most_recent_august_first = august_first_this_year @@ -146,8 +143,8 @@ async def _get_student_by_email(self, email: str) -> StudentEntity: # First find the account by email try: account = await self.account_service.get_account_by_email(email) - except AccountByEmailNotFoundException: - raise StudentNotFoundException(email=email) + except AccountByEmailNotFoundException as e: + raise StudentNotFoundException(email=email) from e # Then get the student entity result = await self.session.execute( @@ -160,7 +157,7 @@ async def _get_student_by_email(self, email: str) -> StudentEntity: raise StudentNotFoundException(account.id) return student - async def get_parties(self, skip: int = 0, limit: int | None = None) -> List[PartyDto]: + async def get_parties(self, skip: int = 0, limit: int | None = None) -> list[PartyDto]: query = ( select(PartyEntity) .offset(skip) @@ -179,7 +176,7 @@ async def get_party_by_id(self, party_id: int) -> PartyDto: 
party_entity = await self._get_party_entity_by_id(party_id) return party_entity.to_dto() - async def get_parties_by_location(self, location_id: int) -> List[PartyDto]: + async def get_parties_by_location(self, location_id: int) -> list[PartyDto]: result = await self.session.execute( select(PartyEntity) .where(PartyEntity.location_id == location_id) @@ -191,7 +188,7 @@ async def get_parties_by_location(self, location_id: int) -> List[PartyDto]: parties = result.scalars().all() return [party.to_dto() for party in parties] - async def get_parties_by_contact(self, student_id: int) -> List[PartyDto]: + async def get_parties_by_contact(self, student_id: int) -> list[PartyDto]: result = await self.session.execute( select(PartyEntity) .where(PartyEntity.contact_one_id == student_id) @@ -205,7 +202,7 @@ async def get_parties_by_contact(self, student_id: int) -> List[PartyDto]: async def get_parties_by_date_range( self, start_date: datetime, end_date: datetime - ) -> List[PartyDto]: + ) -> list[PartyDto]: result = await self.session.execute( select(PartyEntity) .where( @@ -230,7 +227,7 @@ async def create_party(self, data: PartyData) -> PartyDto: self.session.add(new_party) await self.session.commit() except IntegrityError as e: - raise PartyConflictException(f"Failed to create party: {str(e)}") + raise PartyConflictException(f"Failed to create party: {e!s}") from e return await new_party.load_dto(self.session) async def update_party(self, party_id: int, data: PartyData) -> PartyDto: @@ -252,7 +249,7 @@ async def update_party(self, party_id: int, data: PartyData) -> PartyDto: self.session.add(party_entity) await self.session.commit() except IntegrityError as e: - raise PartyConflictException(f"Failed to update party: {str(e)}") + raise PartyConflictException(f"Failed to update party: {e!s}") from e return await party_entity.load_dto(self.session) async def create_party_from_student_dto( @@ -377,7 +374,7 @@ async def get_party_count(self) -> int: async def 
get_parties_by_student_and_date( self, student_id: int, target_date: datetime - ) -> List[PartyDto]: + ) -> list[PartyDto]: start_of_day = target_date.replace(hour=0, minute=0, second=0, microsecond=0) end_of_day = target_date.replace(hour=23, minute=59, second=59, microsecond=999999) @@ -396,8 +393,8 @@ async def get_parties_by_student_and_date( parties = result.scalars().all() return [party.to_dto() for party in parties] - async def get_parties_by_radius(self, latitude: float, longitude: float) -> List[PartyDto]: - current_time = datetime.now(timezone.utc) + async def get_parties_by_radius(self, latitude: float, longitude: float) -> list[PartyDto]: + current_time = datetime.now(UTC) start_time = current_time - timedelta(hours=6) end_time = current_time + timedelta(hours=12) @@ -437,7 +434,7 @@ async def get_parties_by_radius_and_date_range( longitude: float, start_date: datetime, end_date: datetime, - ) -> List[PartyDto]: + ) -> list[PartyDto]: """ Get parties within a radius of a location within a specified date range. @@ -493,7 +490,7 @@ def _calculate_haversine_distance( r = 3959 return c * r - async def export_parties_to_csv(self, parties: List[PartyDto]) -> str: + async def export_parties_to_csv(self, parties: list[PartyDto]) -> str: """ Export a list of parties to CSV format. 
@@ -575,7 +572,10 @@ async def export_parties_to_csv(self, parties: List[PartyDto]) -> str: contact_one_phone = "" contact_one_preference = "" if party_entity.contact_one: - contact_one_full_name = f"{party_entity.contact_one.account.first_name} {party_entity.contact_one.account.last_name}" + contact_one_full_name = ( + f"{party_entity.contact_one.account.first_name} " + f"{party_entity.contact_one.account.last_name}" + ) contact_one_phone = party_entity.contact_one.phone_number or "" contact_one_preference = ( party_entity.contact_one.contact_preference.value diff --git a/backend/src/modules/police/police_router.py b/backend/src/modules/police/police_router.py index 886fdfb..e77562d 100644 --- a/backend/src/modules/police/police_router.py +++ b/backend/src/modules/police/police_router.py @@ -13,7 +13,9 @@ response_model=LocationDto, status_code=status.HTTP_200_OK, summary="Increment location warning count", - description="Increments the warning count for a location. Requires police or admin authentication.", + description=( + "Increments the warning count for a location. Requires police or admin authentication." + ), ) async def increment_warnings( location_id: int, @@ -31,7 +33,9 @@ async def increment_warnings( response_model=LocationDto, status_code=status.HTTP_200_OK, summary="Increment location citation count", - description="Increments the citation count for a location. Requires police or admin authentication.", + description=( + "Increments the citation count for a location. Requires police or admin authentication." 
+ ), ) async def increment_citations( location_id: int, diff --git a/backend/src/modules/student/student_entity.py b/backend/src/modules/student/student_entity.py index 202000b..4de3e70 100644 --- a/backend/src/modules/student/student_entity.py +++ b/backend/src/modules/student/student_entity.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import TYPE_CHECKING, Self from sqlalchemy import DateTime, Enum, ForeignKey, Integer, String, select @@ -40,7 +40,7 @@ def to_dto(self) -> "StudentDto": # Ensure last_registered is timezone-aware if present last_reg = self.last_registered if last_reg is not None and last_reg.tzinfo is None: - last_reg = last_reg.replace(tzinfo=timezone.utc) + last_reg = last_reg.replace(tzinfo=UTC) return StudentDto( id=self.account_id, diff --git a/backend/src/modules/student/student_service.py b/backend/src/modules/student/student_service.py index 66abf2c..b305f0d 100644 --- a/backend/src/modules/student/student_service.py +++ b/backend/src/modules/student/student_service.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime from fastapi import Depends from sqlalchemy import func, select @@ -130,9 +130,9 @@ async def create_student(self, data: StudentDataWithNames, account_id: int) -> S try: self.session.add(new_student) await self.session.commit() - except IntegrityError: + except IntegrityError as e: await self.session.rollback() - raise StudentConflictException(data.phone_number) + raise StudentConflictException(data.phone_number) from e await self.session.refresh(new_student, ["account"]) return new_student.to_dto() @@ -149,9 +149,11 @@ async def update_student( if account.role != AccountRole.STUDENT: raise InvalidAccountRoleException(account_id, account.role) - if data.phone_number != student_entity.phone_number: - if await self._get_student_entity_by_phone(data.phone_number): - raise StudentConflictException(data.phone_number) + if ( + 
data.phone_number != student_entity.phone_number + and await self._get_student_entity_by_phone(data.phone_number) + ): + raise StudentConflictException(data.phone_number) # Only update account names if data includes them (StudentDataWithNames) if isinstance(data, StudentDataWithNames): @@ -166,9 +168,9 @@ async def update_student( try: self.session.add(student_entity) await self.session.commit() - except IntegrityError: + except IntegrityError as e: await self.session.rollback() - raise StudentConflictException(data.phone_number) + raise StudentConflictException(data.phone_number) from e await self.session.refresh(student_entity, ["account"]) return student_entity.to_dto() @@ -189,7 +191,7 @@ async def update_is_registered(self, account_id: int, is_registered: bool) -> St student_entity = await self._get_student_entity_by_account_id(account_id) if is_registered: - student_entity.last_registered = datetime.now(timezone.utc) + student_entity.last_registered = datetime.now(UTC) else: student_entity.last_registered = None diff --git a/backend/test/conftest.py b/backend/test/conftest.py index 425bbe6..c72a021 100644 --- a/backend/test/conftest.py +++ b/backend/test/conftest.py @@ -4,7 +4,8 @@ os.environ["GOOGLE_MAPS_API_KEY"] = "invalid_google_maps_api_key_for_tests" -from typing import Any, AsyncGenerator, Callable +from collections.abc import AsyncGenerator, Callable +from typing import Any from unittest.mock import MagicMock, patch import bcrypt @@ -53,13 +54,13 @@ async def test_engine(): @pytest_asyncio.fixture(scope="function") async def test_session(test_engine: AsyncEngine): """Create a new session and truncate all tables after each test.""" - TestAsyncSessionLocal = async_sessionmaker( + test_async_session_local = async_sessionmaker( bind=test_engine, expire_on_commit=False, class_=AsyncSession, ) - async with TestAsyncSessionLocal() as session: + async with test_async_session_local() as session: yield session # Clean up: truncate all tables and reset sequences 
diff --git a/backend/test/modules/account/account_service_test.py b/backend/test/modules/account/account_service_test.py index 3518de3..2dbedd2 100644 --- a/backend/test/modules/account/account_service_test.py +++ b/backend/test/modules/account/account_service_test.py @@ -40,7 +40,7 @@ async def test_get_accounts( accounts = await self.account_service.get_accounts() [ self.account_utils.assert_matches(account, entity) - for account, entity in zip(accounts, accounts_two_per_role) + for account, entity in zip(accounts, accounts_two_per_role, strict=False) ] @pytest.mark.asyncio diff --git a/backend/test/modules/complaint/complaint_router_test.py b/backend/test/modules/complaint/complaint_router_test.py index 74a1741..c551871 100644 --- a/backend/test/modules/complaint/complaint_router_test.py +++ b/backend/test/modules/complaint/complaint_router_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime import pytest from httpx import AsyncClient @@ -125,7 +125,7 @@ async def test_update_complaint_success(self) -> None: complaint = await self.complaint_utils.create_one() update_data = await self.complaint_utils.next_data( location_id=complaint.location_id, - complaint_datetime=datetime(2025, 11, 20, 23, 0, 0, tzinfo=timezone.utc), + complaint_datetime=datetime(2025, 11, 20, 23, 0, 0, tzinfo=UTC), description="Updated description", ) diff --git a/backend/test/modules/complaint/complaint_service_test.py b/backend/test/modules/complaint/complaint_service_test.py index 8019bdd..a82977d 100644 --- a/backend/test/modules/complaint/complaint_service_test.py +++ b/backend/test/modules/complaint/complaint_service_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime import pytest from src.modules.complaint.complaint_service import ComplaintNotFoundException, ComplaintService @@ -60,7 +60,7 @@ async def test_get_complaints_by_location(self) -> None: fetched = await 
self.complaint_service.get_complaints_by_location(location.id) assert len(fetched) == 2 - for complaint, expected in zip(fetched, complaints): + for complaint, expected in zip(fetched, complaints, strict=False): self.complaint_utils.assert_matches(complaint, expected) @pytest.mark.asyncio @@ -156,7 +156,7 @@ async def test_complaint_data_persistence(self) -> None: location = await self.location_utils.create_one() data = await self.complaint_utils.next_data( location_id=location.id, - complaint_datetime=datetime(2025, 12, 25, 14, 30, 45, tzinfo=timezone.utc), + complaint_datetime=datetime(2025, 12, 25, 14, 30, 45, tzinfo=UTC), description="Detailed description of the complaint issue", ) diff --git a/backend/test/modules/complaint/complaint_utils.py b/backend/test/modules/complaint/complaint_utils.py index db33993..88e623f 100644 --- a/backend/test/modules/complaint/complaint_utils.py +++ b/backend/test/modules/complaint/complaint_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from typing import Any, TypedDict, Unpack, override from sqlalchemy.ext.asyncio import AsyncSession @@ -35,7 +35,7 @@ def generate_defaults(count: int) -> dict[str, Any]: return { "location_id": 1, "complaint_datetime": ( - datetime(2025, 11, 18, 20, 30, 0, tzinfo=timezone.utc) + timedelta(days=count) + datetime(2025, 11, 18, 20, 30, 0, tzinfo=UTC) + timedelta(days=count) ).isoformat(), "description": f"Complaint {count}", } diff --git a/backend/test/modules/location/location_service_test.py b/backend/test/modules/location/location_service_test.py index ea0be42..381955c 100644 --- a/backend/test/modules/location/location_service_test.py +++ b/backend/test/modules/location/location_service_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime import googlemaps import pytest @@ -52,7 +52,7 @@ async def test_create_location_with_full_data(self): unit="Apt 2B", 
warning_count=1, citation_count=2, - hold_expiration=datetime(2025, 12, 31, 23, 59, 59, tzinfo=timezone.utc), + hold_expiration=datetime(2025, 12, 31, 23, 59, 59, tzinfo=UTC), ) location = await self.location_service.create_location(data) @@ -76,7 +76,7 @@ async def test_get_locations(self): fetched = await self.location_service.get_locations() assert len(fetched) == 3 - for loc, f in zip(locations, fetched): + for loc, f in zip(locations, fetched, strict=False): self.location_utils.assert_matches(loc, f) @pytest.mark.asyncio @@ -129,7 +129,9 @@ async def test_update_location_not_found(self): @pytest.mark.asyncio async def test_update_location_conflict(self): - """Test updating a location with another location's google_place_id raises conflict exception""" + """ + Test updating a location with another location's google_place_id raises conflict exception + """ locations = await self.location_utils.create_many(i=2) location1 = locations[0] location2 = locations[1] @@ -356,7 +358,7 @@ async def test_autocomplete_address_success(self): results = await self.location_service.autocomplete_address(address) assert len(results) == 2 - for r, p in zip(results, mock_predictions): + for r, p in zip(results, mock_predictions, strict=False): self.gmaps_utils.assert_autocomplete_matches( result=r, expected_description=p["description"], diff --git a/backend/test/modules/location/location_utils.py b/backend/test/modules/location/location_utils.py index f50ef48..73cae77 100644 --- a/backend/test/modules/location/location_utils.py +++ b/backend/test/modules/location/location_utils.py @@ -64,7 +64,7 @@ def generate_defaults(count: int) -> dict[str, Any]: "zip_code": f"275{14 + count % 10}", "warning_count": count % 5, "citation_count": count % 3, - "hold_expiration": None if count % 2 == 0 else None, + "hold_expiration": None, } defaults["formatted_address"] = ( f"{defaults['street_number']} {defaults['street_name']} {defaults['unit']}, " @@ -196,7 +196,7 @@ def 
mock_autocomplete_predictions( List of mock prediction dictionaries matching Google Maps API format """ predictions = [] - for i in range(count): + for _ in range(count): data = self.location_utils.get_or_default( overrides, {"google_place_id", "formatted_address"}, diff --git a/backend/test/modules/party/party_router_test.py b/backend/test/modules/party/party_router_test.py index abc58b6..45566ef 100644 --- a/backend/test/modules/party/party_router_test.py +++ b/backend/test/modules/party/party_router_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta import pytest import pytest_asyncio @@ -31,7 +31,8 @@ "/api/parties/nearby?place_id=ChIJTest&start_date=2025-01-01&end_date=2025-12-31", None, ), - # POST endpoint requires condiitional body - tested separately in TestPartyCreateAdminRouter and TestPartyCreateStudentRouter + # POST endpoint requires conditional body - tested separately in + TestPartyCreateAdminRouter and TestPartyCreateStudentRouter # ({"student"}, "POST", "/api/parties/", {}), ) @@ -174,7 +175,7 @@ async def test_create_party_as_admin_success(self): @pytest.mark.asyncio async def test_create_party_as_admin_location_on_hold(self): """Test admin cannot create party at location on hold.""" - hold_expiration = datetime.now(timezone.utc) + timedelta(days=30) + hold_expiration = datetime.now(UTC) + timedelta(days=30) location_with_hold = await self.location_utils.create_one(hold_expiration=hold_expiration) payload = await self.party_utils.next_admin_create_dto( @@ -224,7 +225,7 @@ async def current_student(self) -> StudentEntity: # Set last_registered to indicate Party Smart completion student = await self.student_utils.create_one( - account_id=account.id, last_registered=datetime.now(timezone.utc) - timedelta(days=1) + account_id=account.id, last_registered=datetime.now(UTC) - timedelta(days=1) ) return student @@ -286,7 +287,7 @@ async def test_get_parties_nearby_empty(self):
location_data = await self.location_utils.next_data() self.gmaps_utils.mock_place_details(**location_data.model_dump()) - now = datetime.now(timezone.utc) + now = datetime.now(UTC) params = { "place_id": location_data.google_place_id, "start_date": now.strftime("%Y-%m-%d"), @@ -322,7 +323,7 @@ async def test_get_parties_nearby_within_radius(self): ) # Create parties within time window - now = datetime.now(timezone.utc) + now = datetime.now(UTC) party_within = await self.party_utils.create_one( location_id=location_within.id, party_datetime=now + timedelta(hours=2), @@ -363,7 +364,7 @@ async def test_get_parties_nearby_with_date_range(self): longitude=search_lon, ) - base_time = datetime.now(timezone.utc) + timedelta(hours=2) + base_time = datetime.now(UTC) + timedelta(hours=2) # Party within date range party_valid = await self.party_utils.create_one( @@ -448,7 +449,7 @@ def _setup(self, party_utils: PartyTestUtils, admin_client: AsyncClient): @pytest.mark.asyncio async def test_get_parties_csv_empty(self): """Test CSV export with no parties.""" - now = datetime.now(timezone.utc) + now = datetime.now(UTC) params = { "start_date": now.strftime("%Y-%m-%d"), "end_date": (now + timedelta(days=30)).strftime("%Y-%m-%d"), @@ -469,7 +470,7 @@ async def test_get_parties_csv_with_data(self): parties = await self.party_utils.create_many(i=3) # Get date range that covers all parties - now = datetime.now(timezone.utc) + now = datetime.now(UTC) params = { "start_date": (now - timedelta(days=1)).strftime("%Y-%m-%d"), "end_date": (now + timedelta(days=365)).strftime("%Y-%m-%d"), diff --git a/backend/test/modules/party/party_service_test.py b/backend/test/modules/party/party_service_test.py index 63ba0a0..309584f 100644 --- a/backend/test/modules/party/party_service_test.py +++ b/backend/test/modules/party/party_service_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta import pytest from 
src.modules.location.location_service import LocationNotFoundException @@ -75,7 +75,7 @@ async def test_get_parties(self): assert len(fetched) == 3 # Verify all parties match - for created, fetched_party in zip(created_parties, fetched): + for created, fetched_party in zip(created_parties, fetched, strict=False): self.party_utils.assert_matches(created, fetched_party) @pytest.mark.asyncio @@ -254,7 +254,7 @@ async def test_get_parties_by_date_range(self): @pytest.mark.asyncio async def test_get_parties_by_date_range_multiple_parties(self): """Test date range query with multiple parties.""" - base_datetime = datetime.now(timezone.utc) + timedelta(days=1) + base_datetime = datetime.now(UTC) + timedelta(days=1) # Create parties at different times party1 = await self.party_utils.create_one(party_datetime=base_datetime) @@ -404,12 +404,12 @@ async def test_get_parties_by_radius(self): # Create parties within time window party_within = await self.party_utils.create_one( location_id=location_within.id, - party_datetime=datetime.now(timezone.utc) + timedelta(hours=2), + party_datetime=datetime.now(UTC) + timedelta(hours=2), ) party_outside = await self.party_utils.create_one( location_id=location_outside.id, - party_datetime=datetime.now(timezone.utc) + timedelta(hours=2), + party_datetime=datetime.now(UTC) + timedelta(hours=2), ) parties = await self.party_service.get_parties_by_radius(search_lat, search_lon) @@ -432,7 +432,7 @@ async def test_get_parties_by_radius_time_window_past(self): # Party 7 hours in the past (outside window) await self.party_utils.create_one( location_id=location.id, - party_datetime=datetime.now(timezone.utc) - timedelta(hours=7), + party_datetime=datetime.now(UTC) - timedelta(hours=7), ) parties = await self.party_service.get_parties_by_radius(search_lat, search_lon) @@ -452,7 +452,7 @@ async def test_get_parties_by_radius_time_window_future(self): # Party 13 hours in the future (outside window) await self.party_utils.create_one( 
location_id=location.id, - party_datetime=datetime.now(timezone.utc) + timedelta(hours=13), + party_datetime=datetime.now(UTC) + timedelta(hours=13), ) parties = await self.party_service.get_parties_by_radius(search_lat, search_lon) @@ -472,13 +472,13 @@ async def test_get_parties_by_radius_time_window_boundaries(self): # Party ~6 hours in the past (just within window) party_past = await self.party_utils.create_one( location_id=location.id, - party_datetime=datetime.now(timezone.utc) - timedelta(hours=5, minutes=59), + party_datetime=datetime.now(UTC) - timedelta(hours=5, minutes=59), ) # Party ~12 hours in the future (just within window) party_future = await self.party_utils.create_one( location_id=location.id, - party_datetime=datetime.now(timezone.utc) + timedelta(hours=11, minutes=59), + party_datetime=datetime.now(UTC) + timedelta(hours=11, minutes=59), ) parties = await self.party_service.get_parties_by_radius(search_lat, search_lon) @@ -503,7 +503,7 @@ async def test_get_parties_by_radius_and_date_range(self): longitude=search_lon, ) - base_time = datetime.now(timezone.utc) + timedelta(hours=2) + base_time = datetime.now(UTC) + timedelta(hours=2) party_valid = await self.party_utils.create_one( location_id=location_within.id, @@ -536,8 +536,8 @@ async def test_get_parties_by_radius_and_date_range_empty(self): search_lat = 40.7128 search_lon = -74.0060 - start_date = datetime.now(timezone.utc) + timedelta(days=1) - end_date = datetime.now(timezone.utc) + timedelta(days=2) + start_date = datetime.now(UTC) + timedelta(days=1) + end_date = datetime.now(UTC) + timedelta(days=2) parties = await self.party_service.get_parties_by_radius_and_date_range( search_lat, search_lon, start_date, end_date @@ -555,7 +555,7 @@ async def test_get_parties_by_radius_and_date_range_boundary_start(self): longitude=search_lon, ) - start_date = datetime.now(timezone.utc) + timedelta(hours=2) + start_date = datetime.now(UTC) + timedelta(hours=2) party = await 
self.party_utils.create_one( location_id=location.id, @@ -581,7 +581,7 @@ async def test_get_parties_by_radius_and_date_range_boundary_end(self): longitude=search_lon, ) - end_date = datetime.now(timezone.utc) + timedelta(hours=3) + end_date = datetime.now(UTC) + timedelta(hours=3) party = await self.party_utils.create_one( location_id=location.id, diff --git a/backend/test/modules/party/party_utils.py b/backend/test/modules/party/party_utils.py index be7a70c..55b8806 100644 --- a/backend/test/modules/party/party_utils.py +++ b/backend/test/modules/party/party_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from typing import Any, TypedDict, Unpack, override from sqlalchemy.ext.asyncio import AsyncSession @@ -19,7 +19,7 @@ def get_valid_party_datetime() -> datetime: """Get a datetime that is at least 3 business days from now.""" days_ahead = 5 # Start with 5 calendar days to ensure 3 business days - return datetime.now(timezone.utc) + timedelta(days=days_ahead) + return datetime.now(UTC) + timedelta(days=days_ahead) class PartyOverrides(TypedDict, total=False): @@ -160,9 +160,9 @@ def assert_matches( dt2 = resource2.party_datetime if dt1.tzinfo is None: - dt1 = dt1.replace(tzinfo=timezone.utc) + dt1 = dt1.replace(tzinfo=UTC) if dt2.tzinfo is None: - dt2 = dt2.replace(tzinfo=timezone.utc) + dt2 = dt2.replace(tzinfo=UTC) assert dt1 == dt2, f"Party datetime mismatch: {dt1} != {dt2}" diff --git a/backend/test/modules/student/student_router_test.py b/backend/test/modules/student/student_router_test.py index 64b4eb3..11c1143 100644 --- a/backend/test/modules/student/student_router_test.py +++ b/backend/test/modules/student/student_router_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime import pytest import pytest_asyncio @@ -172,7 +172,7 @@ async def test_create_student_success(self): @pytest.mark.asyncio async def 
test_create_student_with_datetime(self): """Test creating a student with last_registered datetime.""" - dt = datetime(2024, 3, 15, 10, 30, 0, tzinfo=timezone.utc) + dt = datetime(2024, 3, 15, 10, 30, 0, tzinfo=UTC) payload = await self.student_utils.next_student_create(last_registered=dt) response = await self.admin_client.post( @@ -339,7 +339,7 @@ async def test_update_is_registered_mark_as_registered_as_staff(self): @pytest.mark.asyncio async def test_update_is_registered_mark_as_not_registered_as_admin(self): """Test unmarking a student as registered (admin authentication).""" - student = await self.student_utils.create_one(last_registered=datetime.now(timezone.utc)) + student = await self.student_utils.create_one(last_registered=datetime.now(UTC)) payload = {"is_registered": False} response = await self.admin_client.patch( @@ -402,8 +402,8 @@ class TestStudentMeRouter: async def current_student(self) -> StudentEntity: """Create a student for the current authenticated user. - Note: student_client authenticates as user with id=3 (from mock_authenticate in authentication.py) - so we need to ensure the account has id=3. + Note: student_client authenticates as user with id=3 (from mock_authenticate in + authentication.py) so we need to ensure the account has id=3. 
""" # The student_client from conftest uses id=3 for students in mock_authenticate # We need to create dummy accounts for IDs 1 and 2 first @@ -497,13 +497,13 @@ async def test_get_me_parties_with_data(self, current_student: StudentEntity): # Create a party where current_student is contact_one party1 = await self.party_utils.create_one( - party_datetime=datetime(2024, 12, 1, 20, 0, 0, tzinfo=timezone.utc), + party_datetime=datetime(2024, 12, 1, 20, 0, 0, tzinfo=UTC), contact_one_id=current_student.account_id, ) # Create a party where other_student is contact_one (should not be returned) party2 = await self.party_utils.create_one( - party_datetime=datetime(2024, 12, 15, 21, 0, 0, tzinfo=timezone.utc), + party_datetime=datetime(2024, 12, 15, 21, 0, 0, tzinfo=UTC), contact_one_id=other_student.account_id, ) diff --git a/backend/test/modules/student/student_service_test.py b/backend/test/modules/student/student_service_test.py index 555559d..4aea11f 100644 --- a/backend/test/modules/student/student_service_test.py +++ b/backend/test/modules/student/student_service_test.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import UTC, datetime import pytest from sqlalchemy.ext.asyncio import AsyncSession @@ -66,7 +66,7 @@ async def test_get_students(self): fetched = await self.student_service.get_students() assert len(fetched) == 3 - for s, f in zip(students, fetched): + for s, f in zip(students, fetched, strict=False): self.student_utils.assert_matches(s, f) @pytest.mark.asyncio @@ -132,7 +132,7 @@ async def test_delete_student_not_found(self): @pytest.mark.asyncio async def test_create_student_with_datetime_timezone(self): account = await self.account_utils.create_one(role=AccountRole.STUDENT.value) - last_reg = datetime(2024, 1, 15, 10, 30, 0, tzinfo=timezone.utc) + last_reg = datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC) data = await self.student_utils.next_data_with_names(last_registered=last_reg) student = await 
self.student_service.create_student(data, account_id=account.id) @@ -142,7 +142,7 @@ async def test_create_student_with_datetime_timezone(self): async def test_update_student_with_datetime_timezone(self): student_entity = await self.student_utils.create_one() - last_reg = datetime(2024, 3, 20, 14, 45, 30, tzinfo=timezone.utc) + last_reg = datetime(2024, 3, 20, 14, 45, 30, tzinfo=UTC) update_data = await self.student_utils.next_data_with_names(last_registered=last_reg) updated = await self.student_service.update_student(student_entity.account_id, update_data) self.student_utils.assert_matches(updated, update_data) @@ -191,18 +191,18 @@ async def test_update_is_registered_true(self): assert student_entity.last_registered is None - before_update = datetime.now(timezone.utc) + before_update = datetime.now(UTC) updated = await self.student_service.update_is_registered( student_entity.account_id, is_registered=True ) - after_update = datetime.now(timezone.utc) + after_update = datetime.now(UTC) assert updated.last_registered is not None assert before_update <= updated.last_registered <= after_update @pytest.mark.asyncio async def test_update_is_registered_false(self): - last_reg = datetime(2024, 1, 15, 10, 30, 0, tzinfo=timezone.utc) + last_reg = datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC) student_entity = await self.student_utils.create_one(last_registered=last_reg) assert student_entity.last_registered is not None diff --git a/backend/test/utils/http/assertions.py b/backend/test/utils/http/assertions.py index 2a71a1b..5158c68 100644 --- a/backend/test/utils/http/assertions.py +++ b/backend/test/utils/http/assertions.py @@ -1,39 +1,39 @@ """Utility functions for asserting HTTP responses in tests.""" -from typing import Any, Optional, TypeVar, get_args, get_origin, overload +from typing import Any, get_args, get_origin, overload from fastapi import HTTPException from httpx import Response from pydantic import BaseModel from src.core.models import PaginatedResponse -T = 
TypeVar("T", bound=BaseModel) - @overload -def assert_res_success(res: Response, ExpectedModel: type[T], *, status: Optional[int] = 200) -> T: +def assert_res_success[T: BaseModel]( + res: Response, expected_model: type[T], *, status: int | None = 200 +) -> T: pass @overload -def assert_res_success( - res: Response, ExpectedModel: type[list[T]], *, status: Optional[int] = 200 +def assert_res_success[T: BaseModel]( + res: Response, expected_model: type[list[T]], *, status: int | None = 200 ) -> list[T]: pass -def assert_res_success( +def assert_res_success[T: BaseModel]( res: Response, - ExpectedModel: type[T] | type[list[T]], + expected_model: type[T] | type[list[T]], *, - status: Optional[int] = 200, + status: int | None = 200, ) -> T | list[T]: """ - Assert that a response is a successful response. Validates and converts to the expected model type. + Assert that a response is a successful response. Validates & converts to the expected model type Args: res: The HTTP response to check - ExpectedModel: The expected Pydantic model type (or list of model type) of the response data + expected_model: The expected Pydantic model type (or list of model type) of response data status: Optional expected HTTP status code (default 200) Returns: @@ -58,14 +58,14 @@ def assert_res_success( data = res.json() assert data is not None, "Expected response data but got None" - model_origin = get_origin(ExpectedModel) + model_origin = get_origin(expected_model) # List model case if model_origin is list: assert isinstance(data, list), f"Expected list response but got {type(data).__name__}" # Get the model type inside the list - args = get_args(ExpectedModel) + args = get_args(expected_model) assert args is not None, "Expected type args for list model but got None" assert len(args) > 0, "Expected at least one type arg for list model but got empty args" inner_model = args[0] @@ -79,11 +79,11 @@ def assert_res_success( # Single model case assert isinstance(data, dict), f"Expected dict 
response but got {type(data).__name__}" - assert isinstance(ExpectedModel, type) and issubclass(ExpectedModel, BaseModel) - model_fields = set(ExpectedModel.model_fields.keys()) + assert isinstance(expected_model, type) and issubclass(expected_model, BaseModel) + model_fields = set(expected_model.model_fields.keys()) extra_fields = set(data.keys()) - model_fields assert not extra_fields, f"Unexpected fields {extra_fields} in response data: {data}" - return ExpectedModel(**data) + return expected_model(**data) def assert_res_failure(res: Response, expected_error: HTTPException) -> dict[str, Any]: @@ -131,7 +131,7 @@ def assert_res_failure(res: Response, expected_error: HTTPException) -> dict[str def assert_res_validation_error( - res: Response, *, expected_fields: Optional[list[str]] = None + res: Response, *, expected_fields: list[str] | None = None ) -> dict[str, Any]: """ Assert that a response is a FastAPI validation error (422). @@ -191,21 +191,21 @@ def assert_res_validation_error( return data -def assert_res_paginated( +def assert_res_paginated[T: BaseModel]( res: Response, - ItemModel: type[T], + item_model: type[T], *, - total_records: Optional[int] = None, + total_records: int | None = None, page_number: int = 1, - page_size: Optional[int] = None, - total_pages: Optional[int] = None, + page_size: int | None = None, + total_pages: int | None = None, ) -> PaginatedResponse[T]: """ Assert that a response is a successful paginated response. 
Args: res: The HTTP response to check - ItemModel: The expected type of items in the paginated response + item_model: The expected type of items in the paginated response total_records: Optional expected total_records count page_number: Expected page number (default 1) page_size: Optional expected page size @@ -237,12 +237,12 @@ def assert_res_paginated( assert isinstance(items, list), f"Expected items to be a list but got {type(items).__name__}" # Convert items to model instances - item_fields = set(ItemModel.model_fields.keys()) + item_fields = set(item_model.model_fields.keys()) converted_items = [] for item in items: extra = set(item.keys()) - item_fields assert not extra, f"Unexpected fields {extra} in response item: {item}" - converted_items.append(ItemModel(**item)) + converted_items.append(item_model(**item)) # Validate pagination metadata if total_records is not None: @@ -262,11 +262,7 @@ def assert_res_paginated( # Calculate expected_total_pages if not provided but both total and page_size are known if total_pages is None and total_records is not None and page_size is not None: if page_size > 0: - total_pages = ( - (total_records + page_size - 1) // page_size - if total_records > 0 - else 0 - ) + total_pages = (total_records + page_size - 1) // page_size if total_records > 0 else 0 else: # Page size of 0 means all results on one page (no pagination) total_pages = 0 if total_records == 0 else 1 @@ -277,7 +273,7 @@ def assert_res_paginated( ) # Return a properly typed PaginatedResponse - return PaginatedResponse[ItemModel]( + return PaginatedResponse[item_model]( items=converted_items, total_records=data["total_records"], page_size=data["page_size"], diff --git a/backend/test/utils/http/test_templates.py b/backend/test/utils/http/test_templates.py index fff6173..207cc11 100644 --- a/backend/test/utils/http/test_templates.py +++ b/backend/test/utils/http/test_templates.py @@ -1,4 +1,5 @@ -from typing import Any, AsyncGenerator, Callable +from collections.abc 
import AsyncGenerator, Callable +from typing import Any import pytest from httpx import AsyncClient diff --git a/backend/test/utils/resource_test_utils.py b/backend/test/utils/resource_test_utils.py index 5fe8fa9..f32f423 100644 --- a/backend/test/utils/resource_test_utils.py +++ b/backend/test/utils/resource_test_utils.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import Any from pydantic import BaseModel @@ -13,32 +13,42 @@ class ResourceTestUtils[ ResourceData: BaseModel, OtherModels: BaseModel, ](ABC): - """Abstract base class for test utilities that manage resource creation and validation. + """Abstract base class for test utilities that manage resource creation and + validation. This utility class provides a standardized interface for repeated logic across tests. It is responsible for: - - Generating unique test data for resources, including dicts, Pydantic models, and SQLAlchemy entities. + - Generating unique test data for resources, including dicts, Pydantic + models, and SQLAlchemy entities. - Creating multiple resource instances in the database for testing purposes. - Validating that two resource instances match in their data fields. - It uses an internal counter to ensure unique test data generation across multiple calls. + It uses an internal counter to ensure unique test data generation across + multiple calls. - The class primarily provides default implementations for utility methods, expecting subclasses to - provide overrides for any logic that doesn't match the default behavior. The only method that must be - overridden is `generate_defaults`, which is responsible for generating unique default test data. Each - subsequent method builds upon this to provide dicts, models, and entities. 
+ The class primarily provides default implementations for utility methods, + expecting subclasses to provide overrides for any logic that doesn't match + the default behavior. The only method that must be overridden is + `generate_defaults`, which is responsible for generating unique default + test data. Each subsequent method builds upon this to provide dicts, + models, and entities. Note on overrides typing: - Overrides are typed as `Any` in this base class because python does not support generic type parameters - that extend `TypedDict`, and the only way to perfectly type kwargs are with `TypedDict`. In order for overrides - to have proper typing, subclasses must override all methods that use kwargs, specifying the correct `TypedDict` type, - delegating directly to the base class if not changing default behavior. + Overrides are typed as `Any` in this base class because python does + not support generic type parameters that extend `TypedDict`, and the + only way to perfectly type kwargs are with `TypedDict`. In order for + overrides to have proper typing, subclasses must override all methods + that use kwargs, specifying the correct `TypedDict` type, delegating + directly to the base class if not changing default behavior. Type Parameters: - ResourceEntity: The SQLAlchemy entity class that implements EntityBase. - Note: the default implementation expects `from_data` - and `to_dto` methods to be present. However, these methods are not required in the type definition to allow for flexibility, - expecting a subclass to override `next_entity` and/or `entity_to_dict` as needed if these methods are not present. - - ResourceData: The Pydantic model representing the resource's data object used to create entities. + and `to_dto` methods to be present. However, these methods are not + required in the type definition to allow for flexibility, expecting a + subclass to override `next_entity` and/or `entity_to_dict` as needed + if these methods are not present. 
+ - ResourceData: The Pydantic model representing the resource's data object + used to create entities. - OtherModels: Additional Pydantic models that may be used in assertions - Represented by a union if multiple models are applicable. - Primarily used for typing in the assert_matches method. @@ -55,10 +65,16 @@ class PersonOverrides(TypedDict, total=False): last_name: str ... - class PersonTestUtils(ResourceTestUtils[PersonEntity, PersonData, Person | DbPerson]): + class PersonTestUtils( + ResourceTestUtils[PersonEntity, PersonData, Person | DbPerson] + ): @staticmethod def generate_defaults(count: int) -> dict: - return {"first_name": f"FPerson{count}", "last_name": f"LPerson{count}", ...} + return { + "first_name": f"FPerson{count}", + "last_name": f"LPerson{count}", + ... + } @override async def next_dict(self, **overrides: Unpack[PersonOverrides]) -> dict: @@ -67,10 +83,12 @@ async def next_dict(self, **overrides: Unpack[PersonOverrides]) -> dict: overrides["account_id"] = account.id return await super().next_dict(**overrides) - # =============================== Typing Overrides ================================ + # ========================== Typing Overrides =========================== @override - async def next_data(self, **overrides: Unpack[PersonOverrides]) -> PersonData: + async def next_data( + self, **overrides: Unpack[PersonOverrides] + ) -> PersonData: return await super().next_data(**overrides) ... @@ -95,12 +113,15 @@ def __init__( entity_class: type[ResourceEntity], data_class: type[ResourceData], ): - """Initialize the ResourceTestUtils with a database session and resource classes. + """Initialize the ResourceTestUtils with a database session and resource + classes. Args: session (AsyncSession): The SQLAlchemy async database session. 
- entity_class (type[ResourceEntity]): The SQLAlchemy entity class, for instantiation at runtime - data_class (type[ResourceData]): The Pydantic model class for resource data, for instantiation at runtime + entity_class (type[ResourceEntity]): The SQLAlchemy entity class, + for instantiation at runtime + data_class (type[ResourceData]): The Pydantic model class for + resource data, for instantiation at runtime """ self.session = session self._ResourceEntity = entity_class @@ -152,7 +173,7 @@ async def next_dict(self, **overrides: Any) -> dict: return data async def next_data(self, **overrides: Any) -> ResourceData: - """Generate the next unique Pydantic creation model for the resource, applying any overrides. + """Generate the next unique Pydantic creation model for the resource, applying any overrides Args: **overrides: Fields to override in the generated model. @@ -168,7 +189,7 @@ async def next_entity(self, **overrides: Any) -> ResourceEntity: """ data = await self.next_data(**overrides) if not hasattr(self._ResourceEntity, "from_data") or not callable( - getattr(self._ResourceEntity, "from_data") + self._ResourceEntity.from_data # pyright: ignore[reportAttributeAccessIssue] ): raise AttributeError( f"{self._ResourceEntity.__name__} must implement a 'from_data' classmethod" @@ -176,7 +197,7 @@ async def next_entity(self, **overrides: Any) -> ResourceEntity: return self._ResourceEntity.from_data(data) # type: ignore async def create_many(self, *, i: int, **overrides: Any) -> list[ResourceEntity]: - """Create multiple resource entities in the database, applying any overrides to every entity. + """Create multiple resource entities in the database, applying any overrides to every entity Args: i (int): The number of resource entities to create. 
@@ -206,9 +227,7 @@ async def get_all(self) -> list[ResourceEntity]: def entity_to_dict(self, entity: ResourceEntity) -> dict: """Convert a resource entity to a dict via its model representation.""" - if not hasattr(self._ResourceEntity, "to_dto") or not callable( - getattr(self._ResourceEntity, "to_dto") - ): + if not hasattr(self._ResourceEntity, "to_dto") or not callable(self._ResourceEntity.to_dto): # pyright: ignore[reportAttributeAccessIssue] raise AttributeError( f"{self._ResourceEntity.__name__} must implement a 'to_dto' method" ) @@ -224,8 +243,8 @@ def assert_matches( Extra fields are ignored; only shared fields are compared. Args: - resource1 (ResourceEntity | ResourceData | OtherModels | None): The first resource instance. - resource2 (ResourceEntity | ResourceData | OtherModels | None): The second resource instance. + resource1: The first resource instance. + resource2: The second resource instance. """ assert resource1 is not None, "First resource is None" assert resource2 is not None, "Second resource is None" @@ -247,7 +266,7 @@ def assert_matches( val2 = dict2[key] if isinstance(val1, datetime) and isinstance(val2, datetime): - val1 = val1.replace(tzinfo=timezone.utc) - val2 = val2.replace(tzinfo=timezone.utc) + val1 = val1.replace(tzinfo=UTC) + val2 = val2.replace(tzinfo=UTC) assert val1 == val2, f"Mismatch on field '{key}': {val1} != {val2}" diff --git a/frontend/eslint.config.mjs b/frontend/eslint.config.mjs index 719cea2..09d5c8b 100644 --- a/frontend/eslint.config.mjs +++ b/frontend/eslint.config.mjs @@ -1,6 +1,6 @@ +import { FlatCompat } from "@eslint/eslintrc"; import { dirname } from "path"; import { fileURLToPath } from "url"; -import { FlatCompat } from "@eslint/eslintrc"; const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 9b0dbc8..5bf675f 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -41,11 
+41,13 @@ "devDependencies": { "@eslint/eslintrc": "^3", "@tailwindcss/postcss": "^4", + "@trivago/prettier-plugin-sort-imports": "^6.0.2", "@types/node": "^20", "@types/react": "^19", "@types/react-dom": "^19", "eslint": "^9", "eslint-config-next": "15.5.4", + "prettier": "^3.7.4", "tailwindcss": "^4", "tw-animate-css": "^1.4.0", "typescript": "^5" @@ -3196,6 +3198,73 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@trivago/prettier-plugin-sort-imports": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@trivago/prettier-plugin-sort-imports/-/prettier-plugin-sort-imports-6.0.2.tgz", + "integrity": "sha512-3DgfkukFyC/sE/VuYjaUUWoFfuVjPK55vOFDsxD56XXynFMCZDYFogH2l/hDfOsQAm1myoU/1xByJ3tWqtulXA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@babel/generator": "^7.28.0", + "@babel/parser": "^7.28.0", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.0", + "javascript-natural-sort": "^0.7.1", + "lodash-es": "^4.17.21", + "minimatch": "^9.0.0", + "parse-imports-exports": "^0.2.4" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@vue/compiler-sfc": "3.x", + "prettier": "2.x - 3.x", + "prettier-plugin-ember-template-tag": ">= 2.0.0", + "prettier-plugin-svelte": "3.x", + "svelte": "4.x || 5.x" + }, + "peerDependenciesMeta": { + "@vue/compiler-sfc": { + "optional": true + }, + "prettier-plugin-ember-template-tag": { + "optional": true + }, + "prettier-plugin-svelte": { + "optional": true + }, + "svelte": { + "optional": true + } + } + }, + "node_modules/@trivago/prettier-plugin-sort-imports/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + 
"node_modules/@trivago/prettier-plugin-sort-imports/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@ts-morph/common": { "version": "0.27.0", "resolved": "https://registry.npmjs.org/@ts-morph/common/-/common-0.27.0.tgz", @@ -7252,6 +7321,13 @@ "node": ">= 0.4" } }, + "node_modules/javascript-natural-sort": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", + "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==", + "dev": true, + "license": "MIT" + }, "node_modules/jiti": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", @@ -7683,6 +7759,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lodash-es": { + "version": "4.17.22", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", + "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", + "dev": true, + "license": "MIT" + }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -8559,6 +8642,16 @@ "node": ">=6" } }, + "node_modules/parse-imports-exports": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/parse-imports-exports/-/parse-imports-exports-0.2.4.tgz", + "integrity": "sha512-4s6vd6dx1AotCx/RCI2m7t7GCh5bDRUtGNvRfHSP2wbBQdMi67pPe7mtzmgwcaQ8VKK/6IB7Glfyu3qdZJPybQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"parse-statements": "1.0.11" + } + }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -8589,6 +8682,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse-statements": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/parse-statements/-/parse-statements-1.0.11.tgz", + "integrity": "sha512-HlsyYdMBnbPQ9Jr/VgJ1YF4scnldvJpJxCVx6KgqPL4dxppsWrJHCIIxQXMJrqGnsRkNPATbeMJ8Yxu7JMsYcA==", + "dev": true, + "license": "MIT" + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -8733,6 +8833,22 @@ "node": ">= 0.8.0" } }, + "node_modules/prettier": { + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", + "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/pretty-format": { "version": "3.8.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-3.8.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index c2ddc6f..481b185 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -42,13 +42,34 @@ "devDependencies": { "@eslint/eslintrc": "^3", "@tailwindcss/postcss": "^4", + "@trivago/prettier-plugin-sort-imports": "^6.0.2", "@types/node": "^20", "@types/react": "^19", "@types/react-dom": "^19", "eslint": "^9", "eslint-config-next": "15.5.4", + "prettier": "^3.7.4", "tailwindcss": "^4", "tw-animate-css": "^1.4.0", "typescript": "^5" + }, + "prettier": { + "semi": true, + "trailingComma": "es5", + "singleQuote": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "arrowParens": "always", + "endOfLine": "lf", + "plugins": 
[ + "@trivago/prettier-plugin-sort-imports" + ], + "importOrder": [ + "", + "^[./]" + ], + "importOrderSeparation": false, + "importOrderSortSpecifiers": true } } diff --git a/frontend/public/file.svg b/frontend/public/file.svg index 004145c..16fe3d3 100644 --- a/frontend/public/file.svg +++ b/frontend/public/file.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/frontend/public/globe.svg b/frontend/public/globe.svg index 567f17b..c7215fe 100644 --- a/frontend/public/globe.svg +++ b/frontend/public/globe.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/frontend/public/next.svg b/frontend/public/next.svg index 5174b28..5bb00d4 100644 --- a/frontend/public/next.svg +++ b/frontend/public/next.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/frontend/public/vercel.svg b/frontend/public/vercel.svg index 7705396..5215157 100644 --- a/frontend/public/vercel.svg +++ b/frontend/public/vercel.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/frontend/public/window.svg b/frontend/public/window.svg index b2b2a44..d05e7a1 100644 --- a/frontend/public/window.svg +++ b/frontend/public/window.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/frontend/src/app/police/_components/EmbeddedMap.tsx b/frontend/src/app/police/_components/EmbeddedMap.tsx index 32c4ea1..dc8c929 100644 --- a/frontend/src/app/police/_components/EmbeddedMap.tsx +++ b/frontend/src/app/police/_components/EmbeddedMap.tsx @@ -2,8 +2,8 @@ import { PartyDto } from "@/lib/api/party/party.types"; import { - AdvancedMarker, APIProvider, + AdvancedMarker, InfoWindow, Map, Pin, diff --git a/frontend/src/app/police/_components/PartyCsvExportButton.tsx b/frontend/src/app/police/_components/PartyCsvExportButton.tsx index 33f8aeb..9ec3df5 100644 --- a/frontend/src/app/police/_components/PartyCsvExportButton.tsx +++ b/frontend/src/app/police/_components/PartyCsvExportButton.tsx @@ -1,11 +1,10 @@ "use client"; -import { Download } from "lucide-react"; -import { useState 
} from "react"; - import { Button } from "@/components/ui/button"; import { PartyService } from "@/lib/api/party/party.service"; import getMockClient from "@/lib/network/mockClient"; +import { Download } from "lucide-react"; +import { useState } from "react"; interface PartyCsvExportButtonProps { startDate: Date | undefined; diff --git a/frontend/src/app/police/page.tsx b/frontend/src/app/police/page.tsx index 4451c7a..4e4ab0d 100644 --- a/frontend/src/app/police/page.tsx +++ b/frontend/src/app/police/page.tsx @@ -1,9 +1,9 @@ "use client"; -import DateRangeFilter from "@/components/DateRangeFilter"; import EmbeddedMap from "@/app/police/_components/EmbeddedMap"; import PartyList from "@/app/police/_components/PartyList"; import AddressSearch from "@/components/AddressSearch"; +import DateRangeFilter from "@/components/DateRangeFilter"; import { LocationService } from "@/lib/api/location/location.service"; import { AutocompleteResult } from "@/lib/api/location/location.types"; import { PartyService } from "@/lib/api/party/party.service"; diff --git a/frontend/src/app/staff/_components/account/AccountTable.tsx b/frontend/src/app/staff/_components/account/AccountTable.tsx index 77b7da3..5dd22d5 100644 --- a/frontend/src/app/staff/_components/account/AccountTable.tsx +++ b/frontend/src/app/staff/_components/account/AccountTable.tsx @@ -1,17 +1,16 @@ "use client"; import { AccountService } from "@/lib/api/account/account.service"; +import type { AccountDto, AccountRole } from "@/lib/api/account/account.types"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; import { ColumnDef } from "@tanstack/react-table"; +import { isAxiosError } from "axios"; import { useState } from "react"; import * as z from "zod"; import { useSidebar } from "../shared/sidebar/SidebarContext"; import { TableTemplate } from "../shared/table/TableTemplate"; import AccountTableForm, { accountTableFormSchema } from "./AccountTableForm"; -import type { AccountDto, 
AccountRole } from "@/lib/api/account/account.types"; -import { isAxiosError } from "axios"; - type AccountTableFormValues = z.infer; const accountService = new AccountService(); diff --git a/frontend/src/app/staff/_components/party/PartyTable.tsx b/frontend/src/app/staff/_components/party/PartyTable.tsx index 6f52ca7..56d6c90 100644 --- a/frontend/src/app/staff/_components/party/PartyTable.tsx +++ b/frontend/src/app/staff/_components/party/PartyTable.tsx @@ -11,10 +11,10 @@ import { DateRange } from "react-day-picker"; import { GenericInfoChip } from "../shared/sidebar/GenericInfoChip"; import { useSidebar } from "../shared/sidebar/SidebarContext"; import { TableTemplate } from "../shared/table/TableTemplate"; +import PartyTableForm from "./PartyTableForm"; import ContactInfoChipDetails from "./details/ContactInfoChipDetails"; import LocationInfoChipDetails from "./details/LocationInfoChipDetails"; import StudentInfoChipDetails from "./details/StudentInfoChipDetails"; -import PartyTableForm from "./PartyTableForm"; const partyService = new PartyService(); diff --git a/frontend/src/app/staff/_components/shared/dialog/DeleteConfirmDialog.tsx b/frontend/src/app/staff/_components/shared/dialog/DeleteConfirmDialog.tsx index 3c1f269..079e45f 100644 --- a/frontend/src/app/staff/_components/shared/dialog/DeleteConfirmDialog.tsx +++ b/frontend/src/app/staff/_components/shared/dialog/DeleteConfirmDialog.tsx @@ -2,60 +2,60 @@ import { Button } from "@/components/ui/button"; import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, } from "@/components/ui/dialog"; interface DeleteConfirmDialogProps { - open: boolean; - onOpenChange: (open: boolean) => void; - onConfirm: () => void; - title?: string; - description?: string; - isDeleting?: boolean; + open: boolean; + onOpenChange: (open: boolean) => void; + onConfirm: () => void; + title?: 
string; + description?: string; + isDeleting?: boolean; } export function DeleteConfirmDialog({ - open, - onOpenChange, - onConfirm, - title = "Delete Item", - description = "Are you sure you want to delete this item? This action cannot be undone.", - isDeleting = false, + open, + onOpenChange, + onConfirm, + title = "Delete Item", + description = "Are you sure you want to delete this item? This action cannot be undone.", + isDeleting = false, }: DeleteConfirmDialogProps) { - const handleConfirm = () => { - onConfirm(); - onOpenChange(false); - }; + const handleConfirm = () => { + onConfirm(); + onOpenChange(false); + }; - return ( - - - - {title} - {description} - - - - - - - - ); + return ( + + + + {title} + {description} + + + + + + + + ); } diff --git a/frontend/src/app/staff/_components/shared/sidebar/SidebarContext.tsx b/frontend/src/app/staff/_components/shared/sidebar/SidebarContext.tsx index b402536..6793678 100644 --- a/frontend/src/app/staff/_components/shared/sidebar/SidebarContext.tsx +++ b/frontend/src/app/staff/_components/shared/sidebar/SidebarContext.tsx @@ -1,5 +1,5 @@ "use client"; -import { createContext, ReactNode, useContext, useState } from "react"; +import { ReactNode, createContext, useContext, useState } from "react"; type SidebarContextType = { isOpen: boolean; diff --git a/frontend/src/app/staff/_components/shared/table/ColumnHeader.tsx b/frontend/src/app/staff/_components/shared/table/ColumnHeader.tsx index c6f336b..f6e1ee2 100644 --- a/frontend/src/app/staff/_components/shared/table/ColumnHeader.tsx +++ b/frontend/src/app/staff/_components/shared/table/ColumnHeader.tsx @@ -2,107 +2,103 @@ import { Button } from "@/components/ui/button"; import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuSeparator, - DropdownMenuTrigger, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; import { Column } from 
"@tanstack/react-table"; import { ArrowDown, ArrowUp, ChevronDown, Filter, X } from "lucide-react"; import { useState } from "react"; interface ColumnHeaderProps { - column: Column; - title: string; - onFilterClick?: () => void; + column: Column; + title: string; + onFilterClick?: () => void; } export function ColumnHeader({ - column, - title, - onFilterClick, + column, + title, + onFilterClick, }: ColumnHeaderProps) { - const [open, setOpen] = useState(false); - const isFiltered = column.getIsFiltered(); - const isSorted = column.getIsSorted(); - const canFilter = column.getCanFilter(); + const [open, setOpen] = useState(false); + const isFiltered = column.getIsFiltered(); + const isSorted = column.getIsSorted(); + const canFilter = column.getCanFilter(); - return ( -
- - - - - - { - column.toggleSorting(false); - setOpen(false); - }} - > - - Sort Ascending - {isSorted === "asc" && ( - - )} - - { - column.toggleSorting(true); - setOpen(false); - }} - > - - Sort Descending - {isSorted === "desc" && ( - - )} - - {canFilter && ( - <> - - {isFiltered ? ( - { - column.setFilterValue(undefined); - setOpen(false); - }} - className="text-red-600" - > - - Clear Filter - - ) : ( - { - setOpen(false); - onFilterClick?.(); - }} - > - - Add Filter - - )} - - )} - - -
- ); + return ( +
+ + + + + + { + column.toggleSorting(false); + setOpen(false); + }} + > + + Sort Ascending + {isSorted === "asc" && ( + + )} + + { + column.toggleSorting(true); + setOpen(false); + }} + > + + Sort Descending + {isSorted === "desc" && ( + + )} + + {canFilter && ( + <> + + {isFiltered ? ( + { + column.setFilterValue(undefined); + setOpen(false); + }} + className="text-red-600" + > + + Clear Filter + + ) : ( + { + setOpen(false); + onFilterClick?.(); + }} + > + + Add Filter + + )} + + )} + + +
+ ); } diff --git a/frontend/src/app/staff/_components/shared/table/TableTemplate.tsx b/frontend/src/app/staff/_components/shared/table/TableTemplate.tsx index d283d17..5e92407 100644 --- a/frontend/src/app/staff/_components/shared/table/TableTemplate.tsx +++ b/frontend/src/app/staff/_components/shared/table/TableTemplate.tsx @@ -1,7 +1,12 @@ "use client"; import { Button } from "@/components/ui/button"; - +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; import { Table, TableBody, @@ -11,25 +16,18 @@ import { TableHeader, TableRow, } from "@/components/ui/table"; - -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; import { Column, ColumnDef, ColumnFiltersState, + PaginationState, + Row, + SortingState, flexRender, getCoreRowModel, getFilteredRowModel, getPaginationRowModel, getSortedRowModel, - PaginationState, - Row, - SortingState, useReactTable, } from "@tanstack/react-table"; import { diff --git a/frontend/src/app/student/_components/Header.tsx b/frontend/src/app/student/_components/Header.tsx index 2cf14e7..c3a3cec 100644 --- a/frontend/src/app/student/_components/Header.tsx +++ b/frontend/src/app/student/_components/Header.tsx @@ -1,5 +1,5 @@ -import logout from "@/components/icons/log-out.svg"; import OCSLLogo from "@/components/icons/OCSL_logo.svg"; +import logout from "@/components/icons/log-out.svg"; import pfp from "@/components/icons/pfp_temp.svg"; import user from "@/components/icons/user.svg"; import { diff --git a/frontend/src/app/student/_components/PartyRegistrationForm.tsx b/frontend/src/app/student/_components/PartyRegistrationForm.tsx index 0fa7e86..a48b05a 100644 --- a/frontend/src/app/student/_components/PartyRegistrationForm.tsx +++ b/frontend/src/app/student/_components/PartyRegistrationForm.tsx @@ -1,10 +1,5 @@ "use client"; -import { addBusinessDays, format, isAfter, 
startOfDay } from "date-fns"; -import { CalendarIcon } from "lucide-react"; -import { useState } from "react"; -import * as z from "zod"; - import AddressSearch from "@/components/AddressSearch"; import { Button } from "@/components/ui/button"; import { Calendar } from "@/components/ui/calendar"; @@ -31,6 +26,10 @@ import { } from "@/components/ui/select"; import { LocationService } from "@/lib/api/location/location.service"; import { AutocompleteResult } from "@/lib/api/location/location.types"; +import { addBusinessDays, format, isAfter, startOfDay } from "date-fns"; +import { CalendarIcon } from "lucide-react"; +import { useState } from "react"; +import * as z from "zod"; const partyFormSchema = z.object({ address: z.string().min(1, "Address is required"), diff --git a/frontend/src/app/student/profile/page.tsx b/frontend/src/app/student/profile/page.tsx index 85578f1..11138a6 100644 --- a/frontend/src/app/student/profile/page.tsx +++ b/frontend/src/app/student/profile/page.tsx @@ -35,11 +35,7 @@ export default function StudentProfilePage() {
- {student && ( - - )} + {student && }
); diff --git a/frontend/src/components/ui/badge.tsx b/frontend/src/components/ui/badge.tsx index fd3a406..23d643c 100644 --- a/frontend/src/components/ui/badge.tsx +++ b/frontend/src/components/ui/badge.tsx @@ -1,8 +1,7 @@ -import * as React from "react" -import { Slot } from "@radix-ui/react-slot" -import { cva, type VariantProps } from "class-variance-authority" - -import { cn } from "@/lib/utils" +import { cn } from "@/lib/utils"; +import { Slot } from "@radix-ui/react-slot"; +import { type VariantProps, cva } from "class-variance-authority"; +import * as React from "react"; const badgeVariants = cva( "inline-flex items-center justify-center rounded-full border px-2 py-0.5 text-xs font-medium w-fit whitespace-nowrap shrink-0 [&>svg]:size-3 gap-1 [&>svg]:pointer-events-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive transition-[color,box-shadow] overflow-hidden", @@ -23,7 +22,7 @@ const badgeVariants = cva( variant: "default", }, } -) +); function Badge({ className, @@ -32,7 +31,7 @@ function Badge({ ...props }: React.ComponentProps<"span"> & VariantProps & { asChild?: boolean }) { - const Comp = asChild ? Slot : "span" + const Comp = asChild ? 
Slot : "span"; return ( - ) + ); } -export { Badge, badgeVariants } +export { Badge, badgeVariants }; diff --git a/frontend/src/components/ui/button.tsx b/frontend/src/components/ui/button.tsx index 26fc539..59f0fdc 100644 --- a/frontend/src/components/ui/button.tsx +++ b/frontend/src/components/ui/button.tsx @@ -1,9 +1,8 @@ +import { cn } from "@/lib/utils"; import { Slot } from "@radix-ui/react-slot"; -import { cva, type VariantProps } from "class-variance-authority"; +import { type VariantProps, cva } from "class-variance-authority"; import * as React from "react"; -import { cn } from "@/lib/utils"; - const buttonVariants = cva( "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-all disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive", { diff --git a/frontend/src/components/ui/calendar.tsx b/frontend/src/components/ui/calendar.tsx index 4d7c46a..8a4a564 100644 --- a/frontend/src/components/ui/calendar.tsx +++ b/frontend/src/components/ui/calendar.tsx @@ -1,15 +1,14 @@ -"use client" +"use client"; -import * as React from "react" +import { Button, buttonVariants } from "@/components/ui/button"; +import { cn } from "@/lib/utils"; import { ChevronDownIcon, ChevronLeftIcon, ChevronRightIcon, -} from "lucide-react" -import { DayButton, DayPicker, getDefaultClassNames } from "react-day-picker" - -import { cn } from "@/lib/utils" -import { Button, buttonVariants } from "@/components/ui/button" +} from "lucide-react"; +import * as React from "react"; +import { DayButton, DayPicker, getDefaultClassNames } from "react-day-picker"; function Calendar({ className, @@ -21,9 +20,9 @@ function Calendar({ components, ...props }: 
React.ComponentProps & { - buttonVariant?: React.ComponentProps["variant"] + buttonVariant?: React.ComponentProps["variant"]; }) { - const defaultClassNames = getDefaultClassNames() + const defaultClassNames = getDefaultClassNames(); return ( - ) + ); }, Chevron: ({ className, orientation, ...props }) => { if (orientation === "left") { return ( - ) + ); } if (orientation === "right") { @@ -148,12 +147,12 @@ function Calendar({ className={cn("size-4", className)} {...props} /> - ) + ); } return ( - ) + ); }, DayButton: CalendarDayButton, WeekNumber: ({ children, ...props }) => { @@ -163,13 +162,13 @@ function Calendar({ {children} - ) + ); }, ...components, }} {...props} /> - ) + ); } function CalendarDayButton({ @@ -178,12 +177,12 @@ function CalendarDayButton({ modifiers, ...props }: React.ComponentProps) { - const defaultClassNames = getDefaultClassNames() + const defaultClassNames = getDefaultClassNames(); - const ref = React.useRef(null) + const ref = React.useRef(null); React.useEffect(() => { - if (modifiers.focused) ref.current?.focus() - }, [modifiers.focused]) + if (modifiers.focused) ref.current?.focus(); + }, [modifiers.focused]); return (