diff --git a/src/config.py b/src/config.py
index 8b9615e..e206e2e 100644
--- a/src/config.py
+++ b/src/config.py
@@ -37,6 +37,7 @@ class Settings(BaseSettings):
     POOL_SIZE: int = 5
     POOL_MAX_OVERFLOW: int = 10
+    REPORT_CACHE_MINUTES: int = 60

     SERVICE_API_KEY: str = "default_service_secret_change_me"
     ADMIN_API_KEY: str = "default_admin_secret_change_me"
diff --git a/src/main.py b/src/main.py
index 45c9a8d..614bc90 100644
--- a/src/main.py
+++ b/src/main.py
@@ -171,6 +171,7 @@ async def custom_rate_limit_exceeded_handler(
 def on_startup() -> None:
     global model_pipeline, etl_scheduler
     settings = get_settings()
+    create_generated_reports_table()
     if not settings.SKIP_MODEL_TRAINING:
         model_pipeline = train_logistic_regression_pipeline()
@@ -519,9 +520,13 @@ def get_example_revenue_input() -> EventRevenueInput:
 def generate_daily_report(payload: DailyReportRequest) -> Any:
     try:
         target_date: date = payload.target_date or date.today()
-        report_path = generate_daily_report_csv(
+        settings = get_settings()
+        report_path, cache_hit = generate_daily_report_csv(
             target_date=target_date,
             output_format=payload.output_format,
+            event_id=payload.event_id,
+            force_regenerate=payload.force_regenerate,
+            cache_minutes=settings.REPORT_CACHE_MINUTES,
         )
         sales_data = _query_daily_sales(target_date)
         transfer_stats = _query_transfer_stats(target_date)
@@ -530,6 +535,7 @@ def generate_daily_report(payload: DailyReportRequest) -> Any:
         total_sales: int = sum(row["tickets_sold"] for row in sales_data)
         total_revenue: float = sum(row["revenue"] for row in sales_data)
+        msg = "Report served from cache" if cache_hit else f"Report generated successfully at {report_path}"

         return DailyReportResponse(
             success=True,
             report_path=report_path,
@@ -540,7 +546,8 @@ def generate_daily_report(payload: DailyReportRequest) -> Any:
                 "total_transfers": transfer_stats.get("total_transfers", 0),
                 "invalid_scans": invalid_scan_stats.get("invalid_scans", 0),
             },
-            message=f"Report generated successfully at {report_path}",
+            cache_hit=cache_hit,
+            message=msg,
         )
     except Exception as exc:
         log_error("Daily report generation failed", {"error": str(exc)})
diff --git a/src/report_service.py b/src/report_service.py
index 69d2295..d9c5a1b 100644
--- a/src/report_service.py
+++ b/src/report_service.py
@@ -4,7 +4,7 @@
 import re
 from datetime import date, datetime
 from pathlib import Path
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Tuple

 from sqlalchemy import text

@@ -119,6 +119,110 @@ def scan_and_populate_reports() -> None:
     logger.info("reports/ directory scan complete")


+# ---------------------------------------------------------------------------
+# generated_reports table helpers
+# ---------------------------------------------------------------------------
+
+def create_generated_reports_table() -> None:
+    """Create the generated_reports table if it does not yet exist."""
+    engine = _pg_engine()
+    if engine is None:
+        logger.info("Skipping generated_reports table creation — no DB engine")
+        return
+    with engine.connect() as conn:
+        conn.execute(text("""
+            CREATE TABLE IF NOT EXISTS generated_reports (
+                id SERIAL PRIMARY KEY,
+                filename TEXT NOT NULL,
+                report_date DATE NOT NULL,
+                event_id TEXT,
+                format TEXT NOT NULL,
+                size_bytes INTEGER NOT NULL,
+                generated_at TIMESTAMP NOT NULL DEFAULT NOW()
+            )
+        """))
+        conn.commit()
+    logger.info("generated_reports table ready")
+
+
+def insert_report_metadata(
+    filename: str,
+    report_date: date,
+    event_id: Optional[str],
+    fmt: str,
+    size_bytes: int,
+    generated_at: datetime,
+) -> None:
+    """Insert a row into generated_reports after a file is written."""
+    engine = _pg_engine()
+    if engine is None:
+        return
+    with engine.connect() as conn:
+        conn.execute(
+            text("""
+                INSERT INTO generated_reports
+                    (filename, report_date, event_id, format, size_bytes, generated_at)
+                VALUES
+                    (:filename, :report_date, :event_id, :format, :size_bytes, :generated_at)
+            """),
+            {
+                "filename": filename,
+                "report_date": report_date,
+                "event_id": event_id,
+                "format": fmt,
+                "size_bytes": size_bytes,
+                "generated_at": generated_at,
+            },
+        )
+        conn.commit()
+
+
+def check_report_cache(
+    report_date: date,
+    event_id: Optional[str],
+    fmt: str,
+    cache_minutes: int,
+) -> Optional[Dict[str, Any]]:
+    """Return metadata for the most recent matching cached report, or None."""
+    engine = _pg_engine()
+    if engine is None:
+        return None
+    with engine.connect() as conn:
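+        # NULL-safe event_id match: a report covering all events is stored
+        # with event_id NULL and must only satisfy lookups that also pass NULL.
+        # (:cache_minutes || ' minutes')::INTERVAL builds the freshness window
+        # from the configured integer using PostgreSQL interval syntax.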
+        result = conn.execute(
+            text("""
+                SELECT filename, size_bytes, generated_at
+                FROM generated_reports
+                WHERE report_date = :report_date
+                  AND format = :format
+                  AND (
+                      (:event_id IS NULL AND event_id IS NULL)
+                      OR event_id = :event_id
+                  )
+                  AND generated_at >= NOW() - (:cache_minutes || ' minutes')::INTERVAL
+                ORDER BY generated_at DESC
+                LIMIT 1
+            """),
+            {
+                "report_date": report_date,
+                "event_id": event_id,
+                "format": fmt,
+                "cache_minutes": cache_minutes,
+            },
+        )
+        row = result.fetchone()
+    if row is None:
+        return None
+    return {
+        "filename": row[0],
+        "size_bytes": row[1],
+        "generated_at": row[2],
+    }
+
+
 def _pg_engine():
     return _db.get_engine()
@@ -194,14 +298,29 @@ def _query_invalid_scans(target_date: Optional[date] = None) -> Dict[str, int]:
 def generate_daily_report_csv(
     target_date: Optional[date] = None,
     output_format: str = "csv",
-) -> str:
+    event_id: Optional[str] = None,
+    force_regenerate: bool = False,
+    cache_minutes: int = 60,
+) -> Tuple[str, bool]:
     """Generate a daily sales report as CSV or JSON.

-    Returns the path to the generated file as a string.
+    Returns (path_to_generated_file, cache_hit).
+    When cache_hit is True the file already existed and was not regenerated.
     """
     if target_date is None:
         target_date = date.today()

+    # --- cache check ---
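+    # Look for a sufficiently fresh report row first; fall through to a full
+    # rebuild when the row is missing, stale, or the file has been deleted.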
""" if target_date is None: target_date = date.today() + # --- cache check --- + if not force_regenerate: + cached = check_report_cache(target_date, event_id, output_format, cache_minutes) + if cached is not None: + cached_path = str(REPORTS_DIR / cached["filename"]) + if Path(cached_path).exists(): + logger.info("Cache hit — returning existing report %s", cached["filename"]) + return cached_path, True + _ensure_reports_dir() sales_data = _query_daily_sales(target_date) @@ -238,6 +351,10 @@ def generate_daily_report_csv( with open(filepath, "w") as f: json.dump(report_data, f, indent=2) + size_bytes = filepath.stat().st_size + now = datetime.utcnow() + insert_report_metadata(filename, report_date=target_date, event_id=event_id, + fmt="json", size_bytes=size_bytes, generated_at=now) generated_at = datetime.utcnow() insert_report_metadata( filename=filename, @@ -247,7 +364,7 @@ def generate_daily_report_csv( generated_at=generated_at, ) logger.info("Generated JSON report: %s", filepath) - return str(filepath) + return str(filepath), False # CSV format (default) filename = f"daily_report_{target_date}_{timestamp}.csv" @@ -277,6 +394,10 @@ def generate_daily_report_csv( f"${row['revenue']:.2f}", ]) + size_bytes = filepath.stat().st_size + now = datetime.utcnow() + insert_report_metadata(filename, report_date=target_date, event_id=event_id, + fmt="csv", size_bytes=size_bytes, generated_at=now) generated_at = datetime.utcnow() insert_report_metadata( filename=filename, @@ -286,4 +407,4 @@ def generate_daily_report_csv( generated_at=generated_at, ) logger.info("Generated CSV report: %s", filepath) - return str(filepath) \ No newline at end of file + return str(filepath), False \ No newline at end of file diff --git a/src/types_custom.py b/src/types_custom.py index cab483b..9eb75a1 100644 --- a/src/types_custom.py +++ b/src/types_custom.py @@ -116,6 +116,8 @@ class DailyReportRequest(BaseModel): model_config = ConfigDict(extra="forbid") target_date: Optional[date] = Field(None, description="Target date in YYYY-MM-DD format. Defaults to today.") output_format: Literal["csv", "json"] = Field("csv", description="Output format: 'csv' or 'json'") + event_id: Optional[str] = Field(None, description="Optional event ID to scope the report. 
Null means all events.") + force_regenerate: bool = Field(False, description="When True, skip cache and always generate a fresh report.") class DailyReportResponse(BaseModel): @@ -125,6 +127,7 @@ class DailyReportResponse(BaseModel): report_path: Optional[str] = Field(None, description="Path to generated report file") report_date: str = Field(..., description="Date of the report") summary: Dict[str, Any] = Field(..., description="Summary statistics") + cache_hit: bool = Field(False, description="True when the response was served from a cached report") message: Optional[str] = Field(None, description="Additional information or error message") diff --git a/tests/test_daily_report.py b/tests/test_daily_report.py index 2bf056e..4071ffd 100644 --- a/tests/test_daily_report.py +++ b/tests/test_daily_report.py @@ -63,9 +63,7 @@ def test_generate_daily_report_csv_format(mock_db_data): patch("src.report_service._query_invalid_scans", return_value={"invalid_scans": 2}): target_date = date(2025, 10, 4) - report_path = generate_daily_report_csv(target_date=target_date, output_format="csv") - - assert report_path is not None + report_path, _ = generate_daily_report_csv(target_date=target_date, output_format="csv") assert Path(report_path).exists() assert "daily_report_2025-10-04" in report_path assert report_path.endswith(".csv") @@ -90,7 +88,7 @@ def test_generate_daily_report_json_format(mock_db_data): patch("src.report_service._query_invalid_scans", return_value={"invalid_scans": 2}): target_date = date(2025, 10, 4) - report_path = generate_daily_report_csv(target_date=target_date, output_format="json") + report_path, _ = generate_daily_report_csv(target_date=target_date, output_format="json") assert report_path is not None assert Path(report_path).exists() @@ -200,8 +198,8 @@ def test_report_includes_all_summary_fields(mock_db_data): patch("src.report_service._query_invalid_scans", return_value={"invalid_scans": 7}): target_date = date(2025, 10, 4) - report_path = generate_daily_report_csv(target_date=target_date, output_format="json") - + report_path, _ = generate_daily_report_csv(target_date=target_date, output_format="json") + with open(report_path, "r") as f: data = json.load(f) @@ -235,8 +233,8 @@ def test_csv_report_structure(mock_db_data): patch("src.report_service._query_invalid_scans", return_value={"invalid_scans": 0}): target_date = date(2025, 10, 4) - report_path = generate_daily_report_csv(target_date=target_date, output_format="csv") - + report_path, _ = generate_daily_report_csv(target_date=target_date, output_format="csv") + with open(report_path, "r") as f: reader = csv.reader(f) rows = list(reader) diff --git a/tests/test_report_caching.py b/tests/test_report_caching.py new file mode 100644 index 0000000..26c1c14 --- /dev/null +++ b/tests/test_report_caching.py @@ -0,0 +1,393 @@ +"""Tests for report generation caching (issue #156). 
+    if REPORTS_DIR.exists():
+        for f in REPORTS_DIR.glob("daily_report_*"):
+            f.unlink(missing_ok=True)
+
+
+# ---------------------------------------------------------------------------
+# Unit tests: generate_daily_report_csv
+# ---------------------------------------------------------------------------
+
+
+class TestCacheMiss:
+    @_patch_db
+    def test_returns_path_and_false_on_no_db(self, tmp_path, monkeypatch):
+        """With no DB engine, cache is always missed and the file is generated."""
+        monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path)
+        with patch("src.report_service._pg_engine", return_value=None):
+            path, cache_hit = generate_daily_report_csv(
+                target_date=date(2025, 1, 1), output_format="csv"
+            )
+        assert not cache_hit
+        assert Path(path).exists()
+        assert "daily_report_2025-01-01" in path
+
+    @_patch_db
+    def test_inserts_metadata_after_generation(self, tmp_path, monkeypatch):
+        """insert_report_metadata is called once after a fresh CSV is written."""
+        monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path)
+        with patch("src.report_service._pg_engine", return_value=None), \
+             patch("src.report_service.insert_report_metadata") as mock_insert:
+            path, cache_hit = generate_daily_report_csv(
+                target_date=date(2025, 1, 1), output_format="csv"
+            )
+        mock_insert.assert_called_once()
+        call_kwargs = mock_insert.call_args
+        assert call_kwargs.kwargs["fmt"] == "csv"
+        assert not cache_hit
+
+    @_patch_db
+    def test_cache_miss_json(self, tmp_path, monkeypatch):
+        """Cache miss for JSON format generates a .json file."""
+        monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path)
+        with patch("src.report_service._pg_engine", return_value=None):
+            path, cache_hit = generate_daily_report_csv(
+                target_date=date(2025, 1, 1), output_format="json"
+            )
+        assert not cache_hit
assert path.endswith(".json") + + +class TestCacheHit: + @_patch_db + def test_returns_existing_file_without_rewriting(self, tmp_path, monkeypatch): + """When check_report_cache returns metadata and the file exists, no new file is written.""" + monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path) + + # Pre-create the "cached" file + cached_filename = "daily_report_2025-01-01_cached.csv" + cached_file = tmp_path / cached_filename + cached_file.write_text("cached content") + + cached_meta = { + "filename": cached_filename, + "size_bytes": cached_file.stat().st_size, + "generated_at": datetime(2025, 1, 1, 10, 0, 0), + } + + with patch("src.report_service.check_report_cache", return_value=cached_meta) as mock_check, \ + patch("src.report_service.insert_report_metadata") as mock_insert: + path, cache_hit = generate_daily_report_csv( + target_date=date(2025, 1, 1), + output_format="csv", + cache_minutes=60, + ) + + assert cache_hit is True + assert path == str(tmp_path / cached_filename) + mock_check.assert_called_once_with(date(2025, 1, 1), None, "csv", 60) + # No new metadata row should be inserted for a cache hit + mock_insert.assert_not_called() + + @_patch_db + def test_cache_hit_skipped_when_file_missing(self, tmp_path, monkeypatch): + """If DB says cached but file is gone, fall through and regenerate.""" + monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path) + + cached_meta = { + "filename": "daily_report_2025-01-01_gone.csv", + "size_bytes": 512, + "generated_at": datetime(2025, 1, 1, 10, 0, 0), + } + + with patch("src.report_service.check_report_cache", return_value=cached_meta), \ + patch("src.report_service._pg_engine", return_value=None): + path, cache_hit = generate_daily_report_csv( + target_date=date(2025, 1, 1), + output_format="csv", + cache_minutes=60, + ) + + assert not cache_hit + assert Path(path).exists() + + +class TestForceRegenerate: + @_patch_db + def test_force_regenerate_skips_cache_check(self, tmp_path, monkeypatch): + """force_regenerate=True does not call check_report_cache.""" + monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path) + + with patch("src.report_service.check_report_cache") as mock_check, \ + patch("src.report_service._pg_engine", return_value=None): + path, cache_hit = generate_daily_report_csv( + target_date=date(2025, 1, 1), + output_format="csv", + force_regenerate=True, + ) + + mock_check.assert_not_called() + assert not cache_hit + assert Path(path).exists() + + @_patch_db + def test_force_regenerate_writes_new_file_even_when_cached(self, tmp_path, monkeypatch): + """force_regenerate=True always produces a new file even if cache would hit.""" + monkeypatch.setattr("src.report_service.REPORTS_DIR", tmp_path) + + # check_report_cache would normally hit, but it should never be called + cached_meta = { + "filename": "daily_report_2025-01-01_old.csv", + "size_bytes": 10, + "generated_at": datetime(2025, 1, 1, 10, 0, 0), + } + + with patch("src.report_service.check_report_cache", return_value=cached_meta) as mock_check, \ + patch("src.report_service._pg_engine", return_value=None): + path, cache_hit = generate_daily_report_csv( + target_date=date(2025, 1, 1), + output_format="csv", + force_regenerate=True, + ) + + mock_check.assert_not_called() + assert not cache_hit + + +# --------------------------------------------------------------------------- +# Unit tests: check_report_cache +# --------------------------------------------------------------------------- + + +class TestCheckReportCache: + def 
+
+
+class TestCheckReportCache:
+    def test_returns_none_when_no_engine(self):
+        with patch("src.report_service._pg_engine", return_value=None):
+            result = check_report_cache(date(2025, 1, 1), None, "csv", 60)
+        assert result is None
+
+    def test_returns_none_when_no_rows(self):
+        mock_conn = MagicMock()
+        mock_conn.__enter__ = MagicMock(return_value=mock_conn)
+        mock_conn.__exit__ = MagicMock(return_value=False)
+        mock_conn.execute.return_value.fetchone.return_value = None
+        mock_engine = MagicMock()
+        mock_engine.connect.return_value = mock_conn
+
+        with patch("src.report_service._pg_engine", return_value=mock_engine):
+            result = check_report_cache(date(2025, 1, 1), None, "csv", 60)
+        assert result is None
+
+    def test_returns_metadata_dict_when_row_exists(self):
+        fake_row = ("daily_report_2025-01-01_abc.csv", 4096, datetime(2025, 1, 1, 9, 0))
+        mock_conn = MagicMock()
+        mock_conn.__enter__ = MagicMock(return_value=mock_conn)
+        mock_conn.__exit__ = MagicMock(return_value=False)
+        mock_conn.execute.return_value.fetchone.return_value = fake_row
+        mock_engine = MagicMock()
+        mock_engine.connect.return_value = mock_conn
+
+        with patch("src.report_service._pg_engine", return_value=mock_engine):
+            result = check_report_cache(date(2025, 1, 1), None, "csv", 60)
+
+        assert result is not None
+        assert result["filename"] == "daily_report_2025-01-01_abc.csv"
+        assert result["size_bytes"] == 4096
+
+
+# ---------------------------------------------------------------------------
+# Unit tests: insert_report_metadata
+# ---------------------------------------------------------------------------
+
+
+class TestInsertReportMetadata:
+    def test_no_op_when_no_engine(self):
+        with patch("src.report_service._pg_engine", return_value=None):
+            # Should not raise
+            insert_report_metadata(
+                filename="test.csv",
+                report_date=date(2025, 1, 1),
+                event_id=None,
+                fmt="csv",
+                size_bytes=1024,
+                generated_at=datetime(2025, 1, 1),
+            )
+
+    def test_executes_insert(self):
+        mock_conn = MagicMock()
+        mock_conn.__enter__ = MagicMock(return_value=mock_conn)
+        mock_conn.__exit__ = MagicMock(return_value=False)
+        mock_engine = MagicMock()
+        mock_engine.connect.return_value = mock_conn
+
+        with patch("src.report_service._pg_engine", return_value=mock_engine):
+            insert_report_metadata(
+                filename="daily_report_2025-01-01_x.csv",
+                report_date=date(2025, 1, 1),
+                event_id="E1",
+                fmt="csv",
+                size_bytes=2048,
+                generated_at=datetime(2025, 1, 1, 12, 0),
+            )
+
+        mock_conn.execute.assert_called_once()
+        mock_conn.commit.assert_called_once()
+
+
+# ---------------------------------------------------------------------------
+# Endpoint tests
+# ---------------------------------------------------------------------------
+
+
+class TestEndpointCacheHit:
+    def test_cache_hit_flag_in_response(self):
+        """When generate_daily_report_csv returns cache_hit=True, the API reflects it."""
+        with patch(
+            "src.main.generate_daily_report_csv",
+            return_value=("reports/daily_report_2025-01-01_x.csv", True),
+        ), patch("src.main._query_daily_sales", return_value=_MOCK_SALES), \
+             patch("src.main._query_transfer_stats", return_value=_MOCK_TRANSFERS), \
+             patch("src.main._query_invalid_scans", return_value=_MOCK_SCANS):
+            response = client.post(
+                "/generate-daily-report",
+                json={"target_date": "2025-01-01", "output_format": "csv"},
+            )
+
+        assert response.status_code == 200
+        data = response.json()
+        assert data["success"] is True
+        assert data["cache_hit"] is True
+        assert "cache" in data["message"].lower()
+
+    def test_cache_miss_flag_in_response(self):
+        """When generate_daily_report_csv returns cache_hit=False, the API reflects it."""
+        with patch(
+            "src.main.generate_daily_report_csv",
+            return_value=("reports/daily_report_2025-01-01_new.csv", False),
+        ), patch("src.main._query_daily_sales", return_value=_MOCK_SALES), \
+             patch("src.main._query_transfer_stats", return_value=_MOCK_TRANSFERS), \
+             patch("src.main._query_invalid_scans", return_value=_MOCK_SCANS):
+            response = client.post(
+                "/generate-daily-report",
+                json={"target_date": "2025-01-01", "output_format": "csv"},
+            )
+
+        assert response.status_code == 200
+        data = response.json()
+        assert data["cache_hit"] is False
+        assert "generated" in data["message"].lower()
+
+
+class TestEndpointForceRegenerate:
+    def test_force_regenerate_passes_flag_to_service(self):
+        """force_regenerate=true is forwarded to generate_daily_report_csv."""
+        with patch(
+            "src.main.generate_daily_report_csv",
+            return_value=("reports/daily_report_2025-01-01_fresh.csv", False),
+        ) as mock_gen, \
+             patch("src.main._query_daily_sales", return_value=_MOCK_SALES), \
+             patch("src.main._query_transfer_stats", return_value=_MOCK_TRANSFERS), \
+             patch("src.main._query_invalid_scans", return_value=_MOCK_SCANS):
+            response = client.post(
+                "/generate-daily-report",
+                json={
+                    "target_date": "2025-01-01",
+                    "output_format": "csv",
+                    "force_regenerate": True,
+                },
+            )
+
+        assert response.status_code == 200
+        _, call_kwargs = mock_gen.call_args
+        assert call_kwargs["force_regenerate"] is True
+
+    def test_force_regenerate_default_is_false(self):
+        """force_regenerate defaults to False when not supplied."""
+        with patch(
+            "src.main.generate_daily_report_csv",
+            return_value=("reports/daily_report_2025-01-01_x.csv", False),
+        ) as mock_gen, \
+             patch("src.main._query_daily_sales", return_value=_MOCK_SALES), \
+             patch("src.main._query_transfer_stats", return_value=_MOCK_TRANSFERS), \
+             patch("src.main._query_invalid_scans", return_value=_MOCK_SCANS):
+            client.post(
+                "/generate-daily-report",
+                json={"target_date": "2025-01-01"},
+            )
+
+        _, call_kwargs = mock_gen.call_args
+        assert call_kwargs["force_regenerate"] is False
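+
+
+# get_settings() is patched in the test below, so flags that other parts of
+# the app consult (SKIP_MODEL_TRAINING, ENABLE_ETL_SCHEDULER) are stubbed too.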
+class TestEndpointCacheMinutes:
+    def test_report_cache_minutes_from_settings(self):
+        """REPORT_CACHE_MINUTES from settings is forwarded to the service."""
+        with patch(
+            "src.main.generate_daily_report_csv",
+            return_value=("reports/daily_report_2025-01-01_x.csv", False),
+        ) as mock_gen, \
+             patch("src.main._query_daily_sales", return_value=_MOCK_SALES), \
+             patch("src.main._query_transfer_stats", return_value=_MOCK_TRANSFERS), \
+             patch("src.main._query_invalid_scans", return_value=_MOCK_SCANS), \
+             patch("src.main.get_settings") as mock_settings:
+            mock_settings.return_value.REPORT_CACHE_MINUTES = 30
+            mock_settings.return_value.SKIP_MODEL_TRAINING = True
+            mock_settings.return_value.ENABLE_ETL_SCHEDULER = False
+            client.post("/generate-daily-report", json={"target_date": "2025-01-01"})
+
+        _, call_kwargs = mock_gen.call_args
+        assert call_kwargs["cache_minutes"] == 30
diff --git a/tests/test_standard_modules_unittest.py b/tests/test_standard_modules_unittest.py
index 3eb1733..1ed373c 100644
--- a/tests/test_standard_modules_unittest.py
+++ b/tests/test_standard_modules_unittest.py
@@ -164,7 +164,7 @@ def test_generate_daily_report_json(self):
         with patch("src.report_service._query_event_names", return_value={"E1": "Show"}):
             with patch("src.report_service._query_transfer_stats", return_value={"total_transfers": 1}):
                 with patch("src.report_service._query_invalid_scans", return_value={"invalid_scans": 0}):
-                    path = generate_daily_report_csv(target_date=date(2025, 10, 4), output_format="json")
+                    path, _ = generate_daily_report_csv(target_date=date(2025, 10, 4), output_format="json")
                     with open(path, "r", encoding="utf-8") as handle:
                         data = json.load(handle)
                     self.assertEqual(data["summary"]["total_sales"], 5)