 from typing import Optional, List
 
 from fastapi import APIRouter, HTTPException, status, Form, UploadFile
+from fastapi.concurrency import run_in_threadpool
 from pydantic import BaseModel
 from starlette.responses import StreamingResponse
 from typing_extensions import Annotated, Literal
 
 import cea.config
+import cea.inputlocator
 from cea.datamanagement.format_helper.cea4_migrate import migrate_cea3_to_cea4
 from cea.datamanagement.format_helper.cea4_migrate_db import migrate_cea3_to_cea4_db
 from cea.datamanagement.format_helper.cea4_verify import cea4_verify
@@ -397,6 +399,23 @@ class DownloadScenario(BaseModel):
     project: str
     scenarios: List[str]
     input_files: bool
+    output_files: Literal["summary", "detailed"]
+
+
+def run_summary(project: str, scenario_name: str):
+    """Run the summary function to ensure all summary files are generated"""
+    config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
+    config.project = project
+    config.scenario_name = scenario_name
+
+    config.result_summary.aggregate_by_building = True
+
+    try:
+        from cea.import_export.result_summary import main as result_summary_main
+        result_summary_main(config)
+    except Exception as e:
+        logger.error(f"Error generating summary for {scenario_name}: {str(e)}")
+        raise e
 
 
 @router.post("/scenario/download")
@@ -414,33 +433,75 @@ async def download_scenario(form: DownloadScenario, project_root: CEAProjectRoot
 
     project = form.project.strip()
     scenarios = form.scenarios
-    input_files_only = form.input_files
+    input_files = form.input_files
+    output_files_level = form.output_files
 
     filename = f"{project}_scenarios.zip" if len(scenarios) > 1 else f"{project}_{scenarios[0]}.zip"
 
     temp_file_path = None
     try:
         with tempfile.NamedTemporaryFile(delete=False) as temp_file:
             temp_file_path = temp_file.name
-            with zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_DEFLATED) as zip_file:
-                base_path = Path(project_root) / project
+
+            # Use compresslevel=1 for faster zipping, at the cost of compression ratio
+            with zipfile.ZipFile(temp_file, 'w', zipfile.ZIP_DEFLATED, compresslevel=1) as zip_file:
+                base_path = Path(secure_path(Path(project_root, project).resolve()))
 
+                # Collect all files first for batch processing
+                files_to_zip = []
                 for scenario in scenarios:
-                    scenario_path = base_path / scenario
+                    # sanitize scenario for fs ops and zip arcnames
+                    scenario_name = Path(scenario).name
+                    scenario_path = Path(secure_path((base_path / scenario_name).resolve()))
+
                     if not scenario_path.exists():
                         continue
-
-                    target_path = (scenario_path / "inputs") if input_files_only else scenario_path
-                    prefix = f"{scenario}/inputs" if input_files_only else scenario
-
-                    for item_path in target_path.rglob('*'):
-                        if item_path.is_file() and item_path.suffix in VALID_EXTENSIONS:
-                            relative_path = str(Path(prefix) / item_path.relative_to(target_path))
-                            zip_file.write(item_path, arcname=relative_path)
+
+                    input_paths = (scenario_path / "inputs")
+                    if input_files and input_paths.exists():
+                        for root, dirs, files in os.walk(input_paths):
+                            root_path = Path(root)
+                            for file in files:
+                                if Path(file).suffix in VALID_EXTENSIONS:
+                                    item_path = root_path / file
+                                    relative_path = str(Path(scenario_name) / "inputs" / item_path.relative_to(input_paths))
+                                    files_to_zip.append((item_path, relative_path))
+
+                    output_paths = (scenario_path / "outputs")
+                    if output_files_level == "detailed" and output_paths.exists():
+                        for root, dirs, files in os.walk(output_paths):
+                            root_path = Path(root)
+                            for file in files:
+                                if Path(file).suffix in VALID_EXTENSIONS:
+                                    item_path = root_path / file
+                                    relative_path = str(Path(scenario_name) / "outputs" / item_path.relative_to(output_paths))
+                                    files_to_zip.append((item_path, relative_path))
+
+                    elif output_files_level == "summary":
+                        # create summary files first
+                        await run_in_threadpool(run_summary, str(base_path), scenario_name)
+
+                        export_paths = (scenario_path / "export" / "results")
+                        if not export_paths.exists():
+                            raise ValueError(f"Export results path does not exist for scenario {scenario_name}")
+
+                        for root, dirs, files in os.walk(export_paths):
+                            root_path = Path(root)
+                            for file in files:
+                                if Path(file).suffix in VALID_EXTENSIONS:
+                                    item_path = root_path / file
+                                    relative_path = str(
+                                        Path(scenario_name) / "export" / "results" / item_path.relative_to(export_paths))
+                                    files_to_zip.append((item_path, relative_path))
+
+                # Batch write all files to zip
+                logger.info(f"Writing {len(files_to_zip)} files to zip...")
+                for item_path, archive_name in files_to_zip:
+                    zip_file.write(item_path, arcname=archive_name)
 
 
         # Get the file size for Content-Length header
         file_size = os.path.getsize(temp_file_path)
-
+
         # Define the streaming function
         async def file_streamer():
             try:
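For reference, a minimal client-side sketch of how the extended endpoint could be exercised once this change is merged. The field names follow the DownloadScenario model above; the host, port, route prefix, project name, and scenario name are assumptions, and the requests library is not part of this PR:

import requests

# Ask for one scenario's input files plus summary-level outputs.
# "output_files" must be "summary" or "detailed", per the Literal annotation above.
payload = {
    "project": "my_project",      # hypothetical project name
    "scenarios": ["baseline"],    # hypothetical scenario name
    "input_files": True,
    "output_files": "summary",
}

# Base URL and route prefix are assumptions about how the router is mounted.
url = "http://localhost:5050/api/project/scenario/download"
with requests.post(url, json=payload, stream=True) as response:
    response.raise_for_status()
    # Stream the zip to disk; for a single scenario the server names it f"{project}_{scenario}.zip".
    with open("my_project_baseline.zip", "wb") as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)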