@@ -413,9 +413,6 @@ def upload_file_to_gcs(local_file_path, gcs_file_path):
         bool: True if successful, False otherwise
     """
     if not use_gcs_output:
-        print(
-            f"GCS output not configured, skipping upload of {local_file_path} to {gcs_file_path}."
-        )
         return True

     try:
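
The hunk above only touches the early-return guard; the actual upload path is not part of this diff. As a rough sketch of what the `try` block presumably does with the google-cloud-storage client, where `gcs_bucket` is an assumed, pre-initialized `Bucket` handle (not shown in the diff):

    # Sketch only: `gcs_bucket` is an assumed google.cloud.storage Bucket object.
    try:
        blob = gcs_bucket.blob(str(gcs_file_path))       # blob handle at the target GCS path
        blob.upload_from_filename(str(local_file_path))  # upload the local file's contents
        return True
    except Exception as e:
        print(f"Failed to upload {local_file_path}: {e}")
        return False
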
@@ -755,6 +752,7 @@ def process(args):
 # %% Function to check the output directory for completed chunks and upload them to GCS

 processed_chunks_bounds = None
+uploaded_files = []


 # TODO this probably wants to bulk together uploads to reduce overhead
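
The new `uploaded_files` list records `(local_path, gcs_path)` pairs for chunks that have already gone up. The TODO about bulking uploads is not addressed in this diff; one hedged sketch of how it might look, assuming a hypothetical `pending_uploads` list of `(local_path, gcs_path)` pairs and reusing the single-file `upload_file_to_gcs` helper defined earlier:

    from concurrent.futures import ThreadPoolExecutor

    def upload_chunks_in_bulk(pending_uploads, max_workers=8):
        # Hypothetical helper for the TODO above: run several single-file
        # uploads concurrently to amortize per-request overhead.
        with ThreadPoolExecutor(max_workers=max_workers) as pool:
            results = list(pool.map(lambda pair: upload_file_to_gcs(*pair), pending_uploads))
        return sum(results)  # upload_file_to_gcs returns True/False, so this counts successes
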
@@ -810,10 +808,10 @@ def check_and_upload_completed_chunks():
         )
         if upload_file_to_gcs(chunk_file, gcs_chunk_path):
             uploaded_count += 1
-            print(f"Uploaded chunk: {gcs_chunk_path}")
             # Remove local chunk to save space
             if use_gcs_output:
                 chunk_file.unlink()
+            uploaded_files.append((chunk_file, gcs_chunk_path))

     return uploaded_count

@@ -842,9 +840,10 @@ def upload_any_remaining_chunks():
         )
         if upload_file_to_gcs(chunk_file, gcs_chunk_path):
             uploaded_count += 1
-            print(f"Uploaded chunk: {gcs_chunk_path}")
             # Remove local chunk to save space
-            chunk_file.unlink()
+            if use_gcs_output:
+                chunk_file.unlink()
+            uploaded_files.append((chunk_file, gcs_chunk_path))

     return uploaded_count

@@ -878,6 +877,6 @@ def upload_any_remaining_chunks():
     print(f"Final upload completed: {total_uploaded_files} chunks uploaded")

 # %% Serve the dataset to be used in neuroglancer
-vols[0].viewer(port=1337)
+# vols[0].viewer(port=1337)

 # %%