From ca582791bedb325c0af472ffa880679fa2f34591 Mon Sep 17 00:00:00 2001
From: jtyoung84 <104453205+jtyoung84@users.noreply.github.com>
Date: Tue, 7 May 2024 09:19:19 -0700
Subject: [PATCH] hot-fix: processor full name

---
 README.md                                           | 3 ---
 src/aind_data_transfer_service/templates/index.html | 5 ++---
 tests/test_hpc_models.py                            | 1 -
 3 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 79085d4..c786392 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,6 @@ You can go to http://aind-data-transfer-service to submit a `.csv` or `.xlsx` fi
 
 What each column means in the job submission template:
 
-- **processor_full_name**: Name of the person submitting the upload job
 - **project_name**: Project name. A full list can be downloaded at [Project Names](http://aind-metadata-service/project_names)
 - **process_capsule_id**: Optional Code Ocean capsule or pipeline to run when data is uploaded
 - **platform**: For a list of platforms click [here](https://github.com/AllenNeuralDynamics/aind-data-schema/blob/main/src/aind_data_schema/models/platforms.py).
@@ -73,11 +72,9 @@ platform = Platform.BEHAVIOR
 behavior_config = ModalityConfigs(modality=Modality.BEHAVIOR, source=(source_dir / "Behavior"))
 behavior_videos_config = ModalityConfigs(modality=Modality.BEHAVIOR_VIDEOS, source=(source_dir / "Behavior videos"))
 metadata_dir = source_dir / "Config"  # This is an optional folder of pre-compiled metadata json files
-processor_full_name="Anna Apple"
 project_name="Ephys Platform"
 
 upload_job_configs = BasicUploadJobConfigs(
-    processor_full_name=processor_full_name,
     project_name=project_name,
     s3_bucket = s3_bucket,
     platform = platform,
diff --git a/src/aind_data_transfer_service/templates/index.html b/src/aind_data_transfer_service/templates/index.html
index 4e5c3f1..4ce0b04 100644
--- a/src/aind_data_transfer_service/templates/index.html
+++ b/src/aind_data_transfer_service/templates/index.html
@@ -145,7 +145,7 @@ Submit Jobs
         let jobsLength = jobs.length;
         var table = document.createElement('table'), tr, td, row;
         addTableRow(
-          [ "processor_full_name", "project_name", "process_capsule_id", "s3_bucket", "platform", "subject_id", "acq_datetime", "metadata_dir", "modality", "modality.source" ],
+          [ "project_name", "process_capsule_id", "s3_bucket", "platform", "subject_id", "acq_datetime", "metadata_dir", "modality", "modality.source" ],
           table, tr, td, true
         );
         for (row = 0; row < jobsLength; row++) {
@@ -153,8 +153,7 @@ Submit Jobs
           let modalities = job.modalities;
           let modalitiesLength = modalities.length;
           addTableRow(
-            [ { value: job.processor_full_name, rowspan: modalitiesLength },
-              { value: job.project_name, rowspan: modalitiesLength },
+            [ { value: job.project_name, rowspan: modalitiesLength },
               { value: job.process_capsule_id ?? "", rowspan: modalitiesLength },
               { value: job.s3_bucket, rowspan: modalitiesLength },
               { value: job.platform.abbreviation, rowspan: modalitiesLength },
diff --git a/tests/test_hpc_models.py b/tests/test_hpc_models.py
index 7f7094d..c8afa22 100644
--- a/tests/test_hpc_models.py
+++ b/tests/test_hpc_models.py
@@ -133,7 +133,6 @@ class TestHpcJobSubmitSettings(unittest.TestCase):
 
     example_config = BasicUploadJobConfigs(
         aws_param_store_name="/some/param/store",
-        processor_full_name="John Smith",
         project_name="Behavior Platform",
         s3_bucket="some_bucket",
         platform=Platform.ECEPHYS,
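
For callers that construct upload jobs in Python rather than through the web form, below is a minimal sketch of a config after this change, adapted from the README example in the patch above. The import paths and the subject_id/acq_datetime values are assumptions for illustration, not verbatim from the repository:

from datetime import datetime
from pathlib import Path

# Import paths are assumptions; confirm against your checkout of the repos.
from aind_data_schema.models.modalities import Modality
from aind_data_schema.models.platforms import Platform
from aind_data_transfer_service.configs.job_configs import (
    BasicUploadJobConfigs,
    ModalityConfigs,
)

source_dir = Path("/some/source/dir")  # hypothetical acquisition folder
behavior_config = ModalityConfigs(
    modality=Modality.BEHAVIOR, source=(source_dir / "Behavior")
)

# processor_full_name is intentionally absent; this patch removes the field.
upload_job_configs = BasicUploadJobConfigs(
    project_name="Ephys Platform",
    s3_bucket="some_bucket",
    platform=Platform.BEHAVIOR,
    subject_id="123456",                      # hypothetical value
    acq_datetime=datetime(2024, 5, 7, 9, 0),  # hypothetical value
    metadata_dir=(source_dir / "Config"),     # optional pre-compiled metadata
    modalities=[behavior_config],
)

The same removal applies to the .csv/.xlsx job submission template: per the index.html change above, the header row is now project_name, process_capsule_id, s3_bucket, platform, subject_id, acq_datetime, metadata_dir, modality, modality.source.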