diff --git a/airlock_processor/StatusChangedQueueTrigger/__init__.py b/airlock_processor/StatusChangedQueueTrigger/__init__.py index 5946a9b4ec..879c0b134b 100644 --- a/airlock_processor/StatusChangedQueueTrigger/__init__.py +++ b/airlock_processor/StatusChangedQueueTrigger/__init__.py @@ -18,7 +18,7 @@ class RequestProperties(BaseModel): new_status: str previous_status: Optional[str] type: str - workspace_id: str + workspace_unique_identifier_suffix: str class ContainersCopyMetadata: @@ -48,18 +48,18 @@ def handle_status_changed(request_properties: RequestProperties, stepResultEvent new_status = request_properties.new_status previous_status = request_properties.previous_status req_id = request_properties.request_id - ws_id = request_properties.workspace_id + unique_suffix = request_properties.workspace_unique_identifier_suffix request_type = request_properties.type - logging.info('Processing request with id %s. new status is "%s", type is "%s"', req_id, new_status, request_type) + logging.info(f'Processing request with id {req_id}. 
new status is {new_status}, previous status is {previous_status}, unique_suffix is {unique_suffix} type is {request_type}') if new_status == constants.STAGE_DRAFT: - account_name = get_storage_account(status=constants.STAGE_DRAFT, request_type=request_type, short_workspace_id=ws_id) + account_name = get_storage_account(status=constants.STAGE_DRAFT, request_type=request_type, workspace_unique_identifier_suffix=unique_suffix) blob_operations.create_container(account_name, req_id) return if new_status == constants.STAGE_CANCELLED: - storage_account_name = get_storage_account(previous_status, request_type, ws_id) + storage_account_name = get_storage_account(previous_status, request_type, unique_suffix) container_to_delete_url = blob_operations.get_blob_url(account_name=storage_account_name, container_name=req_id) set_output_event_to_trigger_container_deletion(dataDeletionEvent, request_properties, container_url=container_to_delete_url) return @@ -69,7 +69,7 @@ def handle_status_changed(request_properties: RequestProperties, stepResultEvent if (is_require_data_copy(new_status)): logging.info('Request with id %s. 
requires data copy between storage accounts', req_id) - containers_metadata = get_source_dest_for_copy(new_status=new_status, previous_status=previous_status, request_type=request_type, short_workspace_id=ws_id) + containers_metadata = get_source_dest_for_copy(new_status=new_status, previous_status=previous_status, request_type=request_type, workspace_unique_identifier_suffix=unique_suffix) blob_operations.create_container(containers_metadata.dest_account_name, req_id) blob_operations.copy_data(containers_metadata.source_account_name, containers_metadata.dest_account_name, req_id) @@ -102,7 +102,7 @@ def is_require_data_copy(new_status: str): return False -def get_source_dest_for_copy(new_status: str, previous_status: str, request_type: str, short_workspace_id: str) -> ContainersCopyMetadata: +def get_source_dest_for_copy(new_status: str, previous_status: str, request_type: str, workspace_unique_identifier_suffix: str) -> ContainersCopyMetadata: # sanity if is_require_data_copy(new_status) is False: raise Exception("Given new status is not supported") @@ -114,19 +114,19 @@ def get_source_dest_for_copy(new_status: str, previous_status: str, request_type logging.error(msg) raise Exception(msg) - source_account_name = get_storage_account(previous_status, request_type, short_workspace_id) - dest_account_name = get_storage_account_destination_for_copy(new_status, request_type, short_workspace_id) + source_account_name = get_storage_account(previous_status, request_type, workspace_unique_identifier_suffix) + dest_account_name = get_storage_account_destination_for_copy(new_status, request_type, workspace_unique_identifier_suffix) return ContainersCopyMetadata(source_account_name, dest_account_name) -def get_storage_account(status: str, request_type: str, short_workspace_id: str) -> str: +def get_storage_account(status: str, request_type: str, workspace_unique_identifier_suffix: str) -> str: tre_id = _get_tre_id() if request_type == constants.IMPORT_TYPE: if status == 
constants.STAGE_DRAFT: return constants.STORAGE_ACCOUNT_NAME_IMPORT_EXTERNAL + tre_id elif status == constants.STAGE_APPROVED: - return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED + workspace_unique_identifier_suffix elif status == constants.STAGE_REJECTED: return constants.STORAGE_ACCOUNT_NAME_IMPORT_REJECTED + tre_id elif status == constants.STAGE_BLOCKED_BY_SCAN: @@ -136,29 +136,29 @@ def get_storage_account(status: str, request_type: str, short_workspace_id: str) if request_type == constants.EXPORT_TYPE: if status == constants.STAGE_DRAFT: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INTERNAL + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INTERNAL + workspace_unique_identifier_suffix elif status == constants.STAGE_APPROVED: return constants.STORAGE_ACCOUNT_NAME_EXPORT_APPROVED + tre_id elif status == constants.STAGE_REJECTED: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED + workspace_unique_identifier_suffix elif status == constants.STAGE_BLOCKED_BY_SCAN: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED + workspace_unique_identifier_suffix elif status in [constants.STAGE_IN_REVIEW, constants.STAGE_SUBMITTED, constants.STAGE_APPROVAL_INPROGRESS, constants.STAGE_REJECTION_INPROGRESS, constants.STAGE_BLOCKING_INPROGRESS]: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS + workspace_unique_identifier_suffix error_message = f"Missing current storage account definition for status '{status}' and request type '{request_type}'." 
logging.error(error_message) raise Exception(error_message) -def get_storage_account_destination_for_copy(new_status: str, request_type: str, short_workspace_id: str) -> str: +def get_storage_account_destination_for_copy(new_status: str, request_type: str, workspace_unique_identifier_suffix: str) -> str: tre_id = _get_tre_id() if request_type == constants.IMPORT_TYPE: if new_status == constants.STAGE_SUBMITTED: return constants.STORAGE_ACCOUNT_NAME_IMPORT_INPROGRESS + tre_id elif new_status == constants.STAGE_APPROVAL_INPROGRESS: - return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED + workspace_unique_identifier_suffix elif new_status == constants.STAGE_REJECTION_INPROGRESS: return constants.STORAGE_ACCOUNT_NAME_IMPORT_REJECTED + tre_id elif new_status == constants.STAGE_BLOCKING_INPROGRESS: @@ -166,13 +166,13 @@ def get_storage_account_destination_for_copy(new_status: str, request_type: str, if request_type == constants.EXPORT_TYPE: if new_status == constants.STAGE_SUBMITTED: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS + workspace_unique_identifier_suffix elif new_status == constants.STAGE_APPROVAL_INPROGRESS: return constants.STORAGE_ACCOUNT_NAME_EXPORT_APPROVED + tre_id elif new_status == constants.STAGE_REJECTION_INPROGRESS: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED + workspace_unique_identifier_suffix elif new_status == constants.STAGE_BLOCKING_INPROGRESS: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED + short_workspace_id + return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED + workspace_unique_identifier_suffix error_message = f"Missing copy destination storage account definition for status '{new_status}' and request type '{request_type}'." 
logging.error(error_message) @@ -218,7 +218,7 @@ def set_output_event_to_trigger_container_deletion(dataDeletionEvent, request_pr def get_request_files(request_properties: RequestProperties): - storage_account_name = get_storage_account(request_properties.previous_status, request_properties.type, request_properties.workspace_id) + storage_account_name = get_storage_account(request_properties.previous_status, request_properties.type, request_properties.workspace_unique_identifier_suffix) return blob_operations.get_request_files(account_name=storage_account_name, request_id=request_properties.request_id) diff --git a/airlock_processor/_version.py b/airlock_processor/_version.py index bc8c296f6a..c8b45469ab 100644 --- a/airlock_processor/_version.py +++ b/airlock_processor/_version.py @@ -1 +1 @@ -__version__ = "0.7.2" +__version__ = "0.7.3" \ No newline at end of file diff --git a/airlock_processor/tests/test_status_change_queue_trigger.py b/airlock_processor/tests/test_status_change_queue_trigger.py index 65bac890a5..284010a00f 100644 --- a/airlock_processor/tests/test_status_change_queue_trigger.py +++ b/airlock_processor/tests/test_status_change_queue_trigger.py @@ -11,25 +11,25 @@ class TestPropertiesExtraction(): def test_extract_prop_valid_body_return_all_values(self): - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"456\" ,\"previous_status\":\"789\" , \"type\":\"101112\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"456\" ,\"previous_status\":\"789\" , \"type\":\"101112\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) req_prop = extract_properties(message) assert req_prop.request_id == "123" assert req_prop.new_status == "456" assert req_prop.previous_status == "789" assert req_prop.type == "101112" - assert req_prop.workspace_id == "ws1" + assert req_prop.workspace_unique_identifier_suffix == "ws1" def 
test_extract_prop_missing_arg_throws(self): - message_body = "{ \"data\": { \"status\":\"456\" , \"type\":\"789\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"status\":\"456\" , \"type\":\"789\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) pytest.raises(ValidationError, extract_properties, message) - message_body = "{ \"data\": { \"request_id\":\"123\", \"type\":\"789\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\", \"type\":\"789\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) pytest.raises(ValidationError, extract_properties, message) - message_body = "{ \"data\": { \"request_id\":\"123\",\"status\":\"456\" , \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"status\":\"456\" , \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) pytest.raises(ValidationError, extract_properties, message) @@ -72,7 +72,7 @@ class TestFileEnumeration(): @patch("StatusChangedQueueTrigger.is_require_data_copy", return_value=False) @patch.dict(os.environ, {"TRE_ID": "tre-id"}, clear=True) def test_get_request_files_should_be_called_on_submit_stage(self, _, mock_get_request_files, mock_set_output_event_to_report_request_files): - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) main(msg=message, stepResultEvent=MagicMock(), dataDeletionEvent=MagicMock()) assert mock_get_request_files.called @@ -82,7 +82,7 @@ def 
test_get_request_files_should_be_called_on_submit_stage(self, _, mock_get_re @patch("StatusChangedQueueTrigger.get_request_files") @patch("StatusChangedQueueTrigger.handle_status_changed") def test_get_request_files_should_not_be_called_if_new_status_is_not_submit(self, _, mock_get_request_files, mock_set_output_event_to_report_failure): - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"fake-status\" ,\"previous_status\":\"None\" , \"type\":\"export\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"fake-status\" ,\"previous_status\":\"None\" , \"type\":\"export\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) main(msg=message, stepResultEvent=MagicMock(), dataDeletionEvent=MagicMock()) assert not mock_get_request_files.called @@ -92,7 +92,7 @@ def test_get_request_files_should_not_be_called_if_new_status_is_not_submit(self @patch("StatusChangedQueueTrigger.get_request_files") @patch("StatusChangedQueueTrigger.handle_status_changed", side_effect=Exception) def test_get_request_files_should_be_called_when_failing_during_submit_stage(self, _, mock_get_request_files, mock_set_output_event_to_report_failure): - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) main(msg=message, stepResultEvent=MagicMock(), dataDeletionEvent=MagicMock()) assert mock_get_request_files.called @@ -102,7 +102,7 @@ def test_get_request_files_should_be_called_when_failing_during_submit_stage(sel @patch.dict(os.environ, {"TRE_ID": "tre-id"}, clear=True) def 
test_get_request_files_called_with_correct_storage_account(self, mock_get_request_files): source_storage_account_for_submitted_stage = constants.STORAGE_ACCOUNT_NAME_EXPORT_INTERNAL + 'ws1' - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"submitted\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) request_properties = extract_properties(message) get_request_files(request_properties) @@ -113,7 +113,7 @@ class TestFilesDeletion(): @patch("StatusChangedQueueTrigger.set_output_event_to_trigger_container_deletion") @patch.dict(os.environ, {"TRE_ID": "tre-id"}, clear=True) def test_delete_request_files_should_be_called_on_cancel_stage(self, mock_set_output_event_to_trigger_container_deletion): - message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"cancelled\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_id\":\"ws1\" }}" + message_body = "{ \"data\": { \"request_id\":\"123\",\"new_status\":\"cancelled\" ,\"previous_status\":\"draft\" , \"type\":\"export\", \"workspace_unique_identifier_suffix\":\"ws1\" }}" message = _mock_service_bus_message(body=message_body) main(msg=message, stepResultEvent=MagicMock(), dataDeletionEvent=MagicMock()) assert mock_set_output_event_to_trigger_container_deletion.called diff --git a/api_app/_version.py b/api_app/_version.py index 2cfeb05209..4cf63cebab 100644 --- a/api_app/_version.py +++ b/api_app/_version.py @@ -1 +1 @@ -__version__ = "0.18.11" +__version__ = "0.18.12" \ No newline at end of file diff --git a/api_app/api/routes/migrations.py b/api_app/api/routes/migrations.py index 692f664583..4fc400ace0 100644 --- a/api_app/api/routes/migrations.py +++ b/api_app/api/routes/migrations.py @@ -77,9 +77,9 @@ 
async def migrate_database(resources_repo=Depends(get_repository(ResourceReposit num_updated = await airlock_migration.update_review_decision_values() migrations.append(Migration(issueNumber="3152", status=f'Updated {num_updated} airlock requests with new reviewDecision value')) - logger.info("PR 3358 - Migrate OperationSteps of Operations") - num_updated = await resource_migration.migrate_step_id_of_operation_steps(operations_repo) - migrations.append(Migration(issueNumber="3358", status=f'Updated {num_updated} operations')) + logger.info("PR 3243 - Add unique_identifier_suffix to resources") + num_updated = await resource_migration.add_unique_identifier_suffix() + migrations.append(Migration(issueNumber="3243", status=f'Added the unique_identifier_suffix field to {num_updated} resources')) return MigrationOutList(migrations=migrations) except Exception as e: diff --git a/api_app/db/migrations/resources.py b/api_app/db/migrations/resources.py index 4c99bff4cf..df525f6020 100644 --- a/api_app/db/migrations/resources.py +++ b/api_app/db/migrations/resources.py @@ -40,15 +40,11 @@ async def archive_history(self, resource_history_repository: ResourceHistoryRepo return num_updated - async def migrate_step_id_of_operation_steps(self, operations_repository: OperationRepository) -> int: + async def add_unique_identifier_suffix(self) -> int: num_updated = 0 - for operation in await operations_repository.query("SELECT * from c WHERE ARRAY_LENGTH(c.steps) > 0 AND IS_DEFINED(c.steps[0].stepId)"): - for operation_step in operation['steps']: - operation_step['templateStepId'] = operation_step['stepId'] - operation_step['id'] = str(uuid.uuid4()) - del operation_step['stepId'] - - await operations_repository.update_item_dict(operation) + for resource in await self.query("SELECT * from c WHERE NOT IS_DEFINED(c.properties.unique_identifier_suffix)"): + resource['properties']['unique_identifier_suffix'] = resource['id'][-4:] + await self.update_item_dict(resource) num_updated = 
num_updated + 1 return num_updated diff --git a/api_app/event_grid/event_sender.py b/api_app/event_grid/event_sender.py index 1821c65589..211e689b93 100644 --- a/api_app/event_grid/event_sender.py +++ b/api_app/event_grid/event_sender.py @@ -11,16 +11,15 @@ from services.logging import logger -async def send_status_changed_event(airlock_request: AirlockRequest, previous_status: Optional[AirlockRequestStatus]): +async def send_status_changed_event(airlock_request: AirlockRequest, workspace_unique_identifier_suffix: str, previous_status: Optional[AirlockRequestStatus]): request_id = airlock_request.id new_status = airlock_request.status.value previous_status = previous_status.value if previous_status else None request_type = airlock_request.type.value - short_workspace_id = airlock_request.workspaceId[-4:] status_changed_event = EventGridEvent( event_type="statusChanged", - data=StatusChangedData(request_id=request_id, new_status=new_status, previous_status=previous_status, type=request_type, workspace_id=short_workspace_id).__dict__, + data=StatusChangedData(request_id=request_id, new_status=new_status, previous_status=previous_status, type=request_type, workspace_unique_identifier_suffix=workspace_unique_identifier_suffix).__dict__, subject=f"{request_id}/statusChanged", data_version="2.0" ) diff --git a/api_app/models/domain/events.py b/api_app/models/domain/events.py index 76d7c557c9..4b0690abe6 100644 --- a/api_app/models/domain/events.py +++ b/api_app/models/domain/events.py @@ -39,4 +39,4 @@ class StatusChangedData(AzureTREModel): new_status: str previous_status: Optional[str] type: str - workspace_id: str + workspace_unique_identifier_suffix: str diff --git a/api_app/services/airlock.py b/api_app/services/airlock.py index 6e79d49b64..960bd6af61 100644 --- a/api_app/services/airlock.py +++ b/api_app/services/airlock.py @@ -38,7 +38,8 @@ def get_account_by_request(airlock_request: AirlockRequest, workspace: Workspace) -> str: tre_id = config.TRE_ID - 
short_workspace_id = workspace.id[-4:] + unique_suffix = workspace.properties['unique_identifier_suffix'] + if airlock_request.type == constants.IMPORT_TYPE: if airlock_request.status == AirlockRequestStatus.Draft: return constants.STORAGE_ACCOUNT_NAME_IMPORT_EXTERNAL.format(tre_id) @@ -47,24 +48,24 @@ def get_account_by_request(airlock_request: AirlockRequest, workspace: Workspace elif airlock_request.status == AirlockRequestStatus.InReview: return constants.STORAGE_ACCOUNT_NAME_IMPORT_INPROGRESS.format(tre_id) elif airlock_request.status == AirlockRequestStatus.Approved: - return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED.format(short_workspace_id) + return constants.STORAGE_ACCOUNT_NAME_IMPORT_APPROVED.format(unique_suffix) elif airlock_request.status == AirlockRequestStatus.Rejected: return constants.STORAGE_ACCOUNT_NAME_IMPORT_REJECTED.format(tre_id) elif airlock_request.status == AirlockRequestStatus.Blocked: return constants.STORAGE_ACCOUNT_NAME_IMPORT_BLOCKED.format(tre_id) else: if airlock_request.status == AirlockRequestStatus.Draft: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INTERNAL.format(short_workspace_id) + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INTERNAL.format(unique_suffix) elif airlock_request.status in AirlockRequestStatus.Submitted: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS.format(short_workspace_id) + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS.format(unique_suffix) elif airlock_request.status == AirlockRequestStatus.InReview: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS.format(short_workspace_id) + return constants.STORAGE_ACCOUNT_NAME_EXPORT_INPROGRESS.format(unique_suffix) elif airlock_request.status == AirlockRequestStatus.Approved: return constants.STORAGE_ACCOUNT_NAME_EXPORT_APPROVED.format(tre_id) elif airlock_request.status == AirlockRequestStatus.Rejected: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED.format(short_workspace_id) + return 
constants.STORAGE_ACCOUNT_NAME_EXPORT_REJECTED.format(unique_suffix) elif airlock_request.status == AirlockRequestStatus.Blocked: - return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED.format(short_workspace_id) + return constants.STORAGE_ACCOUNT_NAME_EXPORT_BLOCKED.format(unique_suffix) def validate_user_allowed_to_access_storage_account(user: User, airlock_request: AirlockRequest): @@ -287,15 +288,13 @@ async def save_and_publish_event_airlock_request(airlock_request: AirlockRequest raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=strings.STATE_STORE_ENDPOINT_NOT_RESPONDING) try: - logger.debug(f"Sending status changed event for airlock request item: {airlock_request.id}") - await send_status_changed_event(airlock_request=airlock_request, previous_status=None) + logger.debug(f"Sending status changed event for airlock request item: {airlock_request.id}") + await send_status_changed_event(airlock_request=airlock_request, workspace_unique_identifier_suffix=workspace.properties['unique_identifier_suffix'], previous_status=None) await send_airlock_notification_event(airlock_request, workspace, role_assignment_details) except Exception: await airlock_request_repo.delete_item(airlock_request.id) logger.exception("Failed sending status_changed message") raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=strings.EVENT_GRID_GENERAL_ERROR_MESSAGE) - - async def update_and_publish_event_airlock_request( airlock_request: AirlockRequest, airlock_request_repo: AirlockRequestRepository, @@ -327,10 +326,9 @@ async def update_and_publish_event_airlock_request( if not new_status: logger.debug(f"Skipping sending 'status changed' event for airlock request item: {airlock_request.id} - there is no status change") return updated_airlock_request - try: - logger.debug(f"Sending status changed event for airlock request item: {airlock_request.id}") - await send_status_changed_event(airlock_request=updated_airlock_request, 
previous_status=airlock_request.status) + logger.debug(f"Sending status changed event for airlock request item: {airlock_request.id}") + await send_status_changed_event(airlock_request=updated_airlock_request, workspace_unique_identifier_suffix=workspace.properties['unique_identifier_suffix'], previous_status=airlock_request.status) access_service = get_access_service() role_assignment_details = access_service.get_workspace_role_assignment_details(workspace) await send_airlock_notification_event(updated_airlock_request, workspace, role_assignment_details) diff --git a/api_app/tests_ma/test_api/test_routes/test_airlock.py b/api_app/tests_ma/test_api/test_routes/test_airlock.py index 592b04fea9..b0ada76904 100644 --- a/api_app/tests_ma/test_api/test_routes/test_airlock.py +++ b/api_app/tests_ma/test_api/test_routes/test_airlock.py @@ -283,8 +283,7 @@ async def test_get_airlock_container_link_cancelled_request_returns_400(self, _, airlock_request_id=AIRLOCK_REQUEST_ID)) assert response.status_code == status.HTTP_400_BAD_REQUEST - @patch("api.dependencies.workspaces.WorkspaceRepository.get_workspace_by_id", - return_value=sample_workspace(WORKSPACE_ID)) + @patch("api.dependencies.workspaces.WorkspaceRepository.get_workspace_by_id", return_value=sample_workspace(WORKSPACE_ID, {'unique_identifier_suffix': '123456'})) @patch("api.routes.airlock.AirlockRequestRepository.read_item_by_id", return_value=sample_airlock_request_object(status=AirlockRequestStatus.Approved)) @patch("services.airlock.validate_user_allowed_to_access_storage_account") @patch("services.airlock.get_airlock_request_container_sas_token", return_value="valid-sas-token") @@ -429,7 +428,7 @@ def inner(user): @pytest.mark.parametrize("role", (role for role in get_required_roles(endpoint=create_draft_request))) @patch("api.routes.workspaces.OperationRepository.resource_has_deployed_operation") - @patch("api.dependencies.workspaces.WorkspaceRepository.get_workspace_by_id", 
return_value=sample_workspace(WORKSPACE_ID)) + @patch("api.dependencies.workspaces.WorkspaceRepository.get_workspace_by_id", return_value=sample_workspace(WORKSPACE_ID, {'unique_identifier_suffix': '123456'})) @patch("api.routes.airlock.AirlockRequestRepository.read_item_by_id", return_value=sample_airlock_request_object(status=AirlockRequestStatus.Draft)) @patch("services.airlock.get_airlock_request_container_sas_token", return_value="valid-sas-token") async def test_get_airlock_container_link_is_accessible_to_every_role_that_can_create_request(self, _, __, ___, ____, app, client, log_in_with_user, role): diff --git a/api_app/tests_ma/test_api/test_routes/test_migrations.py b/api_app/tests_ma/test_api/test_routes/test_migrations.py index d49239d222..71b7a66148 100644 --- a/api_app/tests_ma/test_api/test_routes/test_migrations.py +++ b/api_app/tests_ma/test_api/test_routes/test_migrations.py @@ -45,13 +45,12 @@ def _prepare(self, app, admin_user): @ patch("api.routes.migrations.AirlockMigration.rename_field_name") @ patch("api.routes.migrations.AirlockMigration.change_review_resources_to_dict") @ patch("api.routes.migrations.AirlockMigration.update_review_decision_values") - @ patch("api.routes.migrations.ResourceMigration.migrate_step_id_of_operation_steps") - async def test_post_migrations_returns_202_on_successful(self, migrate_step_id_of_operation_steps, update_review_decision_values, + @ patch("api.routes.migrations.ResourceMigration.add_unique_identifier_suffix") + async def test_post_migrations_returns_202_on_successful(self, add_unique_identifier_suffix, update_review_decision_values, change_review_resources_to_dict, airlock_rename_field, add_created_by_and_rename_in_history, - check_min_firewall_version, workspace_migration, shared_services_migration, - rename_field, add_deployment_field, archive_history, _, logging, client, app): + check_min_firewall_version, workspace_migration, shared_services_migration, rename_field, + add_deployment_field, 
archive_history, _, logging, client, app): response = await client.post(app.url_path_for(strings.API_MIGRATE_DATABASE)) - check_min_firewall_version.assert_called_once() shared_services_migration.assert_called_once() workspace_migration.assert_called_once() @@ -63,6 +62,6 @@ async def test_post_migrations_returns_202_on_successful(self, migrate_step_id_o update_review_decision_values.assert_called_once() - migrate_step_id_of_operation_steps.assert_called_once() archive_history.assert_called_once() + add_unique_identifier_suffix.assert_called_once() logging.assert_called() assert response.status_code == status.HTTP_202_ACCEPTED diff --git a/api_app/tests_ma/test_service_bus/test_airlock_request_status_update.py b/api_app/tests_ma/test_service_bus/test_airlock_request_status_update.py index d9165db2f5..841d1927f5 100644 --- a/api_app/tests_ma/test_service_bus/test_airlock_request_status_update.py +++ b/api_app/tests_ma/test_service_bus/test_airlock_request_status_update.py @@ -26,6 +26,7 @@ def sample_workspace(): properties={ "display_name": "research workspace", "description": "research workspace", + "unique_identifier_suffix": "123456", "client_id": "12345" }, resourcePath="test") diff --git a/api_app/tests_ma/test_services/test_airlock.py b/api_app/tests_ma/test_services/test_airlock.py index d8a26a1df8..6ed2e9e9ac 100644 --- a/api_app/tests_ma/test_services/test_airlock.py +++ b/api_app/tests_ma/test_services/test_airlock.py @@ -44,6 +44,7 @@ def sample_workspace(): properties={ "client_id": "12345", "display_name": "my research workspace", + "unique_identifier_suffix": WORKSPACE_ID[-6:], "description": "for science!"}, resourcePath="test") @@ -85,7 +86,7 @@ def sample_airlock_user_resource_object(): def sample_status_changed_event(new_status="draft", previous_status=None): status_changed_event = EventGridEvent( event_type="statusChanged", data=StatusChangedData(request_id=AIRLOCK_REQUEST_ID, new_status=new_status, previous_status=previous_status, 
type=AirlockRequestType.Import, workspace_id=WORKSPACE_ID[-4:]).__dict__, + data=StatusChangedData(request_id=AIRLOCK_REQUEST_ID, new_status=new_status, previous_status=previous_status, type=AirlockRequestType.Import, workspace_unique_identifier_suffix=WORKSPACE_ID[-6:]).__dict__, subject=f"{AIRLOCK_REQUEST_ID}/statusChanged", data_version="2.0" )