diff --git a/CHANGELOG.md b/CHANGELOG.md index 4604eface..447f05921 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.54.1 (2026-01-21) +## 0.54.1 (2026-01-22) ### Features diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 33edff433..769c831fd 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -3,20 +3,35 @@ This documentation provides guidance on developer workflows for working with the code in this repository. Table of Contents: -* [Development Environment Setup](#development-environment-setup) -* [The Development Loop](#the-development-loop) -* [Documentation](#documentation) - * [Code Organization](#code-organization) -* [Testing](#testing) - * [Writing tests](#writing-tests) - * [Unit tests](#unit-tests) - * [Integration tests](#integration-tests) - * [Squish GUI Submitter tests](#squish-tests) -* [Changelog Guidelines](#changelog-guidelines) -* [Things to Know](#things-to-know) - * [Public contracts](#public-contracts) - * [Library Dependencies](#dependencies) - * [Qt and Calling AWS APIs](#qt-and-calling-aws-including-aws-deadline-cloud-apis) +- [Development documentation](#development-documentation) + - [Development Environment Setup](#development-environment-setup) + - [The Development Loop](#the-development-loop) + - [Documentation](#documentation) + - [Code Organization](#code-organization) + - [Testing](#testing) + - [Writing Tests](#writing-tests) + - [Unit Tests](#unit-tests) + - [Running Unit Tests](#running-unit-tests) + - [Running Docker-based Unit Tests](#running-docker-based-unit-tests) + - [Integration Tests](#integration-tests) + - [Running Integration Tests](#running-integration-tests) + - [Squish GUI Submitter Tests](#squish-gui-submitter-tests) + - [Running Squish GUI Submitter Tests](#running-squish-gui-submitter-tests) + - [Changelog Guidelines](#changelog-guidelines) + - [Things to Know](#things-to-know) + - [Public Contracts](#public-contracts) + - [Private Modules](#private-modules) + - [Public 
Modules](#public-modules) + - [On `import os as _os`](#on-import-os-as-_os) + - [Library Dependencies](#library-dependencies) + - [Why is a new dependency needed?](#why-is-a-new-dependency-needed) + - [Quality of the dependency](#quality-of-the-dependency) + - [Version Pinning](#version-pinning) + - [Licensing](#licensing) + - [Qt and Calling AWS (including AWS Deadline Cloud) APIs](#qt-and-calling-aws-including-aws-deadline-cloud-apis) + - [Pattern 1: Simple Async Operations (Recommended)](#pattern-1-simple-async-operations-recommended) + - [Pattern 2: Long-Running Operations with Progress](#pattern-2-long-running-operations-with-progress) +- [Profiling in Deadline Cloud](#profiling-in-deadline-cloud) ## Development Environment Setup @@ -378,66 +393,89 @@ for a signal from the application. If interacting with the GUI can start multiple background threads, you should also track which is the latest, so the code only applies the result of the newest operation. -See `deadline_config_dialog.py` for some examples that do all of the above. Here's some -code that was edited to show how it fits together: +See `deadline_config_dialog.py` for some examples that do all of the above. + +### Pattern 1: Simple Async Operations (Recommended) + +For simple fetch-and-display operations, use `AsyncTaskRunner`: ```python +from deadline.client.ui.controllers import AsyncTaskRunner + class MyCustomWidget(QWidget): - # Signals for the widget to receive from the thread - background_exception = Signal(str, BaseException) - update = Signal(int, BackgroundResult) - - def __init__(self, ...): - # Save information about the thread - self.__refresh_thread = None - self.__refresh_id = 0 - - # Use the CancelationFlag object to decouple the cancelation value - # from the window lifetime. 
- self.canceled = CancelationFlag() - self.destroyed.connect(self.canceled.set_canceled) - - # Connect the Signals to handler functions that run on the main thread - self.update.connect(self.handle_update) - self.background_exception.connect(self.handle_background_exception) - - def handle_background_exception(self, e: BaseException): - # Handle the error - QMessageBox.warning(...) - - def handle_update(self, refresh_id: int, result: BackgroundResult): - # Apply the refresh if it's still for the latest call - if refresh_id == self.__refresh_id: - # Do something with result - self.result_widget.set_message(result) + def __init__(self, ...): + self._runner = AsyncTaskRunner(self) + self._runner.task_error.connect(self._on_error, Qt.QueuedConnection) def start_the_refresh(self): - # This function starts the thread to run in the background - - # Update the GUI state to reflect the update self.result_widget.set_refreshing_status(True) - - self.__refresh_id += 1 - self.__refresh_thread = threading.Thread( - target=self._refresh_thread_function, - name=f"AWS Deadline Cloud Refresh Thread", - args=(self.__refresh_id,), + self._runner.run( + operation_key="my_refresh", + fn=self._fetch_data, + on_success=self._handle_result, + on_error=self._handle_error, ) - self.__refresh_thread.start() - - def _refresh_thread_function(self, refresh_id: int): - # This function is for the background thread - try: - # Call the slow operations - result = boto3_client.potentially_expensive_api(...) - # Only emit the result if it isn't canceled - if not self.canceled: - self.update.emit(refresh_id, result) - except BaseException as e: - # Use multiple signals for different meanings, such as handling errors. - if not self.canceled: - self.background_exception.emit(f"Background thread error", e) + def _fetch_data(self): + # This runs in background thread + return boto3_client.potentially_expensive_api(...) 
+ + def _handle_result(self, result): + self.result_widget.set_refreshing_status(False) + self.result_widget.set_message(result) + + def _handle_error(self, error): + self.result_widget.set_refreshing_status(False) + QMessageBox.warning(self, "Error", str(error)) +``` + +### Pattern 2: Long-Running Operations with Progress + +For complex operations with progress callbacks, use a `QThread` subclass: + +```python +from qtpy.QtCore import QThread, Signal, Qt + +class MyWorker(QThread): + progress = Signal(int, str) # percent, message + succeeded = Signal(object) + failed = Signal(BaseException) + + def __init__(self, parent=None): + super().__init__(parent) + self._canceled = False + + def cancel(self): + self._canceled = True + + def run(self): + try: + for i, item in enumerate(items): + if self._canceled: + return + self.progress.emit(i * 100 // len(items), f"Processing {item}") + process(item) + self.succeeded.emit(result) + except Exception as e: + if not self._canceled: + self.failed.emit(e) + + +class MyCustomWidget(QWidget): + def __init__(self, ...): + self._worker = MyWorker(self) + self._worker.progress.connect(self._on_progress, Qt.QueuedConnection) + self._worker.succeeded.connect(self._on_success, Qt.QueuedConnection) + self._worker.failed.connect(self._on_error, Qt.QueuedConnection) + + def start_the_operation(self): + self._worker.start() + + def closeEvent(self, event): + if self._worker.isRunning(): + self._worker.cancel() + self._worker.wait() + super().closeEvent(event) ``` # Profiling in Deadline Cloud diff --git a/requirements-integ-testing.txt b/requirements-integ-testing.txt index 6fa424ee2..f84968040 100644 --- a/requirements-integ-testing.txt +++ b/requirements-integ-testing.txt @@ -1,4 +1,4 @@ -deadline-cloud-test-fixtures == 0.18.8 # Pinning due to a regression in 0.18.9 +deadline-cloud-test-fixtures ~= 0.18.10 # MCP (Model Context Protocol) library for MCP server integration tests mcp >= 1.13.0 pytest-asyncio == 1.* \ No newline at end of 
file diff --git a/requirements-testing.txt b/requirements-testing.txt index 083bfe8fe..46462c3be 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -7,6 +7,9 @@ pytest-cov == 7.*; python_version > '3.8' pytest-cov == 5.*; python_version <= '3.8' pytest-timeout == 2.* pytest-xdist == 3.* +pytest-qt == 4.* +# GUI testing dependencies +PySide6-essentials >= 6.6,< 6.11 freezegun == 1.* types-pyyaml == 6.* twine == 4.*; python_version == '3.7' diff --git a/scripts/attributions/approved_text/Darwin/THIRD_PARTY_LICENSES b/scripts/attributions/approved_text/Darwin/THIRD_PARTY_LICENSES index 13a05afe1..e899df759 100644 --- a/scripts/attributions/approved_text/Darwin/THIRD_PARTY_LICENSES +++ b/scripts/attributions/approved_text/Darwin/THIRD_PARTY_LICENSES @@ -502,26 +502,27 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. jmespath +MIT License + Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, dis- -tribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the fol- -lowing conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. 
+The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- -ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. packaging diff --git a/scripts/attributions/approved_text/Linux/THIRD_PARTY_LICENSES b/scripts/attributions/approved_text/Linux/THIRD_PARTY_LICENSES index bcc2bef8b..69c043512 100644 --- a/scripts/attributions/approved_text/Linux/THIRD_PARTY_LICENSES +++ b/scripts/attributions/approved_text/Linux/THIRD_PARTY_LICENSES @@ -502,26 +502,27 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. jmespath +MIT License + Copyright (c) 2013 Amazon.com, Inc. or its affiliates. 
All Rights Reserved -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, dis- -tribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the fol- -lowing conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- -ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. packaging diff --git a/scripts/attributions/approved_text/Windows/THIRD_PARTY_LICENSES b/scripts/attributions/approved_text/Windows/THIRD_PARTY_LICENSES index 94bce395e..1d3246417 100644 --- a/scripts/attributions/approved_text/Windows/THIRD_PARTY_LICENSES +++ b/scripts/attributions/approved_text/Windows/THIRD_PARTY_LICENSES @@ -532,26 +532,27 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. jmespath +MIT License + Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, dis- -tribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the fol- -lowing conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- -ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. packaging diff --git a/scripts/attributions/cli.py b/scripts/attributions/cli.py index c08de96d1..bb7edda08 100644 --- a/scripts/attributions/cli.py +++ b/scripts/attributions/cli.py @@ -68,7 +68,7 @@ "spdx": _BSD_3_CLAUSE, }, "jmespath": { - "license_sha256": "66b313cce80ed0623fc7db3f24863a0c80fd83eb341a46b57864158ae74faa56", + "license_sha256": "6eefacfa4d71b82d08408c751470ac8d9854538da2142cb27be0287fb13d0ab9", "spdx": _MIT, }, "packaging": { diff --git a/src/deadline/client/ui/_utils.py b/src/deadline/client/ui/_utils.py index a0905ed60..148234055 100644 --- a/src/deadline/client/ui/_utils.py +++ b/src/deadline/client/ui/_utils.py @@ -234,6 +234,14 @@ class CancelationFlag: function of the class. With this object, you can bind it to the cancelation flag's set_canceled method instead. + .. deprecated:: + This class is deprecated and will be removed in a future release. 
+ Use Qt's native threading mechanisms instead: + - For simple async operations, use `AsyncTaskRunner` from + `deadline.client.ui.controllers` + - For complex operations with progress callbacks, use a + `QThread` subclass with signals + Example usage: class MyWidget(QWidget): @@ -251,6 +259,15 @@ def _my_thread_function(self): """ def __init__(self): + import warnings + + warnings.warn( + "CancelationFlag is deprecated and will be removed in a future release. " + "Use AsyncTaskRunner from deadline.client.ui.controllers for simple async operations, " + "or a QThread subclass with signals for complex operations with progress callbacks.", + DeprecationWarning, + stacklevel=2, + ) self.canceled = False def set_canceled(self): diff --git a/src/deadline/client/ui/controllers/__init__.py b/src/deadline/client/ui/controllers/__init__.py new file mode 100644 index 000000000..00b124d53 --- /dev/null +++ b/src/deadline/client/ui/controllers/__init__.py @@ -0,0 +1,46 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Controllers for Deadline Cloud UI operations. + +This module provides the controller layer that separates business logic +from UI components. All async API operations should go through these +controllers to ensure proper ordering and thread safety. + +The key components are: + +- :class:`DeadlineUIController`: Central controller managing all async API + operations. Widgets connect to its signals rather than making API calls directly. + +- :class:`AsyncTaskRunner`: Manages background task execution with automatic + cancellation of superseded operations. + +- :class:`AsyncTask`: A QRunnable for executing callables in the thread pool + with proper signal emission. + +- :class:`DeadlineThreadPool`: Dedicated thread pool for Deadline operations, + isolated from other Qt background work. 
+ +Example usage:: + + from deadline.client.ui.controllers import DeadlineUIController + from qtpy.QtCore import Qt + + controller = DeadlineUIController.getInstance() + controller.farms_updated.connect(self._on_farms_updated, Qt.QueuedConnection) + controller.refresh_farms() +""" + +from ._async_task import AsyncTask as AsyncTask +from ._async_task import WorkerSignals as WorkerSignals +from ._async_runner import AsyncTaskRunner as AsyncTaskRunner +from ._deadline_controller import DeadlineUIController as DeadlineUIController +from ._thread_pool import DeadlineThreadPool as DeadlineThreadPool + +__all__ = [ + "AsyncTask", + "AsyncTaskRunner", + "DeadlineThreadPool", + "DeadlineUIController", + "WorkerSignals", +] diff --git a/src/deadline/client/ui/controllers/_async_runner.py b/src/deadline/client/ui/controllers/_async_runner.py new file mode 100644 index 000000000..a09b609a7 --- /dev/null +++ b/src/deadline/client/ui/controllers/_async_runner.py @@ -0,0 +1,161 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +High-level async task management with automatic cancellation. +""" + +from logging import getLogger +from typing import Any, Callable, Dict, Optional + +from qtpy.QtCore import QObject, Qt, Signal + +from ._async_task import AsyncTask +from ._thread_pool import DeadlineThreadPool + + +logger = getLogger(__name__) + + +class AsyncTaskRunner(QObject): + """ + Manages async task execution with automatic cancellation of superseded operations. + + When a new task is started with the same operation_key, any previous task + with that key is automatically canceled. This prevents race conditions + where an older, slower request returns after a newer one. + + All signal connections use Qt.QueuedConnection to ensure thread-safe + delivery from background threads to the main thread. 
+ + Example:: + + from deadline.client.ui.controllers import AsyncTaskRunner + + runner = AsyncTaskRunner() + runner.run( + operation_key="list_farms", + fn=api.list_farms, + on_success=self._handle_farms, + on_error=self._handle_error, + config=config, # kwargs passed to fn + ) + + Signals: + task_error: Emitted when any task encounters an error. + Args: (operation_key, exception) + """ + + task_error = Signal(str, BaseException) + + def __init__(self, parent: Optional[QObject] = None) -> None: + super().__init__(parent) + self._thread_pool = DeadlineThreadPool.instance() + self._active_tasks: Dict[str, AsyncTask] = {} + self._operation_counter = 0 + + def run( + self, + operation_key: str, + fn: Callable[..., Any], + on_success: Optional[Callable[[Any], None]] = None, + on_error: Optional[Callable[[BaseException], None]] = None, + *args: Any, + **kwargs: Any, + ) -> int: + """ + Run a function asynchronously in the dedicated thread pool. + + Args: + operation_key: Unique key identifying this operation type. + If a task with the same key is running, it + will be canceled before starting the new one. + fn: The function to execute in the background + on_success: Callback invoked with fn's return value on success. + Called in the main thread via queued connection. + on_error: Callback invoked with the exception on failure. + Called in the main thread via queued connection. 
+ *args: Positional arguments passed to fn + **kwargs: Keyword arguments passed to fn + + Returns: + Operation ID for tracking this specific task instance + """ + # Cancel any existing task with the same key + self.cancel(operation_key) + + # Generate unique operation ID + self._operation_counter += 1 + operation_id = self._operation_counter + + # Create the task + task = AsyncTask(fn, *args, operation_id=operation_id, **kwargs) + + # Connect success callback with queued connection for thread safety + if on_success is not None: + task.signals.result.connect(on_success, Qt.QueuedConnection) + + # Connect error callbacks + if on_error is not None: + task.signals.error.connect(on_error, Qt.QueuedConnection) + + # Always emit to our error signal for centralized logging + # Use a closure to capture operation_key + def emit_task_error(e: BaseException) -> None: + self.task_error.emit(operation_key, e) + + task.signals.error.connect(emit_task_error, Qt.QueuedConnection) + + # Clean up tracking when task finishes + # Use a closure to capture operation_key + def cleanup() -> None: + self._active_tasks.pop(operation_key, None) + + task.signals.finished.connect(cleanup, Qt.QueuedConnection) + + # Track and start + self._active_tasks[operation_key] = task + self._thread_pool.start(task) + + logger.debug(f"Started async task: {operation_key} (id={operation_id})") + return operation_id + + def cancel(self, operation_key: str) -> bool: + """ + Cancel a running task by operation key. + + Args: + operation_key: The key of the operation to cancel + + Returns: + True if a task was canceled, False if no task was running + """ + task = self._active_tasks.pop(operation_key, None) + if task is not None: + task.cancel() + logger.debug(f"Canceled async task: {operation_key}") + return True + return False + + def cancel_all(self) -> int: + """ + Cancel all running tasks. 
+ + Returns: + Number of tasks that were canceled + """ + count = len(self._active_tasks) + for task in self._active_tasks.values(): + task.cancel() + self._active_tasks.clear() + if count > 0: + logger.debug(f"Canceled {count} async tasks") + return count + + def is_running(self, operation_key: str) -> bool: + """Check if a task with the given key is currently running.""" + return operation_key in self._active_tasks + + @property + def active_task_count(self) -> int: + """Number of currently active tasks.""" + return len(self._active_tasks) diff --git a/src/deadline/client/ui/controllers/_async_task.py b/src/deadline/client/ui/controllers/_async_task.py new file mode 100644 index 000000000..8c2bb7f77 --- /dev/null +++ b/src/deadline/client/ui/controllers/_async_task.py @@ -0,0 +1,126 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Async task execution using Qt's threading primitives. + +This module provides a clean pattern for running background operations +with proper Qt signal integration and automatic cancellation handling. +""" + +from typing import Any, Callable, Optional + +from qtpy.QtCore import QObject, QRunnable, Signal + + +class WorkerSignals(QObject): + """ + Signals for QRunnable workers. + + QRunnable doesn't inherit from QObject, so we need a separate + signals class that can be attached to the runnable. + + Signals: + finished: Emitted when the task completes (success or failure) + error: Emitted with the exception when the task fails + result: Emitted with the return value when the task succeeds + """ + + finished = Signal() + error = Signal(BaseException) + result = Signal(object) + + +class AsyncTask(QRunnable): + """ + A QRunnable that executes a callable and emits signals on completion. + + This class provides a clean way to run functions in a background thread + while communicating results back to the main thread via Qt signals. 
+ + The task supports cancellation - when canceled, no signals will be + emitted even if the underlying function completes. + + Signals are emitted from the background thread, so connections should + use Qt.QueuedConnection for thread-safe delivery to the main thread. + + Example:: + + from qtpy.QtCore import Qt + from deadline.client.ui.controllers import AsyncTask, DeadlineThreadPool + + def fetch_data(): + return api.list_farms() + + task = AsyncTask(fetch_data) + task.signals.result.connect(handle_result, Qt.QueuedConnection) + task.signals.error.connect(handle_error, Qt.QueuedConnection) + DeadlineThreadPool.instance().start(task) + + Args: + fn: The callable to execute in the background + *args: Positional arguments for fn + operation_id: Optional ID for tracking/cancellation + **kwargs: Keyword arguments for fn + """ + + def __init__( + self, + fn: Callable[..., Any], + *args: Any, + operation_id: Optional[int] = None, + **kwargs: Any, + ) -> None: + super().__init__() + self.fn = fn + self.args = args + self.kwargs = kwargs + self.operation_id = operation_id + self.signals = WorkerSignals() + self._is_canceled = False + + # Allow thread pool to clean up automatically + self.setAutoDelete(True) + + def cancel(self) -> None: + """ + Mark this task as canceled. + + The task checks this flag before emitting signals, preventing + stale results from being delivered after cancellation. + + Note that this does not interrupt the running function - it only + prevents signal emission after the function completes. + """ + self._is_canceled = True + + @property + def is_canceled(self) -> bool: + """Check if this task has been canceled.""" + return self._is_canceled + + def run(self) -> None: + """ + Execute the task in the thread pool. + + This method runs in a background thread. All signal emissions + are guarded by cancellation checks to prevent race conditions. + + The execution flow is: + 1. Check if canceled before starting + 2. Execute the function + 3. 
Check if canceled before emitting result/error + 4. Emit finished signal (if not canceled) + """ + if self._is_canceled: + return + + try: + result = self.fn(*self.args, **self.kwargs) + if not self._is_canceled: + self.signals.result.emit(result) + except Exception as e: + if not self._is_canceled: + self.signals.error.emit(e) + finally: + if not self._is_canceled: + self.signals.finished.emit() diff --git a/src/deadline/client/ui/controllers/_deadline_controller.py b/src/deadline/client/ui/controllers/_deadline_controller.py new file mode 100644 index 000000000..cfdc7dd63 --- /dev/null +++ b/src/deadline/client/ui/controllers/_deadline_controller.py @@ -0,0 +1,446 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Central controller for Deadline Cloud UI operations. + +This controller manages all async API operations, ensuring proper +ordering of dependent calls and preventing race conditions. +""" + +from configparser import ConfigParser +from logging import getLogger +from typing import List, Optional, Tuple +import sys + +from qtpy.QtCore import QObject, Qt, Signal + +from ... import api +from ...job_bundle.parameters import JobParameter +from ._async_runner import AsyncTaskRunner + + +logger = getLogger(__name__) + + +# Type alias for resource lists: [(display_name, id), ...] +ResourceList = List[Tuple[str, str]] + + +class DeadlineUIController(QObject): + """ + Central controller for Deadline Cloud UI operations. + + All API calls should go through this controller to ensure: + - Proper ordering of dependent operations (farm -> queue -> storage profile) + - Automatic cancellation of superseded requests + - Thread-safe signal delivery to UI components + - Centralized error handling + + Widgets should connect to this controller's signals rather than + making API calls directly. 
class DeadlineUIController(QObject):
    """
    Singleton controller that loads Deadline Cloud resource lists (farms,
    queues, storage profiles, queue parameters) on background threads via
    AsyncTaskRunner and publishes the results through Qt signals.

    Example::

        from deadline.client.ui.controllers import DeadlineUIController
        from qtpy.QtCore import Qt

        controller = DeadlineUIController.getInstance()
        controller.farms_updated.connect(
            self._on_farms_updated, Qt.QueuedConnection
        )
        controller.refresh_farms()

    Signals:
        farms_updated: Emitted when farm list is updated. Args: [(name, farm_id), ...]
        queues_updated: Emitted when queue list is updated. Args: [(name, queue_id), ...]
        storage_profiles_updated: Emitted when storage profiles are updated.
        queue_parameters_updated: Emitted when queue parameters are loaded.
        farms_loading: Emitted when farm loading state changes. Args: bool
        queues_loading: Emitted when queue loading state changes. Args: bool
        storage_profiles_loading: Emitted when storage profile loading state changes.
        queue_parameters_loading: Emitted when queue parameter loading state changes.
        operation_failed: Emitted when any operation fails. Args: (operation_name, exception)
    """

    # Resource list signals -- each carries the freshly fetched list of
    # (display_name, resource_id) tuples.
    farms_updated = Signal(list)
    queues_updated = Signal(list)
    storage_profiles_updated = Signal(list)
    queue_parameters_updated = Signal(list)

    # Loading state signals -- True when a fetch starts, False when it ends.
    farms_loading = Signal(bool)
    queues_loading = Signal(bool)
    storage_profiles_loading = Signal(bool)
    queue_parameters_loading = Signal(bool)

    # Emitted when any operation fails: (operation_name, exception).
    operation_failed = Signal(str, BaseException)

    # Singleton storage; created lazily by getInstance().
    _instance: Optional["DeadlineUIController"] = None

    @classmethod
    def getInstance(cls) -> "DeadlineUIController":
        """Get the singleton controller instance, creating it on first use."""
        if cls._instance is None:
            # Use cls() rather than a hard-coded class name so a subclass
            # calling getInstance() receives an instance of itself.
            cls._instance = cls()
        return cls._instance

    @classmethod
    def resetInstance(cls) -> None:
        """
        Reset the singleton instance.

        Shuts down the existing instance and clears it.
        A new instance will be created on next getInstance() call.
        Primarily useful for testing.
        """
        if cls._instance is not None:
            cls._instance.shutdown()
            cls._instance = None

    def __init__(self, parent: Optional[QObject] = None) -> None:
        super().__init__(parent)

        self._task_runner = AsyncTaskRunner(self)
        # Queued connection so failures surface on the Qt event loop thread.
        self._task_runner.task_error.connect(self.operation_failed.emit, Qt.QueuedConnection)

        # Configuration used for API calls; None means "use the default".
        self._config: Optional[ConfigParser] = None

        # Current selection state (for cascading refreshes).
        self._current_farm_id: str = ""
        self._current_queue_id: str = ""

    # ─────────────────────────────────────────────────────────────
    # Configuration
    # ─────────────────────────────────────────────────────────────

    def set_config(self, config: Optional[ConfigParser]) -> None:
        """
        Update the configuration used for API calls.

        Args:
            config: ConfigParser instance, or None to use default config
        """
        if config is not None:
            # Deep-copy so later mutations of the caller's parser do not
            # leak into in-flight background operations.
            self._config = ConfigParser()
            self._config.read_dict(config)
        else:
            self._config = None

    @property
    def config(self) -> Optional[ConfigParser]:
        """Current configuration (a private copy), or None for the default."""
        return self._config

    @property
    def current_farm_id(self) -> str:
        """Currently selected farm ID ("" when none is selected)."""
        return self._current_farm_id

    @property
    def current_queue_id(self) -> str:
        """Currently selected queue ID ("" when none is selected)."""
        return self._current_queue_id

    # ─────────────────────────────────────────────────────────────
    # Shared error handling
    # ─────────────────────────────────────────────────────────────

    def _handle_fetch_error(
        self,
        resource: str,
        error: BaseException,
        loading_signal,
        updated_signal,
        expect_not_found: bool = True,
    ) -> None:
        """
        Shared error handling for all resource fetches: log, clear the
        loading flag, and publish an empty list.

        AccessDeniedException (invalid/expired credentials) and, where
        expect_not_found is True, ResourceNotFoundException (stale IDs
        after a profile switch) are expected and logged at debug level
        instead of as exceptions.
        """
        error_name = type(error).__name__
        expected = "AccessDeniedException" in error_name or (
            expect_not_found and "ResourceNotFoundException" in error_name
        )
        if expected:
            logger.debug(f"Could not fetch {resource}: {error}")
        else:
            logger.exception(f"Failed to fetch {resource}", exc_info=error)
        loading_signal.emit(False)
        updated_signal.emit([])

    # ─────────────────────────────────────────────────────────────
    # Farm Operations
    # ─────────────────────────────────────────────────────────────

    def refresh_farms(self) -> None:
        """Fetch the list of farms asynchronously."""
        self.farms_loading.emit(True)
        self._task_runner.run(
            operation_key="list_farms",
            fn=self._fetch_farms,
            on_success=self._on_farms_success,
            on_error=self._on_farms_error,
        )

    def _fetch_farms(self) -> ResourceList:
        """Fetch farms from API. Runs in background thread."""
        response = api.list_farms(config=self._config)
        # Sort case-insensitively by display name, with the ID as a
        # deterministic tie-breaker.
        return sorted(
            [(item["displayName"], item["farmId"]) for item in response["farms"]],
            key=lambda item: (item[0].casefold(), item[1]),
        )

    def _on_farms_success(self, farms: ResourceList) -> None:
        """Handle successful farm list fetch."""
        self.farms_loading.emit(False)
        self.farms_updated.emit(farms)

    def _on_farms_error(self, error: BaseException) -> None:
        """Handle farm list fetch error."""
        # Only AccessDeniedException is "expected" here; a farm listing
        # takes no resource IDs, so ResourceNotFoundException is not.
        self._handle_fetch_error(
            "farms", error, self.farms_loading, self.farms_updated, expect_not_found=False
        )

    # ─────────────────────────────────────────────────────────────
    # Queue Operations
    # ─────────────────────────────────────────────────────────────

    def refresh_queues(self, farm_id: Optional[str] = None) -> None:
        """
        Fetch the list of queues for a farm.

        Args:
            farm_id: Farm ID to fetch queues for. If None, uses current farm.
        """
        if farm_id is None:
            farm_id = self._current_farm_id

        if not farm_id:
            # No farm selected: publish an empty list without an API call.
            self.queues_updated.emit([])
            return

        self.queues_loading.emit(True)
        self._task_runner.run(
            operation_key="list_queues",
            fn=self._fetch_queues,
            on_success=self._on_queues_success,
            on_error=self._on_queues_error,
            farm_id=farm_id,
        )

    def _fetch_queues(self, farm_id: str) -> ResourceList:
        """Fetch queues from API. Runs in background thread."""
        response = api.list_queues(config=self._config, farmId=farm_id)
        return sorted(
            [(item["displayName"], item["queueId"]) for item in response["queues"]],
            key=lambda item: (item[0].casefold(), item[1]),
        )

    def _on_queues_success(self, queues: ResourceList) -> None:
        """Handle successful queue list fetch."""
        self.queues_loading.emit(False)
        self.queues_updated.emit(queues)

    def _on_queues_error(self, error: BaseException) -> None:
        """Handle queue list fetch error."""
        self._handle_fetch_error("queues", error, self.queues_loading, self.queues_updated)

    # ─────────────────────────────────────────────────────────────
    # Storage Profile Operations
    # ─────────────────────────────────────────────────────────────

    def refresh_storage_profiles(
        self,
        farm_id: Optional[str] = None,
        queue_id: Optional[str] = None,
    ) -> None:
        """
        Fetch storage profiles for a queue.

        Args:
            farm_id: Farm ID. If None, uses current farm.
            queue_id: Queue ID. If None, uses current queue.
        """
        if farm_id is None:
            farm_id = self._current_farm_id
        if queue_id is None:
            queue_id = self._current_queue_id

        if not farm_id or not queue_id:
            self.storage_profiles_updated.emit([])
            return

        self.storage_profiles_loading.emit(True)
        self._task_runner.run(
            operation_key="list_storage_profiles",
            fn=self._fetch_storage_profiles,
            on_success=self._on_storage_profiles_success,
            on_error=self._on_storage_profiles_error,
            farm_id=farm_id,
            queue_id=queue_id,
        )

    def _fetch_storage_profiles(self, farm_id: str, queue_id: str) -> ResourceList:
        """Fetch storage profiles from API. Runs in background thread."""
        # Determine current OS for filtering; only profiles matching the
        # local platform's osFamily are shown.
        if sys.platform.startswith("linux"):
            current_os = "linux"
        elif sys.platform.startswith("darwin"):
            current_os = "macos"
        elif sys.platform.startswith("win"):
            current_os = "windows"
        else:
            current_os = "unknown"

        response = api.list_storage_profiles_for_queue(
            config=self._config, farmId=farm_id, queueId=queue_id
        )

        profiles: ResourceList = []
        for item in response.get("storageProfiles", []):
            if item.get("osFamily", "").lower() == current_os:
                profiles.append((item["displayName"], item["storageProfileId"]))

        # Add "none selected" option; the empty string sorts first, so it
        # stays at the top after the sort below.
        profiles.insert(0, ("", ""))

        return sorted(profiles, key=lambda item: (item[0].casefold(), item[1]))

    def _on_storage_profiles_success(self, profiles: ResourceList) -> None:
        """Handle successful storage profile fetch."""
        self.storage_profiles_loading.emit(False)
        self.storage_profiles_updated.emit(profiles)

    def _on_storage_profiles_error(self, error: BaseException) -> None:
        """Handle storage profile fetch error."""
        self._handle_fetch_error(
            "storage profiles", error, self.storage_profiles_loading, self.storage_profiles_updated
        )

    # ─────────────────────────────────────────────────────────────
    # Queue Parameters Operations
    # ─────────────────────────────────────────────────────────────

    def refresh_queue_parameters(
        self,
        farm_id: Optional[str] = None,
        queue_id: Optional[str] = None,
    ) -> None:
        """
        Fetch queue parameter definitions.

        Args:
            farm_id: Farm ID. If None, uses current farm.
            queue_id: Queue ID. If None, uses current queue.
        """
        if farm_id is None:
            farm_id = self._current_farm_id
        if queue_id is None:
            queue_id = self._current_queue_id

        if not farm_id or not queue_id:
            self.queue_parameters_updated.emit([])
            return

        self.queue_parameters_loading.emit(True)
        self._task_runner.run(
            operation_key="get_queue_parameters",
            fn=self._fetch_queue_parameters,
            on_success=self._on_queue_parameters_success,
            on_error=self._on_queue_parameters_error,
            farm_id=farm_id,
            queue_id=queue_id,
        )

    def _fetch_queue_parameters(self, farm_id: str, queue_id: str) -> List[JobParameter]:
        """Fetch queue parameters from API. Runs in background thread."""
        return api.get_queue_parameter_definitions(
            config=self._config, farmId=farm_id, queueId=queue_id
        )

    def _on_queue_parameters_success(self, parameters: List[JobParameter]) -> None:
        """Handle successful queue parameters fetch."""
        self.queue_parameters_loading.emit(False)
        self.queue_parameters_updated.emit(parameters)

    def _on_queue_parameters_error(self, error: BaseException) -> None:
        """Handle queue parameters fetch error."""
        self._handle_fetch_error(
            "queue parameters", error, self.queue_parameters_loading, self.queue_parameters_updated
        )

    # ─────────────────────────────────────────────────────────────
    # Cascading Selection Handlers
    # ─────────────────────────────────────────────────────────────

    def on_farm_selected(self, farm_id: str) -> None:
        """
        Handle farm selection change.

        Triggers cascading refresh of dependent resources:
        farm -> queues -> storage profiles, queue parameters

        Args:
            farm_id: The newly selected farm ID
        """
        if farm_id == self._current_farm_id:
            return

        self._current_farm_id = farm_id
        self._current_queue_id = ""

        # Clear dependent data immediately; fresh lists arrive async.
        self.storage_profiles_updated.emit([])
        self.queue_parameters_updated.emit([])

        # Refresh queues for new farm
        self.refresh_queues(farm_id)

    def on_queue_selected(self, queue_id: str) -> None:
        """
        Handle queue selection change.

        Triggers refresh of queue-dependent resources.

        Args:
            queue_id: The newly selected queue ID
        """
        if queue_id == self._current_queue_id:
            return

        self._current_queue_id = queue_id

        # Refresh dependent data
        self.refresh_storage_profiles()
        self.refresh_queue_parameters()

    # ─────────────────────────────────────────────────────────────
    # Lifecycle
    # ─────────────────────────────────────────────────────────────

    def shutdown(self) -> None:
        """
        Shutdown the controller and cancel pending operations.

        Call this when closing dialogs or shutting down the application.
        """
        self._task_runner.cancel_all()

    def cancel_all_operations(self) -> None:
        """Cancel all pending async operations."""
        self._task_runner.cancel_all()
class DeadlineThreadPool:
    """
    Lazily created singleton QThreadPool reserved for Deadline Cloud
    operations.

    Keeping Deadline API calls on their own pool isolates them from any
    other Qt background work the host application performs, which keeps
    performance predictable and debugging simpler. The pool is built on
    first access with a modest default thread count suited to API calls.

    Example::

        from deadline.client.ui.controllers import DeadlineThreadPool

        pool = DeadlineThreadPool.instance()
        pool.start(my_runnable)
    """

    _instance: Optional[QThreadPool] = None

    # Default cap on concurrent threads; reasonable for concurrent API calls.
    DEFAULT_MAX_THREADS = 4

    @classmethod
    def instance(cls) -> QThreadPool:
        """
        Return the shared QThreadPool for Deadline operations, creating
        and configuring it on first access.

        Returns:
            The shared QThreadPool instance for Deadline operations.
        """
        pool = cls._instance
        if pool is None:
            pool = QThreadPool()
            pool.setMaxThreadCount(cls.DEFAULT_MAX_THREADS)
            cls._instance = pool
        return pool

    @classmethod
    def set_max_threads(cls, count: int) -> None:
        """
        Configure the maximum number of concurrent threads.

        Args:
            count: Maximum thread count (must be >= 1)

        Raises:
            ValueError: If count is less than 1
        """
        if count < 1:
            raise ValueError("Thread count must be at least 1")
        cls.instance().setMaxThreadCount(count)

    @classmethod
    def active_thread_count(cls) -> int:
        """
        Return how many pool threads are currently executing tasks
        (0 when the pool has never been created).
        """
        pool = cls._instance
        return 0 if pool is None else pool.activeThreadCount()

    @classmethod
    def shutdown(cls, wait_for_done: bool = True, timeout_ms: int = 5000) -> bool:
        """
        Shutdown the thread pool.

        Args:
            wait_for_done: If True, wait for running tasks to complete
            timeout_ms: Maximum time to wait in milliseconds

        Returns:
            True if all tasks completed (or no pool exists),
            False if timeout occurred while waiting
        """
        pool = cls._instance
        if pool is None:
            return True
        if wait_for_done:
            # The instance is discarded even on timeout; a later access
            # builds a fresh pool while stragglers finish on the old one.
            finished = pool.waitForDone(timeout_ms)
            cls._instance = None
            return finished
        pool.clear()
        cls._instance = None
        return True

    @classmethod
    def reset(cls) -> None:
        """
        Reset the thread pool instance: drop queued tasks, briefly wait
        for running ones, then discard the pool so the next access
        creates a fresh one. Primarily useful for testing.
        """
        pool = cls._instance
        if pool is not None:
            pool.clear()
            pool.waitForDone(1000)
            cls._instance = None
import api from ..config import config_file +from .controllers import AsyncTaskRunner logger = getLogger(__name__) @@ -78,6 +78,10 @@ def __init__(self, parent: Optional[QWidget] = None) -> None: self.__auth_status: Optional[api.AwsAuthenticationStatus] = None self.__api_availability: Optional[bool] = None + # Use AsyncTaskRunner for background API calls + self._runner = AsyncTaskRunner(self) + self._runner.task_error.connect(self._handle_task_error, Qt.QueuedConnection) + # Load the default config self.config = ConfigParser() self.config.read_dict(config_file.read_config()) @@ -103,6 +107,12 @@ def __init__(self, parent: Optional[QWidget] = None) -> None: self.refresh_status() + def _handle_task_error(self, operation_name: str, error: BaseException) -> None: + """Handle errors from async tasks.""" + logger.exception(f"Error in {operation_name}: {error}") + + self.refresh_status() + def set_config(self, config: Optional[ConfigParser]) -> None: """ Changes the AWS Deadline Cloud configuration object used to display authentication @@ -152,16 +162,23 @@ def files_changed(self, changed_path) -> None: # file if changed_path in self.aws_creds_paths: logger.info(f"Path {changed_path} changed, refreshing authentication status") - # Send it to another thread to avoid blocking the Qt event loop - self.session_thread = threading.Thread( - target=self._get_session, kwargs={"changed_path": changed_path} + # Use AsyncTaskRunner to avoid blocking the Qt event loop + self._runner.run( + operation_key="get_session", + fn=self._get_session_background, + on_success=lambda _: self._on_session_refreshed(changed_path), + on_error=lambda e: logger.exception(f"Error refreshing session: {e}"), ) - self.session_thread.start() else: logger.info(f"Path {changed_path} changed, does not affect authentication status") - def _get_session(self, changed_path): + def _get_session_background(self): + """Background task to refresh boto3 session.""" api.get_boto3_session(force_refresh=True) + return 
True + + def _on_session_refreshed(self, changed_path): + """Called after session is refreshed.""" self.refresh_status() if changed_path in self.aws_creds_paths: @@ -169,39 +186,79 @@ def _get_session(self, changed_path): elif changed_path in self.deadline_config_paths: self.deadline_config_changed.emit() - def _refresh_creds_source(self) -> None: - self.__creds_source = None + def _refresh_creds_source(self) -> api.AwsCredentialsSource: + """Background task to get credentials source.""" + return api.get_credentials_source(config=self.config) + + def _on_creds_source_success(self, result: api.AwsCredentialsSource) -> None: + """Handle successful credentials source fetch.""" + self.__creds_source = result self.creds_source_changed.emit() - self.__creds_source = api.get_credentials_source(config=self.config) + + def _on_creds_source_error(self, error: BaseException) -> None: + """Handle credentials source fetch error.""" + logger.exception(error) + self.__creds_source = None self.creds_source_changed.emit() - def _refresh_auth_status(self) -> None: - self.__auth_status = None + def _refresh_auth_status(self) -> api.AwsAuthenticationStatus: + """Background task to check authentication status.""" + return api.check_authentication_status(config=self.config) + + def _on_auth_status_success(self, result: api.AwsAuthenticationStatus) -> None: + """Handle successful authentication status check.""" + self.__auth_status = result self.auth_status_changed.emit() - try: - self.__auth_status = api.check_authentication_status(config=self.config) - except BaseException as e: - logger.exception(e) - self.__auth_status = api.AwsAuthenticationStatus.CONFIGURATION_ERROR + + def _on_auth_status_error(self, error: BaseException) -> None: + """Handle authentication status check error.""" + logger.exception(error) + self.__auth_status = api.AwsAuthenticationStatus.CONFIGURATION_ERROR self.auth_status_changed.emit() - def _refresh_api_availability(self) -> None: - self.__api_availability = 
None + def _refresh_api_availability(self) -> bool: + """Background task to check API availability.""" + return api.check_deadline_api_available(config=self.config) + + def _on_api_availability_success(self, result: bool) -> None: + """Handle successful API availability check.""" + self.__api_availability = result self.api_availability_changed.emit() - try: - self.__api_availability = api.check_deadline_api_available(config=self.config) - except BaseException as e: - logger.exception(e) - self.__api_availability = False + + def _on_api_availability_error(self, error: BaseException) -> None: + """Handle API availability check error.""" + logger.exception(error) + self.__api_availability = False self.api_availability_changed.emit() def refresh_status(self) -> None: """ Initiates an asynchronous status refresh. """ - self.__creds_source_thread = threading.Thread(target=self._refresh_creds_source) - self.__creds_source_thread.start() - self.__auth_status_thread = threading.Thread(target=self._refresh_auth_status) - self.__auth_status_thread.start() - self.__api_availability_thread = threading.Thread(target=self._refresh_api_availability) - self.__api_availability_thread.start() + # Clear current values and emit signals to indicate refresh started + self.__creds_source = None + self.creds_source_changed.emit() + self.__auth_status = None + self.auth_status_changed.emit() + self.__api_availability = None + self.api_availability_changed.emit() + + # Start async tasks for each status check + self._runner.run( + operation_key="creds_source", + fn=self._refresh_creds_source, + on_success=self._on_creds_source_success, + on_error=self._on_creds_source_error, + ) + self._runner.run( + operation_key="auth_status", + fn=self._refresh_auth_status, + on_success=self._on_auth_status_success, + on_error=self._on_auth_status_error, + ) + self._runner.run( + operation_key="api_availability", + fn=self._refresh_api_availability, + on_success=self._on_api_availability_success, + 
class JobSubmissionWorker(_QThread):
    """
    Worker thread for job submission with progress reporting.

    This QThread subclass handles the job submission process, including:
    - Hashing progress reporting
    - Upload progress reporting
    - Interactive confirmation dialogs
    - Cancellation support

    Signals:
        print_message(str): Emitted when the submission process has a message to display.
        hashing_progress(ProgressReportMetadata): Emitted during file hashing.
        upload_progress(ProgressReportMetadata): Emitted during file upload.
        confirmation_requested(str, bool): Emitted when user confirmation is needed.
            The bool indicates the default response.
        succeeded(str): Emitted on successful submission with the job ID.
        failed(Exception): Emitted when submission fails with an exception.
            (Matches the signal's declared type; run() catches Exception, so
            BaseException subclasses like KeyboardInterrupt are not reported here.)

    Usage:
        worker = JobSubmissionWorker()
        worker.print_message.connect(handle_print)
        worker.succeeded.connect(handle_success)
        worker.failed.connect(handle_error)
        worker.set_submission_kwargs(job_bundle_dir="/path/to/bundle", ...)
        worker.start()
    """

    # Progress signals
    print_message = _Signal(str)
    hashing_progress = _Signal(_ProgressReportMetadata)
    upload_progress = _Signal(_ProgressReportMetadata)

    # Interaction signals
    confirmation_requested = _Signal(str, bool)  # message, default_response

    # Completion signals
    succeeded = _Signal(str)  # job_id
    failed = _Signal(Exception)

    def __init__(self, parent: _Optional[QObject] = None) -> None:
        super().__init__(parent)
        # Cancellation flag shared between the GUI thread (writer via
        # cancel()) and this worker thread (reader in the callbacks).
        # A plain bool assignment is atomic in CPython, so no lock needed.
        self._canceled = False
        self._confirmation_result: _Optional[bool] = None
        self._confirmation_event = _threading.Event()
        self._kwargs: dict[str, _Any] = {}

    def set_submission_kwargs(self, **kwargs: _Any) -> None:
        """
        Set the keyword arguments for the job submission.

        These are passed directly to api.create_job_from_job_bundle.
        Call this before starting the thread.
        """
        self._kwargs = kwargs

    def cancel(self) -> None:
        """
        Request cancellation of the submission.

        This sets a flag that is checked by the submission callbacks.
        The actual cancellation may not be immediate depending on the
        current operation.
        """
        self._canceled = True
        # Release any waiting confirmation dialog so the worker thread
        # does not stay blocked in _confirmation_callback.
        self._confirmation_event.set()

    @property
    def is_canceled(self) -> bool:
        """Return whether cancellation has been requested."""
        return self._canceled

    def set_confirmation_result(self, accepted: bool) -> None:
        """
        Set the result of a confirmation dialog.

        Called from the main thread when the user responds to a confirmation
        dialog triggered by the confirmation_requested signal.

        Args:
            accepted: True if the user accepted, False if they canceled.
        """
        self._confirmation_result = accepted
        self._confirmation_event.set()

    def run(self) -> None:
        """
        Execute the job submission in the background thread.

        This method is called automatically when start() is invoked.
        It sets up the callbacks and calls api.create_job_from_job_bundle.
        """
        try:
            # Work on a copy so the stored submission kwargs are not
            # polluted with callback entries (keeps the state set via
            # set_submission_kwargs clean if inspected or reused).
            kwargs = dict(self._kwargs)
            kwargs["print_function_callback"] = self._print_callback
            kwargs["interactive_confirmation_callback"] = self._confirmation_callback
            kwargs["hashing_progress_callback"] = self._hashing_callback
            kwargs["upload_progress_callback"] = self._upload_callback
            kwargs["create_job_result_callback"] = self._check_canceled_callback

            job_id = _api.create_job_from_job_bundle(**kwargs)

            if not self._canceled:
                self.succeeded.emit(job_id)
        except Exception as e:
            # After a cancellation, failures are expected fallout and are
            # deliberately not reported.
            if not self._canceled:
                self.failed.emit(e)

    def _print_callback(self, message: str) -> None:
        """Callback for print messages from the submission process."""
        if not self._canceled:
            self.print_message.emit(message)

    def _confirmation_callback(self, message: str, default_response: bool) -> bool:
        """
        Callback for interactive confirmation dialogs.

        This blocks the worker thread until the main thread responds
        via set_confirmation_result() (or cancel() releases the wait).
        """
        if self._canceled:
            return False

        # Reset state for new confirmation
        self._confirmation_result = None
        self._confirmation_event.clear()

        # Request confirmation from main thread
        self.confirmation_requested.emit(message, default_response)

        # Block until main thread responds or cancellation
        self._confirmation_event.wait()

        if self._canceled:
            return False

        return self._confirmation_result if self._confirmation_result is not None else False

    def _hashing_callback(self, progress_metadata: _ProgressReportMetadata) -> bool:
        """Callback for hashing progress updates. Returns False to abort."""
        if not self._canceled:
            self.hashing_progress.emit(progress_metadata)
        return not self._canceled

    def _upload_callback(self, progress_metadata: _ProgressReportMetadata) -> bool:
        """Callback for upload progress updates. Returns False to abort."""
        if not self._canceled:
            self.upload_progress.emit(progress_metadata)
        return not self._canceled

    def _check_canceled_callback(self) -> bool:
        """Callback to check if submission should continue."""
        return not self._canceled
import api from ..deadline_authentication_status import DeadlineAuthenticationStatus from ...config import config_file, get_setting_default, str2bool -from .._utils import CancelationFlag, block_signals, tr +from .._utils import block_signals, tr from ..widgets import DirectoryPickerWidget from ..widgets.deadline_authentication_status_widget import DeadlineAuthenticationStatusWidget +from ..widgets import ( + DeadlineFarmListComboBoxController, + DeadlineQueueListComboBoxController, + DeadlineStorageProfileListComboBoxController, +) from .deadline_login_dialog import DeadlineLoginDialog logger = getLogger(__name__) @@ -227,6 +230,11 @@ def __init__(self, parent: Optional[QWidget] = None): self.config: Optional[ConfigParser] = None self.changes_were_applied = False + # Flags to track when we're waiting for cascading list refreshes + # These prevent the list_updated handlers from interfering with manual user selections + self._awaiting_farms_for_cascade = False + self._awaiting_queues_for_cascade = False + self._build_ui() self._fill_aws_profiles_box() self.refresh() @@ -320,7 +328,7 @@ def _build_profile_settings_ui(self, group, layout): layout.addRow(job_history_dir_label, self.job_history_dir_edit) self.job_history_dir_edit.path_changed.connect(self.job_history_dir_changed) - self.default_farm_box = DeadlineFarmListComboBox(parent=group) + self.default_farm_box = DeadlineFarmListComboBoxController(parent=group) default_farm_box_label = self.labels["defaults.farm_id"] = QLabel(tr("Default farm")) self.default_farm_box.box.currentIndexChanged.connect(self.default_farm_changed) self.default_farm_box.background_exception.connect(self.handle_background_exception) @@ -329,13 +337,23 @@ def _build_profile_settings_ui(self, group, layout): def _build_farm_settings_ui(self, group, layout): layout.setFieldGrowthPolicy(QFormLayout.ExpandingFieldsGrow) - self.default_queue_box = DeadlineQueueListComboBox(parent=group) + self.default_queue_box = 
DeadlineQueueListComboBoxController(parent=group) default_queue_box_label = self.labels["defaults.queue_id"] = QLabel(tr("Default queue")) self.default_queue_box.box.currentIndexChanged.connect(self.default_queue_changed) self.default_queue_box.background_exception.connect(self.handle_background_exception) layout.addRow(default_queue_box_label, self.default_queue_box) - self.default_storage_profile_box = DeadlineStorageProfileNameListComboBox(parent=group) + # Connect to controller signals to handle cascading updates when lists are populated + # (e.g., after profile or farm change) + from ..controllers import DeadlineUIController + + self._controller = DeadlineUIController.getInstance() + self._controller.farms_updated.connect(self._on_farms_list_updated, Qt.QueuedConnection) + self._controller.queues_updated.connect(self._on_queues_list_updated, Qt.QueuedConnection) + + self.default_storage_profile_box = DeadlineStorageProfileListComboBoxController( + parent=group + ) default_storage_profile_box_label = self.labels["settings.storage_profile_id"] = QLabel( tr("Default storage profile") ) @@ -717,7 +735,12 @@ def _fill_aws_profiles_box(self): self.aws_profiles_box.addItems(list(self.aws_profile_names)) def refresh_lists(self): - if api.check_deadline_api_available(): + # Use the cached api_availability from DeadlineAuthenticationStatus + # instead of making a blocking API call on the main thread. + # The status is refreshed asynchronously and on_auth_status_update + # will call this method again when api_availability_changed is emitted. 
+ auth_status = DeadlineAuthenticationStatus.getInstance() + if auth_status.api_availability: self.default_farm_box.refresh_list() self.default_queue_box.refresh_list() self.default_storage_profile_box.refresh_list() @@ -818,10 +841,16 @@ def apply(self) -> bool: def aws_profile_changed(self, value): self.changes["defaults.aws_profile_name"] = value + # Clear the farm_id and queue_id since they don't exist in the new profile + self.changes["defaults.farm_id"] = "" + self.changes["defaults.queue_id"] = "" self.default_farm_box.clear_list() self.default_queue_box.clear_list() self.default_storage_profile_box.clear_list() self.refresh() + # Trigger cascading refresh - farms will load, then queues, then storage profiles + self._awaiting_farms_for_cascade = True + self.default_farm_box.refresh_list() def job_history_dir_changed(self): job_history_dir = self.job_history_dir_edit.text() @@ -833,22 +862,83 @@ def job_history_dir_changed(self): self.refresh() def default_farm_changed(self, index): - self.changes["defaults.farm_id"] = self.default_farm_box.box.itemData(index) + if index < 0: + return + farm_id = self.default_farm_box.box.itemData(index) + if farm_id is None: + return + self.changes["defaults.farm_id"] = farm_id + # Clear the queue_id since the old queue doesn't exist in the new farm + self.changes["defaults.queue_id"] = "" + # Clear storage profile lists - they depend on queue which we're about to refresh + self.default_storage_profile_box.clear_list() self.refresh() + # Trigger cascading refresh - queues will load, then storage profiles + self._awaiting_queues_for_cascade = True self.default_queue_box.refresh_list() - self.default_storage_profile_box.refresh_list() def default_queue_changed(self, index): - self.changes["defaults.queue_id"] = self.default_queue_box.box.itemData(index) + if index < 0: + return + queue_id = self.default_queue_box.box.currentData() + if queue_id is None: + return + self.changes["defaults.queue_id"] = queue_id self.refresh() 
self.default_storage_profile_box.refresh_list() def default_storage_profile_name_changed(self, index): - self.changes["settings.storage_profile_id"] = self.default_storage_profile_box.box.itemData( - index - ) + if index < 0: + return + storage_profile_id = self.default_storage_profile_box.box.itemData(index) + if storage_profile_id is None: + return + self.changes["settings.storage_profile_id"] = storage_profile_id self.refresh() + def _on_farms_list_updated(self, farms_list): + """ + Handle when the farm list is updated from the controller. + + When farms are loaded as part of a cascade (e.g., after a profile change), + we need to continue the cascade by refreshing queues. + """ + if not self._awaiting_farms_for_cascade: + return + + self._awaiting_farms_for_cascade = False + + # Get the currently selected farm from the combo box + current_farm_id = self.default_farm_box.box.currentData() + if current_farm_id: + # Update the changes dict with the selected farm + self.changes["defaults.farm_id"] = current_farm_id + self.refresh() + # Continue cascade - refresh queues with the valid farm_id + self._awaiting_queues_for_cascade = True + self.default_queue_box.refresh_list() + + def _on_queues_list_updated(self, queues_list): + """ + Handle when the queue list is updated from the controller. + + When queues are loaded as part of a cascade (e.g., after a farm change), + we need to continue the cascade by refreshing storage profiles. 
+ """ + if not self._awaiting_queues_for_cascade: + return + + self._awaiting_queues_for_cascade = False + + # Get the currently selected queue from the combo box + current_queue_id = self.default_queue_box.box.currentData() + if current_queue_id: + # Update the changes dict with the selected queue + self.changes["defaults.queue_id"] = current_queue_id + self.refresh() + # Continue cascade - refresh storage profiles with the valid queue_id + self.default_storage_profile_box.refresh_list() + def _on_add_known_path(self): """Handle adding a new known path""" path = QFileDialog.getExistingDirectory( @@ -920,217 +1010,3 @@ def _on_edit_known_path(self): self.changes["settings.known_asset_paths"] = os.pathsep.join(current_paths) self.refresh() - - -class _DeadlineResourceListComboBox(QWidget): - """ - A ComboBox for selecting an AWS Deadline Cloud Id, with a refresh button. - - The caller should connect the `background_exception` signal, e.g. - to show a message box, and should call `set_config` whenever there is - a change to the AWS Deadline Cloud config object. - - Args: - resource_name (str): The resource name for the list, like "Farm", - "Queue", "Fleet". 
- """ - - # Emitted when the background refresh thread catches an exception, - # provides (operation_name, BaseException) - background_exception = Signal(str, BaseException) - - # Emitted when an async refresh_farms_list thread completes, - # provides (refresh_id, [(farm_id, farm_name), ...]) - _list_update = Signal(int, list) - - def __init__(self, resource_name, setting_name, parent: Optional[QWidget] = None): - super().__init__(parent) - - self.__refresh_thread = None - self.__refresh_id = 0 - self.canceled = CancelationFlag() - self.destroyed.connect(self.canceled.set_canceled) - - self.resource_name = resource_name - self.setting_name = setting_name - - self._build_ui() - - def _build_ui(self): - self.box = QComboBox(parent=self) - layout = QHBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(self.box, stretch=1) - - self.refresh_button = QPushButton("") - layout.addWidget(self.refresh_button) - self.refresh_button.setIcon(QApplication.style().standardIcon(QStyle.SP_BrowserReload)) - self.refresh_button.setFixedSize(QSize(22, 22)) # Make the button square - self.refresh_button.clicked.connect(self.refresh_list) - self._list_update.connect(self.handle_list_update) - self.background_exception.connect(self.handle_background_exception) - - def handle_background_exception(self, e): - with block_signals(self.box): - self.box.clear() - self.refresh_selected_id() - - def count(self) -> int: - """Returns the number of items in the combobox""" - return self.box.count() - - def set_config(self, config: ConfigParser) -> None: - """Updates the AWS Deadline Cloud config object the control uses.""" - self.config = config - - def clear_list(self): - """ - Fully clears the list. The caller needs to call either - `refresh_list` or `refresh_selected_id` at a later point - to finish it. - """ - with block_signals(self.box): - self.box.clear() - - def refresh_list(self): - """ - Starts a background thread to refresh the resource list. 
- """ - config = self.config - selected_id = config_file.get_setting(self.setting_name, config=config) - # Reset to a list of just the currently configured id during refresh - with block_signals(self.box): - self.box.clear() - self.box.addItem("", userData=selected_id) - - self.__refresh_id += 1 - self.__refresh_thread = threading.Thread( - target=self._refresh_thread_function, - name=f"AWS Deadline Cloud refresh {self.resource_name} thread", - args=(self.__refresh_id, config), - ) - self.__refresh_thread.start() - - def handle_list_update(self, refresh_id, items_list): - # Apply the refresh if it's still for the latest call - if refresh_id == self.__refresh_id: - with block_signals(self.box): - self.box.clear() - for name, id in items_list: - self.box.addItem(name, userData=id) - - self.refresh_selected_id() - - def refresh_selected_id(self): - """Refreshes the selected id from the config object""" - selected_id = config_file.get_setting(self.setting_name, config=self.config) - # Restore the selected Id, inserting a new item if - # it doesn't exist in the list. - with block_signals(self.box): - index = self.box.findData(selected_id) - if index >= 0: - self.box.setCurrentIndex(index) - else: - # Some cases allow to select "nothing" and insert an item to indicate such - index = self.box.findText("") - if index >= 0: - self.box.setCurrentIndex(index) - else: - self.box.insertItem(0, "", userData="") - self.box.setCurrentIndex(0) - - def _refresh_thread_function(self, refresh_id: int, config: Optional[ConfigParser] = None): - """ - This function gets started in a background thread to refresh the list. 
- """ - try: - resources = self.list_resources(config=config) - if not self.canceled: - self._list_update.emit(refresh_id, resources) - except BaseException as e: - if not self.canceled and refresh_id == self.__refresh_id: - self.background_exception.emit(f"Refresh {self.resource_name}s list", e) - - -class DeadlineFarmListComboBox(_DeadlineResourceListComboBox): - def __init__(self, parent: Optional[QWidget] = None): - super().__init__(resource_name="Farm", setting_name="defaults.farm_id", parent=parent) - - def list_resources(self, config: Optional[ConfigParser]): - response = api.list_farms(config=config) - return sorted( - [(item["displayName"], item["farmId"]) for item in response["farms"]], - key=lambda item: (item[0].casefold(), item[1]), - ) - - -class DeadlineQueueListComboBox(_DeadlineResourceListComboBox): - def __init__(self, parent: Optional[QWidget] = None): - super().__init__(resource_name="Queue", setting_name="defaults.queue_id", parent=parent) - - def list_resources(self, config: Optional[ConfigParser]): - default_farm_id = config_file.get_setting("defaults.farm_id", config=config) - if default_farm_id: - response = api.list_queues(config=config, farmId=default_farm_id) - return sorted( - [(item["displayName"], item["queueId"]) for item in response["queues"]], - key=lambda item: (item[0].casefold(), item[1]), - ) - else: - return [] - - -class DeadlineStorageProfileNameListComboBox(_DeadlineResourceListComboBox): - WINDOWS_OS = "windows" - MAC_OS = "macos" - LINUX_OS = "linux" - - def __init__(self, parent: Optional[QWidget] = None): - super().__init__( - resource_name="Storage profile", - setting_name="settings.storage_profile_id", - parent=parent, - ) - - def list_resources(self, config: Optional[ConfigParser]): - default_farm_id = config_file.get_setting("defaults.farm_id", config=config) - default_queue_id = config_file.get_setting("defaults.queue_id", config=config) - if default_farm_id and default_queue_id: - response = 
api.list_storage_profiles_for_queue( - config=config, farmId=default_farm_id, queueId=default_queue_id - ) - storage_profiles = response.get("storageProfiles", []) - # add a "" option since its possible to select nothing for this type - # of resource - storage_profiles.append( - { - "storageProfileId": "", - "displayName": "", - "osFamily": self._get_current_os(), - } - ) - return sorted( - [ - (item["displayName"], item["storageProfileId"]) - for item in storage_profiles - if self._get_current_os() == item["osFamily"].lower() - ], - key=lambda item: (item[0].casefold(), item[1]), - ) - else: - return [] - - def _get_current_os(self) -> str: - """ - Get a string specifying what the OS is, following the format the Deadline storage profile API expects. - """ - if sys.platform.startswith("linux"): - return self.LINUX_OS - - if sys.platform.startswith("darwin"): - return self.MAC_OS - - if sys.platform.startswith("win"): - return self.WINDOWS_OS - - return "Unknown" diff --git a/src/deadline/client/ui/dialogs/deadline_login_dialog.py b/src/deadline/client/ui/dialogs/deadline_login_dialog.py index c46ed4d3f..8c01f0738 100644 --- a/src/deadline/client/ui/dialogs/deadline_login_dialog.py +++ b/src/deadline/client/ui/dialogs/deadline_login_dialog.py @@ -13,12 +13,12 @@ __all__ = ["DeadlineLoginDialog"] import html -import threading from configparser import ConfigParser from typing import Optional -from qtpy.QtCore import Signal +from qtpy.QtCore import Qt, Signal from .._utils import tr +from ..controllers import AsyncTaskRunner from qtpy.QtWidgets import ( # pylint: disable=import-error; type: ignore QApplication, QMessageBox, @@ -86,7 +86,11 @@ def __init__( self.close_on_success = close_on_success self.config = config self.canceled = False - self.__login_thread: Optional[threading.Thread] = None + + # Use AsyncTaskRunner for background login + self._runner = AsyncTaskRunner(self) + self._runner.task_error.connect(self._handle_task_error, Qt.QueuedConnection) + 
self.login_thread_exception.connect(self.handle_login_thread_exception) self.login_thread_message.connect(self.handle_login_thread_message) self.login_thread_succeeded.connect(self.handle_login_thread_succeeded) @@ -98,45 +102,49 @@ def __init__( self._start_login() - def _login_background_thread(self): + def _handle_task_error(self, operation_name: str, error: BaseException) -> None: + """Handle errors from the async runner.""" + self.login_thread_exception.emit(error) + + def _login_background_task(self) -> str: """ - This function gets started in a background thread to run the login handshake. It - polls the `self.canceled` flag for cancellation, and fills in details about the - login when they become available. + This function runs in a background thread to perform the login handshake. + It polls the `self.canceled` flag for cancellation. """ - try: - - def on_pending_authorization(**kwargs): - if ( - kwargs["credentials_source"] - == AwsCredentialsSource.DEADLINE_CLOUD_MONITOR_LOGIN - ): - self.login_thread_message.emit( - tr("Opening Deadline Cloud monitor. Please log in before returning here.") - ) - - def on_cancellation_check(): - return self.canceled - - success_message = api.login( - on_pending_authorization, - on_cancellation_check, - config=self.config, - ) - self.login_thread_succeeded.emit(success_message) - except BaseException as e: - # Send the exception to the dialog - self.login_thread_exception.emit(e) + + def on_pending_authorization(**kwargs): + if kwargs["credentials_source"] == AwsCredentialsSource.DEADLINE_CLOUD_MONITOR_LOGIN: + self.login_thread_message.emit( + tr("Opening Deadline Cloud monitor. 
Please log in before returning here.") + ) + + def on_cancellation_check(): + return self.canceled + + return api.login( + on_pending_authorization, + on_cancellation_check, + config=self.config, + ) + + def _on_login_success(self, success_message: str) -> None: + """Handle successful login.""" + self.login_thread_succeeded.emit(success_message) + + def _on_login_error(self, error: BaseException) -> None: + """Handle login error.""" + self.login_thread_exception.emit(error) def _start_login(self) -> None: """ - Starts the background login thread. + Starts the background login task. """ - - self.__login_thread = threading.Thread( - target=self._login_background_thread, name="AWS Deadline Cloud login thread" + self._runner.run( + operation_key="login", + fn=self._login_background_task, + on_success=self._on_login_success, + on_error=self._on_login_error, ) - self.__login_thread.start() def handle_login_thread_exception(self, e: BaseException) -> None: """ @@ -177,11 +185,12 @@ def handle_login_thread_succeeded(self, success_message: str) -> None: def on_button_clicked(self, button): if self.standardButton(button) == QMessageBox.Cancel: - # Tell the login thread to cancel, then wait for it. 
+ # Tell the login task to cancel self.canceled = True - if self.__login_thread: - while self.__login_thread.is_alive(): - QApplication.instance().processEvents() # type: ignore[union-attr] + self._runner.cancel_all() + # Process events to allow the task to complete + while self._runner.is_running("login"): + QApplication.instance().processEvents() # type: ignore[union-attr] def exec_(self) -> bool: """ diff --git a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py index 14d9ac1f0..341f16c8c 100644 --- a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py @@ -7,10 +7,7 @@ from __future__ import annotations import logging -import threading from typing import Any, Optional -from functools import partial -import time from qtpy.QtCore import Qt, Signal, QSize from qtpy.QtGui import QCloseEvent, QFontMetrics @@ -31,90 +28,24 @@ QSizePolicy, ) -from .._utils import CancelationFlag, tr -from ... 
import api +from .._utils import tr from ...config import set_setting from ....job_attachments.progress_tracker import ProgressReportMetadata +from ._job_submission_worker import JobSubmissionWorker __all__ = ["SubmitJobProgressDialog"] logger = logging.getLogger(__name__) -def _print_function_callback( - cancelation_flag: CancelationFlag, dialog: SubmitJobProgressDialog, message: str -): - """Callback for when api.create_job_from_job_bundle prints a message.""" - if not cancelation_flag: - dialog.submission_thread_print.emit(message) - - -def _interactive_confirmation_callback( - cancelation_flag: CancelationFlag, - dialog: SubmitJobProgressDialog, - message: str, - default_response: bool, -) -> bool: - """Callback for when api.create_job_from_job_bundle presents a warning to users.""" - if not cancelation_flag.canceled: - # The handler for the submission_thread_request_warning_dialog signal will show - # the warning dialog in the main GUI thread, then will set the _warning_dialog_completed - # property to True when it is done. 
- dialog._warning_dialog_completed = False - dialog.submission_thread_request_warning_dialog.emit(message, default_response) - while not cancelation_flag.canceled and not dialog._warning_dialog_completed: - time.sleep(0.1) - return not cancelation_flag.canceled and not dialog._warning_dialog_canceled - - -def _hashing_progress_callback( - cancelation_flag: CancelationFlag, - dialog: SubmitJobProgressDialog, - progress_report_metadata: ProgressReportMetadata, -): - """Callback for when api.create_job_from_job_bundle provides a hashing progress update.""" - if not cancelation_flag.canceled: - dialog.submission_thread_hashing_progress.emit(progress_report_metadata) - return not cancelation_flag.canceled - - -def _upload_progress_callback( - cancelation_flag: CancelationFlag, - dialog: SubmitJobProgressDialog, - progress_report_metadata: ProgressReportMetadata, -): - """Callback for when api.create_job_from_job_bundle provides an upload progress update.""" - if not cancelation_flag.canceled: - dialog.submission_thread_upload_progress.emit(progress_report_metadata) - return not cancelation_flag.canceled - - -def _create_job_result_callback(cancelation_flag: CancelationFlag): - """Callback for when api.create_job_from_job_bundle checks whether to cancel.""" - return not cancelation_flag.canceled - - -def _submission_thread_runner( - cancelation_flag: CancelationFlag, dialog: SubmitJobProgressDialog, kwargs -): - """Function to run api.create_job_from_job_bundle in a background thread.""" - try: - job_id = api.create_job_from_job_bundle(**kwargs) - if not cancelation_flag.canceled: - dialog.job_id = job_id - dialog.submission_thread_succeeded.emit(job_id) - except Exception as e: - if not cancelation_flag.canceled: - dialog.submission_thread_exception.emit(e) - - class SubmitJobProgressDialog(QDialog): """ A modal dialog box for the submission progress while submitting a job bundle to AWS Deadline Cloud. 
- """ - cancelation_flag: CancelationFlag + This dialog uses a QThread-based worker (JobSubmissionWorker) to run the + submission in the background while displaying progress updates. + """ # This signal is sent when the background thread raises an exception. submission_thread_exception = Signal(BaseException) @@ -127,21 +58,18 @@ class SubmitJobProgressDialog(QDialog): # This signal is sent when the background thread succeeds. submission_thread_succeeded = Signal(str) - progress_window_closed = Signal(None) + progress_window_closed = Signal() job_id: Optional[str] = None def __init__(self, parent: QWidget) -> None: super().__init__(parent=parent) - # Use the CancelationFlag object to decouple the cancelation value - # from the window lifetime. - self.cancelation_flag = CancelationFlag() - self.destroyed.connect(self.cancelation_flag.set_canceled) self._warning_dialog_canceled = False - self._submission_complete = False - self.__submission_thread: Optional[threading.Thread] = None + self._worker: Optional[JobSubmissionWorker] = None + + # Connect internal signals to handlers self.submission_thread_print.connect(self.handle_print) self.submission_thread_hashing_progress.connect(self.handle_hashing_thread_progress_report) self.submission_thread_upload_progress.connect(self.handle_upload_thread_progress_report) @@ -170,30 +98,57 @@ def start_job_submission( kwargs["from_gui"] = True kwargs["submitter_name"] = kwargs.get("submitter_name", "CustomGUI") - # The CancelationFlag object has a lifetime decoupled from the dialog. Each callback - # always checks for cancelation before calling a method or accessing a property of the dialog, - # and the Qt object's destroyed event is connected to set its canceled flag. 
- kwargs["print_function_callback"] = partial( - _print_function_callback, self.cancelation_flag, self - ) - kwargs["interactive_confirmation_callback"] = partial( - _interactive_confirmation_callback, self.cancelation_flag, self - ) - kwargs["hashing_progress_callback"] = partial( - _hashing_progress_callback, self.cancelation_flag, self + # Create and configure the worker + self._worker = JobSubmissionWorker(self) + + # Connect worker signals to dialog signals/handlers + # Using Qt.QueuedConnection ensures thread-safe signal delivery + self._worker.print_message.connect(self.submission_thread_print.emit, Qt.QueuedConnection) + self._worker.hashing_progress.connect( + self.submission_thread_hashing_progress.emit, Qt.QueuedConnection ) - kwargs["upload_progress_callback"] = partial( - _upload_progress_callback, self.cancelation_flag, self + self._worker.upload_progress.connect( + self.submission_thread_upload_progress.emit, Qt.QueuedConnection ) - kwargs["create_job_result_callback"] = partial( - _create_job_result_callback, self.cancelation_flag + self._worker.confirmation_requested.connect( + self._handle_confirmation_requested, Qt.QueuedConnection ) + self._worker.succeeded.connect(self._handle_worker_succeeded, Qt.QueuedConnection) + self._worker.failed.connect(self._handle_worker_failed, Qt.QueuedConnection) - self.__submission_thread = threading.Thread( - target=partial(_submission_thread_runner, self.cancelation_flag, self, kwargs), - name="AWS Deadline Cloud Job Submission", - ) - self.__submission_thread.start() + # Set submission parameters and start + self._worker.set_submission_kwargs(**kwargs) + self._worker.start() + + def _handle_confirmation_requested(self, message: str, default_response: bool) -> None: + """ + Handle confirmation request from worker thread. + + Shows the warning dialog and sends the result back to the worker. 
+ """ + if self._worker is None or self._worker.is_canceled: + return + + # Show the dialog (this blocks until user responds) + dialog = _JobSumissionWarningDialog(message, default_response, self) + selection = dialog.exec() + + accepted = selection != QDialog.Rejected + if not accepted: + self._warning_dialog_canceled = True + + # Send result back to worker + if self._worker is not None: + self._worker.set_confirmation_result(accepted) + + def _handle_worker_succeeded(self, job_id: str) -> None: + """Handle successful job submission from worker.""" + self.job_id = job_id + self.submission_thread_succeeded.emit(job_id) + + def _handle_worker_failed(self, error: BaseException) -> None: + """Handle failed job submission from worker.""" + self.submission_thread_exception.emit(error) def _build_ui(self): """Builds job submission progress UI""" @@ -241,16 +196,13 @@ def handle_request_warning_dialog(self, message: str, default_response: bool): default_response (bool): True if the default is to continue. This adds a "Do not ask again" button as well. False if the default is to Cancel. - """ - # Build the UI for user confirmation - dialog = _JobSumissionWarningDialog(message, default_response, self) - - selection = dialog.exec() - if selection == QDialog.Rejected: - self._warning_dialog_canceled = True - - self._warning_dialog_completed = True + Note: + This method is kept for backward compatibility with the signal-based approach. + The actual handling is done in _handle_confirmation_requested. 
+ """ + # This is now handled by _handle_confirmation_requested + pass def handle_print(self, message: str) -> None: """ @@ -295,13 +247,17 @@ def handle_create_job_thread_succeeded(self, job_id: str) -> None: self.progress_window_closed.emit ) else: - if self.cancelation_flag or self._warning_dialog_canceled: + if self._is_canceled() or self._warning_dialog_canceled: self.status_label.setText(tr("Submission canceled")) else: self.status_label.setText(tr("Submission error")) self.button_box.setStandardButtons(QDialogButtonBox.Close) self.button_box.button(QDialogButtonBox.Close).setDefault(True) + def _is_canceled(self) -> bool: + """Check if the submission has been canceled.""" + return self._worker is not None and self._worker.is_canceled + def handle_thread_exception(self, e: BaseException) -> None: """ Handles the signal sent from the background threads when an exception is @@ -321,11 +277,12 @@ def closeEvent(self, event: QCloseEvent) -> None: if self._submission_complete: self.accept() else: - self.cancelation_flag.set_canceled() - logger.info("Canceling submission...") - self.status_label.setText(tr("Canceling submission...")) - if self.__submission_thread is not None: - while self.__submission_thread.is_alive(): + if self._worker is not None: + self._worker.cancel() + logger.info("Canceling submission...") + self.status_label.setText(tr("Canceling submission...")) + # Wait for worker to finish with event processing + while self._worker.isRunning(): QApplication.instance().processEvents() # type: ignore[union-attr] super().closeEvent(event) diff --git a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py index fa2142400..baf7550ec 100644 --- a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py @@ -113,7 +113,7 @@ def __init__( attachments: AssetReferences, on_create_job_bundle_callback: 
OnCreateJobBundleCallback, parent: Optional[QWidget] = None, - f: Qt.WindowFlags = Qt.WindowFlags(), + f=Qt.WindowFlags(), show_host_requirements_tab: bool = False, host_requirements: Optional[HostRequirements] = None, submitter_info: Optional[SubmitterInfo] = None, diff --git a/src/deadline/client/ui/job_bundle_submitter.py b/src/deadline/client/ui/job_bundle_submitter.py index b2cdc60f2..84a0d2ee0 100644 --- a/src/deadline/client/ui/job_bundle_submitter.py +++ b/src/deadline/client/ui/job_bundle_submitter.py @@ -150,13 +150,12 @@ def show_job_bundle_submitter( if parent is None: # Get the main application window so we can parent ours to it app = QApplication.instance() - main_windows = [ - widget - for widget in app.topLevelWidgets() - if isinstance(widget, QMainWindow) # type: ignore[union-attr] - ] - if main_windows: - parent = main_windows[0] + if app is not None: + main_windows = [ + widget for widget in app.topLevelWidgets() if isinstance(widget, QMainWindow) + ] + if main_windows: + parent = main_windows[0] if not input_job_bundle_dir: input_job_bundle_dir = QFileDialog.getExistingDirectory( diff --git a/src/deadline/client/ui/widgets/__init__.py b/src/deadline/client/ui/widgets/__init__.py index d32d6f432..ccb99152f 100644 --- a/src/deadline/client/ui/widgets/__init__.py +++ b/src/deadline/client/ui/widgets/__init__.py @@ -20,6 +20,10 @@ "DeadlineCloudSettingsWidget", "SharedJobSettingsWidget", "SharedJobPropertiesWidget", + # Controller-based combo boxes + "DeadlineFarmListComboBoxController", + "DeadlineQueueListComboBoxController", + "DeadlineStorageProfileListComboBoxController", ] from .deadline_authentication_status_widget import DeadlineAuthenticationStatusWidget @@ -42,3 +46,8 @@ SharedJobSettingsWidget, SharedJobPropertiesWidget, ) +from ._deadline_list_combo_boxes import ( + DeadlineFarmListComboBoxController, + DeadlineQueueListComboBoxController, + DeadlineStorageProfileListComboBoxController, +) diff --git 
a/src/deadline/client/ui/widgets/_deadline_list_combo_boxes.py b/src/deadline/client/ui/widgets/_deadline_list_combo_boxes.py new file mode 100644 index 000000000..8ea274a97 --- /dev/null +++ b/src/deadline/client/ui/widgets/_deadline_list_combo_boxes.py @@ -0,0 +1,252 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Combo box widgets for selecting Deadline Cloud resources. + +These widgets use the DeadlineUIController for async API operations, +ensuring proper ordering and automatic cancellation of superseded requests. +""" + +from configparser import ConfigParser +from typing import Any, List, Optional, TYPE_CHECKING + +from qtpy.QtCore import Qt, QSize, Signal +from qtpy.QtWidgets import ( + QApplication, + QComboBox, + QHBoxLayout, + QPushButton, + QStyle, + QWidget, +) + +if TYPE_CHECKING: + from qtpy.QtCore import SignalInstance + +from ...config import config_file +from .._utils import block_signals +from ..controllers import DeadlineUIController + + +class _DeadlineResourceListComboBoxController(QWidget): + """ + Base class for combo boxes that select Deadline Cloud resources. + + This class uses the DeadlineUIController for async API operations, + ensuring proper ordering and automatic cancellation of superseded requests. 
+ + Subclasses should: + - Call _connect_controller_signals() in __init__ after super().__init__() + - Implement _get_controller_signal() to return the appropriate signal + - Implement _get_loading_signal() to return the loading state signal + - Implement _trigger_refresh() to call the appropriate controller method + - Implement _get_setting_name() to return the config setting name + + Args: + resource_name: Display name for the resource type (e.g., "Farm", "Queue") + parent: Parent widget + """ + + # Emitted when the background refresh catches an exception + background_exception = Signal(str, BaseException) + + def __init__(self, resource_name: str, parent: Optional[QWidget] = None): + super().__init__(parent) + + self.resource_name = resource_name + self.config: Optional[ConfigParser] = None + self._controller = DeadlineUIController.getInstance() + + self._build_ui() + + def _build_ui(self) -> None: + """Build the widget UI.""" + self.box = QComboBox(parent=self) + layout = QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(self.box, stretch=1) + + self.refresh_button = QPushButton("") + layout.addWidget(self.refresh_button) + self.refresh_button.setIcon(QApplication.style().standardIcon(QStyle.SP_BrowserReload)) + self.refresh_button.setFixedSize(QSize(22, 22)) + self.refresh_button.clicked.connect(self.refresh_list) + + def _connect_controller_signals(self) -> None: + """ + Connect to the controller's signals. + + Subclasses must call this after setting up their specific signal connections. 
+ """ + # Connect to the data update signal + self._get_controller_signal().connect(self._handle_list_update, Qt.QueuedConnection) + + # Connect to the loading state signal + self._get_loading_signal().connect(self._handle_loading_state, Qt.QueuedConnection) + + # Connect to the error signal + self._controller.operation_failed.connect( + self._handle_operation_failed, Qt.QueuedConnection + ) + + def _get_controller_signal(self) -> Any: + """Return the controller signal that provides the resource list.""" + raise NotImplementedError("Subclasses must implement _get_controller_signal") + + def _get_loading_signal(self) -> Any: + """Return the controller signal that indicates loading state.""" + raise NotImplementedError("Subclasses must implement _get_loading_signal") + + def _trigger_refresh(self) -> None: + """Trigger a refresh on the controller.""" + raise NotImplementedError("Subclasses must implement _trigger_refresh") + + def _get_setting_name(self) -> str: + """Return the config setting name for this resource.""" + raise NotImplementedError("Subclasses must implement _get_setting_name") + + def _handle_list_update(self, items_list: List) -> None: + """Handle the list update from the controller.""" + with block_signals(self.box): + self.box.clear() + for item in items_list: + # Handle both tuple and list formats (Qt signals may convert) + if isinstance(item, (list, tuple)) and len(item) >= 2: + name, resource_id = item[0], item[1] + self.box.addItem(name, userData=resource_id) + + self.refresh_selected_id() + + def _handle_loading_state(self, is_loading: bool) -> None: + """Handle loading state changes.""" + if is_loading: + # Show refreshing indicator + selected_id = config_file.get_setting(self._get_setting_name(), config=self.config) + with block_signals(self.box): + self.box.clear() + self.box.addItem("", userData=selected_id) + + self.refresh_button.setEnabled(not is_loading) + + def _handle_operation_failed(self, operation_name: str, error: BaseException) 
-> None: + """Handle operation failures from the controller.""" + # Only handle errors for our resource type + expected_operation = self._get_expected_operation_name() + if operation_name == expected_operation: + with block_signals(self.box): + self.box.clear() + self.refresh_selected_id() + self.background_exception.emit(f"Refresh {self.resource_name}s list", error) + + def _get_expected_operation_name(self) -> str: + """Return the operation name to filter errors by.""" + raise NotImplementedError("Subclasses must implement _get_expected_operation_name") + + def count(self) -> int: + """Returns the number of items in the combobox.""" + return self.box.count() + + def set_config(self, config: ConfigParser) -> None: + """Updates the AWS Deadline Cloud config object the control uses.""" + self.config = config + self._controller.set_config(config) + + def clear_list(self) -> None: + """ + Fully clears the list. The caller needs to call either + `refresh_list` or `refresh_selected_id` at a later point. 
+ """ + with block_signals(self.box): + self.box.clear() + + def refresh_list(self) -> None: + """Starts a background refresh of the resource list.""" + self._trigger_refresh() + + def refresh_selected_id(self) -> None: + """Refreshes the selected id from the config object.""" + selected_id = config_file.get_setting(self._get_setting_name(), config=self.config) + with block_signals(self.box): + index = self.box.findData(selected_id) + if index >= 0: + self.box.setCurrentIndex(index) + else: + # Some cases allow selecting "nothing" + index = self.box.findText("") + if index >= 0: + self.box.setCurrentIndex(index) + else: + self.box.insertItem(0, "", userData="") + self.box.setCurrentIndex(0) + + +class DeadlineFarmListComboBoxController(_DeadlineResourceListComboBoxController): + """Combo box for selecting a Deadline Cloud farm.""" + + def __init__(self, parent: Optional[QWidget] = None): + super().__init__(resource_name="Farm", parent=parent) + self._connect_controller_signals() + + def _get_controller_signal(self) -> "SignalInstance": + return self._controller.farms_updated + + def _get_loading_signal(self) -> "SignalInstance": + return self._controller.farms_loading + + def _trigger_refresh(self) -> None: + self._controller.refresh_farms() + + def _get_setting_name(self) -> str: + return "defaults.farm_id" + + def _get_expected_operation_name(self) -> str: + return "list_farms" + + +class DeadlineQueueListComboBoxController(_DeadlineResourceListComboBoxController): + """Combo box for selecting a Deadline Cloud queue.""" + + def __init__(self, parent: Optional[QWidget] = None): + super().__init__(resource_name="Queue", parent=parent) + self._connect_controller_signals() + + def _get_controller_signal(self) -> "SignalInstance": + return self._controller.queues_updated + + def _get_loading_signal(self) -> "SignalInstance": + return self._controller.queues_loading + + def _trigger_refresh(self) -> None: + farm_id = config_file.get_setting("defaults.farm_id", 
config=self.config) + self._controller.refresh_queues(farm_id=farm_id) + + def _get_setting_name(self) -> str: + return "defaults.queue_id" + + def _get_expected_operation_name(self) -> str: + return "list_queues" + + +class DeadlineStorageProfileListComboBoxController(_DeadlineResourceListComboBoxController): + """Combo box for selecting a Deadline Cloud storage profile.""" + + def __init__(self, parent: Optional[QWidget] = None): + super().__init__(resource_name="Storage profile", parent=parent) + self._connect_controller_signals() + + def _get_controller_signal(self) -> "SignalInstance": + return self._controller.storage_profiles_updated + + def _get_loading_signal(self) -> "SignalInstance": + return self._controller.storage_profiles_loading + + def _trigger_refresh(self) -> None: + farm_id = config_file.get_setting("defaults.farm_id", config=self.config) + queue_id = config_file.get_setting("defaults.queue_id", config=self.config) + self._controller.refresh_storage_profiles(farm_id=farm_id, queue_id=queue_id) + + def _get_setting_name(self) -> str: + return "settings.storage_profile_id" + + def _get_expected_operation_name(self) -> str: + return "list_storage_profiles" diff --git a/src/deadline/client/ui/widgets/deadline_authentication_status_widget.py b/src/deadline/client/ui/widgets/deadline_authentication_status_widget.py index 613a3deb8..2b2a946e1 100644 --- a/src/deadline/client/ui/widgets/deadline_authentication_status_widget.py +++ b/src/deadline/client/ui/widgets/deadline_authentication_status_widget.py @@ -403,4 +403,4 @@ def _apply_ui_state(self, config: AuthenticationStateConfig) -> None: if any(action.isVisible() for action in self._auth_menu.actions()): self._profile_button.setMenu(self._auth_menu) else: - self._profile_button.setMenu(None) + self._profile_button.setMenu(None) # type: ignore[arg-type] diff --git a/src/deadline/client/ui/widgets/job_bundle_settings_tab.py b/src/deadline/client/ui/widgets/job_bundle_settings_tab.py index 
93cba618e..c49fccfd7 100644 --- a/src/deadline/client/ui/widgets/job_bundle_settings_tab.py +++ b/src/deadline/client/ui/widgets/job_bundle_settings_tab.py @@ -46,8 +46,6 @@ class JobBundleSettingsWidget(QWidget): def __init__(self, initial_settings: JobBundleSettings, parent: Optional[QWidget] = None): super().__init__(parent=parent) - self.parent = parent - self.param_layout = QVBoxLayout() self._build_ui(initial_settings) @@ -64,7 +62,9 @@ def refresh_ui(self, settings: JobBundleSettings): # Clear the layout for i in reversed(range(self.param_layout.count())): item = self.param_layout.takeAt(i) - item.widget().deleteLater() + widget = item.widget() + if widget: + widget.deleteLater() self.parameters_widget = OpenJDParametersWidget( parameter_definitions=settings.parameters, parent=self @@ -114,8 +114,8 @@ def on_load_bundle(self): logger.warning(msg) return - if self.parent and hasattr(self.parent, "refresh"): - self.parent.refresh( + if self.parent() and hasattr(self.parent(), "refresh"): + self.parent().refresh( job_settings=job_settings, auto_detected_attachments=asset_references, attachments=None, diff --git a/src/deadline/client/ui/widgets/openjd_parameters_widget.py b/src/deadline/client/ui/widgets/openjd_parameters_widget.py index 33112f2a7..cd674dad2 100644 --- a/src/deadline/client/ui/widgets/openjd_parameters_widget.py +++ b/src/deadline/client/ui/widgets/openjd_parameters_widget.py @@ -90,8 +90,9 @@ def rebuild_ui( if isinstance(layout, QVBoxLayout): for index in reversed(range(layout.count())): child = layout.takeAt(index) - if child.widget(): - child.widget().deleteLater() + widget = child.widget() + if widget: + widget.deleteLater() elif child.layout(): child.layout().deleteLater() else: diff --git a/src/deadline/client/ui/widgets/shared_job_settings_tab.py b/src/deadline/client/ui/widgets/shared_job_settings_tab.py index f43365823..fb873fb47 100644 --- a/src/deadline/client/ui/widgets/shared_job_settings_tab.py +++ 
b/src/deadline/client/ui/widgets/shared_job_settings_tab.py @@ -7,10 +7,9 @@ from __future__ import annotations import sys -import threading -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional -from qtpy.QtCore import Signal # type: ignore +from qtpy.QtCore import Qt, Signal # type: ignore from qtpy.QtWidgets import ( # type: ignore QComboBox, QFormLayout, @@ -26,9 +25,9 @@ from ... import api from ...config import get_setting -from .._utils import CancelationFlag, tr +from .._utils import tr +from ..controllers import AsyncTaskRunner, DeadlineUIController from .openjd_parameters_widget import OpenJDParametersWidget -from ...api import get_queue_parameter_definitions class SharedJobSettingsWidget(QWidget): # pylint: disable=too-few-public-methods @@ -54,14 +53,6 @@ class SharedJobSettingsWidget(QWidget): # pylint: disable=too-few-public-method # Emitted when the queue parameter validity state changes valid_parameters = Signal(bool) - # Emitted when the background refresh thread catches an exception, - # provides (operation_name, BaseException) - _background_exception = Signal(str, BaseException) - - # Emitted when an async queue parameters loading thread completes, - # provides (refresh_id, queue_parameters) - _queue_parameters_update = Signal(int, list) - def __init__( self, *, @@ -92,14 +83,25 @@ def __init__( lambda message: self.parameter_changed.emit(message) ) - self.__refresh_queue_parameters_thread: Optional[threading.Thread] = None - self.__refresh_queue_parameters_id = 0 + # Track current farm/queue IDs for change detection + self.farm_id = get_setting("defaults.farm_id") + self.queue_id = get_setting("defaults.queue_id") self.__valid_queue = False - self.canceled = CancelationFlag() - self.destroyed.connect(self.canceled.set_canceled) - self._queue_parameters_update.connect(self._handle_queue_parameters_update) - self._background_exception.connect(self._handle_background_queue_parameters_exception) - 
self._start_load_queue_parameters_thread() + + # Connect to the controller for queue parameters + self._controller = DeadlineUIController.getInstance() + self._controller.queue_parameters_updated.connect( + self._handle_queue_parameters_update, Qt.QueuedConnection + ) + self._controller.queue_parameters_loading.connect( + self._handle_queue_parameters_loading, Qt.QueuedConnection + ) + self._controller.operation_failed.connect( + self._handle_operation_failed, Qt.QueuedConnection + ) + + # Start initial load + self._start_load_queue_parameters() # Set any "deadline:*" parameters, like deadline:priority. # The queue parameters will be set asynchronously by the background thread. @@ -107,14 +109,6 @@ def __init__( if name.startswith("deadline:"): self.set_parameter_value({"name": name, "value": value}) - def __del__(self): - self.canceled.set_canceled() - if ( - self.__refresh_queue_parameters_thread - and self.__refresh_queue_parameters_thread.is_alive() - ): - self.__refresh_queue_parameters_thread.join() - def refresh_ui(self, job_settings: Any, load_new_bundle: bool = False): # Refresh the job settings in the UI self.shared_job_properties_box.refresh_ui(job_settings) @@ -146,77 +140,49 @@ def refresh_queue_parameters(self, load_new_bundle: bool = False): self.queue_parameters_box.rebuild_ui( async_loading_state="Reloading Queue Environments..." 
) - # Join the thread if the queue id or job bundle has changed and the thread is running - if ( - (queue_id != self.queue_id or load_new_bundle) - and self.__refresh_queue_parameters_thread - and self.__refresh_queue_parameters_thread.is_alive() - ): - self.__refresh_queue_parameters_thread.join() - - # Start the thread if it doesn't exist or is not alive - if ( - not self.__refresh_queue_parameters_thread - or not self.__refresh_queue_parameters_thread.is_alive() - ): - self._start_load_queue_parameters_thread() - - def _handle_background_queue_parameters_exception(self, title: str, error: BaseException): - self.__valid_queue = False - self.valid_parameters.emit(False) - if self.__refresh_queue_parameters_thread: - self.canceled.set_canceled() - self.__refresh_queue_parameters_thread.join() - self.queue_parameters_box.rebuild_ui( - async_loading_state="Error loading queue environments: {}\n\nError traceback: {}".format( - title, error + self._start_load_queue_parameters() + + def _handle_queue_parameters_loading(self, is_loading: bool) -> None: + """Handle loading state changes from the controller.""" + if is_loading: + self.queue_parameters_box.rebuild_ui( + async_loading_state="Loading Queue Environments..." + ) + + def _handle_operation_failed(self, operation_name: str, error: BaseException) -> None: + """Handle operation failures from the controller.""" + if operation_name == "get_queue_parameters": + self.__valid_queue = False + self.valid_parameters.emit(False) + self.queue_parameters_box.rebuild_ui( + async_loading_state="Error loading queue environments: {}\n\nError traceback: {}".format( + "Invalid queue parameters", error + ) ) - ) - def _start_load_queue_parameters_thread(self): + def _start_load_queue_parameters(self): """ - Starts a background thread to load the queue parameters. + Triggers the controller to load queue parameters. 
""" self.farm_id = farm_id = get_setting("defaults.farm_id") self.queue_id = queue_id = get_setting("defaults.queue_id") if not self.farm_id or not self.queue_id: - # If the user has not selected a farm or queue ID, don't bother starting - # the thread. + # If the user has not selected a farm or queue ID, don't bother loading return - self.__refresh_queue_parameters_id += 1 - self.canceled = CancelationFlag() - self.__refresh_queue_parameters_thread = threading.Thread( - target=self._load_queue_parameters_thread_function, - name="AWS Deadline Cloud load queue parameters thread", - args=(self.__refresh_queue_parameters_id, farm_id, queue_id), - ) - self.__refresh_queue_parameters_thread.start() + self._controller.refresh_queue_parameters(farm_id=farm_id, queue_id=queue_id) def is_queue_valid(self) -> bool: return self.__valid_queue - def _handle_queue_parameters_update(self, refresh_id, queue_parameters): - # Apply the refresh if it's still for the latest call - if refresh_id == self.__refresh_queue_parameters_id: - self.__valid_queue = True - self.valid_parameters.emit(True) - # Apply the initial queue parameter values - for parameter in queue_parameters: - if parameter["name"] in self.initial_shared_parameter_values: - parameter["value"] = self.initial_shared_parameter_values[parameter["name"]] - self.queue_parameters_box.rebuild_ui(parameter_definitions=queue_parameters) - - def _load_queue_parameters_thread_function(self, refresh_id: int, farm_id: str, queue_id: str): - """ - This function gets started in a background thread to refresh the list. 
- """ - try: - queue_parameters = get_queue_parameter_definitions(farmId=farm_id, queueId=queue_id) - if not self.canceled: - self._queue_parameters_update.emit(refresh_id, queue_parameters) - except BaseException as e: - if not self.canceled: - self._background_exception.emit("Invalid queue parameters", e) + def _handle_queue_parameters_update(self, queue_parameters: List) -> None: + """Handle queue parameters update from the controller.""" + self.__valid_queue = True + self.valid_parameters.emit(True) + # Apply the initial queue parameter values + for parameter in queue_parameters: + if parameter["name"] in self.initial_shared_parameter_values: + parameter["value"] = self.initial_shared_parameter_values[parameter["name"]] + self.queue_parameters_box.rebuild_ui(parameter_definitions=queue_parameters) def update_settings(self, settings): self.shared_job_properties_box.update_settings(settings) @@ -521,6 +487,8 @@ class _DeadlineNamedResourceDisplay(QWidget): it as the Id, but does an async call to AWS Deadline Cloud to convert it to the name. + Uses AsyncTaskRunner for background API calls with automatic cancellation. + Args: resource_name (str): The resource name for the list, like "Farm", "Queue", "Fleet". 
@@ -531,24 +499,19 @@ class _DeadlineNamedResourceDisplay(QWidget): # provides (operation_name, BaseException) background_exception = Signal(str, BaseException) - # Emitted when an async refresh_item thread completes, - # provides (refresh_id, id, name, description) - _item_update = Signal(int, str, str, str) - def __init__(self, *, resource_name, setting_name, parent: Optional[QWidget] = None): super().__init__(parent=parent) - self.__refresh_thread = None - self.__refresh_id = 0 - self.canceled = CancelationFlag() - self.destroyed.connect(self.canceled.set_canceled) - self.resource_name = resource_name self.setting_name = setting_name self.item_id = get_setting(self.setting_name) self.item_name = "" self.item_description = "" + # Use AsyncTaskRunner for background API calls + self._runner = AsyncTaskRunner(self) + self._runner.task_error.connect(self._handle_error, Qt.QueuedConnection) + self._build_ui() self.label.setText(self.item_display_name()) @@ -558,9 +521,12 @@ def _build_ui(self): layout = QHBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(self.label) - self._item_update.connect(self.handle_item_update) self.background_exception.connect(self.handle_background_exception) + def _handle_error(self, operation_name: str, error: BaseException) -> None: + """Handle errors from the async runner.""" + self.background_exception.emit(f"Refresh {self.resource_name} item", error) + def handle_background_exception(self, e): self.label.setText(self.item_id) self.label.setToolTip("") @@ -571,7 +537,7 @@ def item_display_name(self): def refresh(self, deadline_authorized): """ - Starts a background thread to refresh the item name. + Starts a background task to refresh the item name. 
Args: deadline_authorized (bool): Should be the result of a call to @@ -588,39 +554,26 @@ def refresh(self, deadline_authorized): if deadline_authorized: display_name = " - " + display_name - self.__refresh_id += 1 - self.__refresh_thread = threading.Thread( - target=self._refresh_thread_function, - name=f"AWS Deadline Cloud refresh {self.resource_name} item thread", - args=(self.__refresh_id,), + self._runner.run( + operation_key=f"get_{self.resource_name}", + fn=self.get_item, + on_success=self._handle_item_update, + on_error=lambda e: self._handle_error(f"get_{self.resource_name}", e), ) - self.__refresh_thread.start() self.label.setText(display_name) self.label.setToolTip(self.item_description) else: self.label.setText(self.item_display_name()) - def handle_item_update(self, refresh_id, id, name, description): - # Apply the refresh if it's still for the latest call - if refresh_id == self.__refresh_id: - self.item_id = id - self.item_name = name - self.item_description = description - self.label.setText(self.item_display_name()) - self.label.setToolTip(self.item_description) - - def _refresh_thread_function(self, refresh_id: int): - """ - This function gets started in a background thread to refresh the list. 
- """ - try: - item = self.get_item() - if not self.canceled: - self._item_update.emit(refresh_id, *item) - except BaseException as e: - if not self.canceled: - self.background_exception.emit(f"Refresh {self.resource_name} item", e) + def _handle_item_update(self, item: tuple) -> None: + """Handle successful item fetch.""" + item_id, name, description = item + self.item_id = item_id + self.item_name = name + self.item_description = description + self.label.setText(self.item_display_name()) + self.label.setToolTip(self.item_description) class DeadlineFarmDisplay(_DeadlineNamedResourceDisplay): diff --git a/test/unit/deadline_client/ui/controllers/__init__.py b/test/unit/deadline_client/ui/controllers/__init__.py new file mode 100644 index 000000000..8d929cc86 --- /dev/null +++ b/test/unit/deadline_client/ui/controllers/__init__.py @@ -0,0 +1 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/test/unit/deadline_client/ui/controllers/test_async_runner.py b/test/unit/deadline_client/ui/controllers/test_async_runner.py new file mode 100644 index 000000000..dd5eda4ce --- /dev/null +++ b/test/unit/deadline_client/ui/controllers/test_async_runner.py @@ -0,0 +1,262 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Tests for AsyncTaskRunner class. 
+""" + +import pytest +from unittest.mock import Mock +import time + +try: + from deadline.client.ui.controllers._async_runner import AsyncTaskRunner + from deadline.client.ui.controllers._thread_pool import DeadlineThreadPool + from qtpy.QtCore import Qt, QCoreApplication + + # Handle Qt5 vs Qt6 API differences for connection types + try: + _QueuedConnection = Qt.ConnectionType.QueuedConnection # type: ignore[attr-defined] + except AttributeError: + _QueuedConnection = Qt.QueuedConnection # type: ignore[attr-defined] +except ImportError: + pytest.importorskip("deadline.client.ui.controllers._async_runner") + + +class TestAsyncTaskRunner: + """Tests for AsyncTaskRunner class.""" + + def setup_method(self): + """Reset thread pool before each test.""" + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineThreadPool.shutdown(wait_for_done=True, timeout_ms=2000) + DeadlineThreadPool.reset() + + def test_init_creates_runner(self, qtbot): + """Test that AsyncTaskRunner can be instantiated.""" + runner = AsyncTaskRunner() + + assert runner.active_task_count == 0 + + def test_run_returns_operation_id(self, qtbot): + """Test that run() returns an operation ID.""" + runner = AsyncTaskRunner() + + fn = Mock(return_value="result") + op_id = runner.run("test_op", fn) + + assert isinstance(op_id, int) + assert op_id > 0 + + def test_run_increments_operation_id(self, qtbot): + """Test that operation IDs increment.""" + runner = AsyncTaskRunner() + + fn = Mock(return_value="result") + op_id1 = runner.run("op1", fn) + op_id2 = runner.run("op2", fn) + + assert op_id2 > op_id1 + + def test_run_executes_function(self, qtbot): + """Test that run() executes the function.""" + runner = AsyncTaskRunner() + + result_received = [] + fn = Mock(return_value="test_result") + + runner.run("test_op", fn, on_success=lambda x: result_received.append(x)) + + # Wait for the task to complete + qtbot.waitUntil(lambda: len(result_received) > 0, 
timeout=2000) + + fn.assert_called_once() + assert result_received[0] == "test_result" + + def test_run_passes_args_to_function(self, qtbot): + """Test that run() passes arguments to the function.""" + runner = AsyncTaskRunner() + + result_received = [] + fn = Mock(return_value="result") + + runner.run( + "test_op", + fn, + lambda x: result_received.append(x), # on_success + None, # on_error + "arg1", # *args start here + "arg2", + kwarg1="value1", + ) + + qtbot.waitUntil(lambda: len(result_received) > 0, timeout=2000) + + fn.assert_called_once_with("arg1", "arg2", kwarg1="value1") + + def test_run_calls_on_success_callback(self, qtbot): + """Test that on_success callback is called with result.""" + runner = AsyncTaskRunner() + + result_received = [] + fn = Mock(return_value={"data": "test"}) + + runner.run("test_op", fn, on_success=lambda x: result_received.append(x)) + + qtbot.waitUntil(lambda: len(result_received) > 0, timeout=2000) + + assert result_received[0] == {"data": "test"} + + def test_run_calls_on_error_callback(self, qtbot): + """Test that on_error callback is called on exception.""" + runner = AsyncTaskRunner() + + error_received = [] + test_error = ValueError("test error") + fn = Mock(side_effect=test_error) + + runner.run("test_op", fn, on_error=lambda e: error_received.append(e)) + + qtbot.waitUntil(lambda: len(error_received) > 0, timeout=2000) + + assert error_received[0] is test_error + + def test_run_emits_task_error_signal(self, qtbot): + """Test that task_error signal is emitted on exception.""" + runner = AsyncTaskRunner() + + errors_received = [] + test_error = ValueError("test error") + fn = Mock(side_effect=test_error) + + runner.task_error.connect( + lambda key, e: errors_received.append((key, e)), _QueuedConnection + ) + runner.run("test_op", fn) + + qtbot.waitUntil(lambda: len(errors_received) > 0, timeout=2000) + + assert errors_received[0][0] == "test_op" + assert errors_received[0][1] is test_error + + def 
test_is_running_returns_true_for_active_task(self, qtbot): + """Test that is_running returns True for active tasks.""" + runner = AsyncTaskRunner() + + # Use a function that takes some time + def slow_fn(): + time.sleep(0.5) + return "result" + + runner.run("slow_op", slow_fn) + + # Check immediately - should be running + assert runner.is_running("slow_op") is True + + def test_is_running_returns_false_for_unknown_key(self, qtbot): + """Test that is_running returns False for unknown keys.""" + runner = AsyncTaskRunner() + + assert runner.is_running("unknown_op") is False + + def test_cancel_stops_task(self, qtbot): + """Test that cancel() prevents result emission.""" + runner = AsyncTaskRunner() + + result_received = [] + + def slow_fn(): + time.sleep(0.5) + return "result" + + runner.run("slow_op", slow_fn, on_success=lambda x: result_received.append(x)) + + # Cancel immediately + canceled = runner.cancel("slow_op") + + assert canceled is True + assert runner.is_running("slow_op") is False + + # Wait a bit and verify no result was received + time.sleep(0.7) + QCoreApplication.processEvents() + assert len(result_received) == 0 + + def test_cancel_returns_false_for_unknown_key(self, qtbot): + """Test that cancel() returns False for unknown keys.""" + runner = AsyncTaskRunner() + + result = runner.cancel("unknown_op") + + assert result is False + + def test_cancel_all_cancels_all_tasks(self, qtbot): + """Test that cancel_all() cancels all running tasks.""" + runner = AsyncTaskRunner() + + def slow_fn(): + time.sleep(1) + return "result" + + runner.run("op1", slow_fn) + runner.run("op2", slow_fn) + + count = runner.cancel_all() + + assert count == 2 + assert runner.active_task_count == 0 + + def test_same_key_cancels_previous_task(self, qtbot): + """Test that running with same key cancels previous task.""" + runner = AsyncTaskRunner() + + results = [] + + def slow_fn(value): + time.sleep(0.3) + return value + + # Start first task + runner.run( + "same_key", + slow_fn, 
+ lambda x: results.append(x), # on_success + None, # on_error + "first", # arg passed to slow_fn + ) + + # Start second task with same key - should cancel first + runner.run( + "same_key", + slow_fn, + lambda x: results.append(x), # on_success + None, # on_error + "second", # arg passed to slow_fn + ) + + # Wait for completion + qtbot.waitUntil(lambda: len(results) > 0, timeout=2000) + + # Only second result should be received + assert results == ["second"] + + def test_active_task_count(self, qtbot): + """Test that active_task_count reflects running tasks.""" + runner = AsyncTaskRunner() + + def slow_fn(): + time.sleep(0.5) + return "result" + + assert runner.active_task_count == 0 + + runner.run("op1", slow_fn) + runner.run("op2", slow_fn) + + assert runner.active_task_count == 2 + + runner.cancel_all() + + assert runner.active_task_count == 0 diff --git a/test/unit/deadline_client/ui/controllers/test_async_task.py b/test/unit/deadline_client/ui/controllers/test_async_task.py new file mode 100644 index 000000000..ee95cf350 --- /dev/null +++ b/test/unit/deadline_client/ui/controllers/test_async_task.py @@ -0,0 +1,211 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Tests for AsyncTask and WorkerSignals classes. 
+""" + +import pytest +from unittest.mock import Mock + +try: + from deadline.client.ui.controllers._async_task import AsyncTask, WorkerSignals +except ImportError: + pytest.importorskip("deadline.client.ui.controllers._async_task") + + +class TestWorkerSignals: + """Tests for WorkerSignals class.""" + + def test_signals_exist(self, qtbot): + """Test that all expected signals are defined.""" + signals = WorkerSignals() + + assert hasattr(signals, "finished") + assert hasattr(signals, "error") + assert hasattr(signals, "result") + + def test_result_signal_emits(self, qtbot): + """Test that result signal can be emitted and received.""" + signals = WorkerSignals() + + received = [] + signals.result.connect(lambda x: received.append(x)) + + signals.result.emit({"test": "data"}) + + assert len(received) == 1 + assert received[0] == {"test": "data"} + + def test_error_signal_emits(self, qtbot): + """Test that error signal can be emitted and received.""" + signals = WorkerSignals() + + received = [] + signals.error.connect(lambda x: received.append(x)) + + test_error = ValueError("test error") + signals.error.emit(test_error) + + assert len(received) == 1 + assert received[0] is test_error + + def test_finished_signal_emits(self, qtbot): + """Test that finished signal can be emitted and received.""" + signals = WorkerSignals() + + finished_called = [] + signals.finished.connect(lambda: finished_called.append(True)) + + signals.finished.emit() + + assert len(finished_called) == 1 + + +class TestAsyncTask: + """Tests for AsyncTask class.""" + + def test_init_stores_function_and_args(self): + """Test that constructor stores function and arguments.""" + fn = Mock() + task = AsyncTask(fn, "arg1", "arg2", kwarg1="value1") + + assert task.fn is fn + assert task.args == ("arg1", "arg2") + assert task.kwargs == {"kwarg1": "value1"} + + def test_init_with_operation_id(self): + """Test that operation_id is stored.""" + fn = Mock() + task = AsyncTask(fn, operation_id=42) + + assert 
task.operation_id == 42 + + def test_init_default_operation_id_is_none(self): + """Test that operation_id defaults to None.""" + fn = Mock() + task = AsyncTask(fn) + + assert task.operation_id is None + + def test_cancel_sets_flag(self): + """Test that cancel() sets the canceled flag.""" + fn = Mock() + task = AsyncTask(fn) + + assert task.is_canceled is False + task.cancel() + assert task.is_canceled is True + + def test_run_executes_function(self): + """Test that run() executes the function with correct arguments.""" + fn = Mock(return_value="result") + task = AsyncTask(fn, "arg1", kwarg1="value1") + + task.run() + + fn.assert_called_once_with("arg1", kwarg1="value1") + + def test_run_emits_result_on_success(self, qtbot): + """Test that run() emits result signal on success.""" + fn = Mock(return_value={"data": "test"}) + task = AsyncTask(fn) + + received = [] + task.signals.result.connect(lambda x: received.append(x)) + + task.run() + + assert len(received) == 1 + assert received[0] == {"data": "test"} + + def test_run_emits_error_on_exception(self, qtbot): + """Test that run() emits error signal on exception.""" + test_error = ValueError("test error") + fn = Mock(side_effect=test_error) + task = AsyncTask(fn) + + received = [] + task.signals.error.connect(lambda x: received.append(x)) + + task.run() + + assert len(received) == 1 + assert received[0] is test_error + + def test_run_emits_finished_on_success(self, qtbot): + """Test that run() emits finished signal on success.""" + fn = Mock(return_value="result") + task = AsyncTask(fn) + + finished_called = [] + task.signals.finished.connect(lambda: finished_called.append(True)) + + task.run() + + assert len(finished_called) == 1 + + def test_run_emits_finished_on_error(self, qtbot): + """Test that run() emits finished signal even on error.""" + fn = Mock(side_effect=ValueError("error")) + task = AsyncTask(fn) + + finished_called = [] + task.signals.finished.connect(lambda: finished_called.append(True)) + + 
task.run() + + assert len(finished_called) == 1 + + def test_run_does_not_execute_if_canceled_before_start(self, qtbot): + """Test that run() does nothing if task was canceled before starting.""" + fn = Mock(return_value="result") + task = AsyncTask(fn) + + received = [] + task.signals.result.connect(lambda x: received.append(x)) + + task.cancel() + task.run() + + fn.assert_not_called() + assert len(received) == 0 + + def test_run_does_not_emit_result_if_canceled_during_execution(self, qtbot): + """Test that run() does not emit result if canceled during execution.""" + + def slow_fn(): + # Simulate cancellation during execution + task.cancel() + return "result" + + task = AsyncTask(slow_fn) + + received = [] + task.signals.result.connect(lambda x: received.append(x)) + + task.run() + + assert len(received) == 0 + + def test_run_does_not_emit_error_if_canceled_during_execution(self, qtbot): + """Test that run() does not emit error if canceled during execution.""" + + def failing_fn(): + task.cancel() + raise ValueError("error") + + task = AsyncTask(failing_fn) + + received = [] + task.signals.error.connect(lambda x: received.append(x)) + + task.run() + + assert len(received) == 0 + + def test_auto_delete_is_enabled(self): + """Test that autoDelete is set to True.""" + fn = Mock() + task = AsyncTask(fn) + + assert task.autoDelete() is True diff --git a/test/unit/deadline_client/ui/controllers/test_deadline_controller.py b/test/unit/deadline_client/ui/controllers/test_deadline_controller.py new file mode 100644 index 000000000..bab801ca7 --- /dev/null +++ b/test/unit/deadline_client/ui/controllers/test_deadline_controller.py @@ -0,0 +1,340 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Tests for DeadlineUIController class. 
+""" + +import pytest +from unittest.mock import patch +from configparser import ConfigParser + +try: + from deadline.client.ui.controllers._deadline_controller import DeadlineUIController + from deadline.client.ui.controllers._thread_pool import DeadlineThreadPool + from qtpy.QtCore import Qt + + # Handle Qt5 vs Qt6 API differences for connection types + try: + _QueuedConnection = Qt.ConnectionType.QueuedConnection # type: ignore[attr-defined] + except AttributeError: + _QueuedConnection = Qt.QueuedConnection # type: ignore[attr-defined] +except ImportError: + pytest.importorskip("deadline.client.ui.controllers._deadline_controller") + + +class TestDeadlineUIController: + """Tests for DeadlineUIController class.""" + + def setup_method(self): + """Reset singleton and thread pool before each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.shutdown(wait_for_done=True, timeout_ms=2000) + DeadlineThreadPool.reset() + + def test_get_instance_returns_controller(self, qtbot): + """Test that getInstance returns a DeadlineUIController.""" + controller = DeadlineUIController.getInstance() + + assert isinstance(controller, DeadlineUIController) + + def test_get_instance_is_singleton(self, qtbot): + """Test that getInstance returns the same instance.""" + controller1 = DeadlineUIController.getInstance() + controller2 = DeadlineUIController.getInstance() + + assert controller1 is controller2 + + def test_reset_instance_clears_singleton(self, qtbot): + """Test that resetInstance clears the singleton.""" + controller1 = DeadlineUIController.getInstance() + DeadlineUIController.resetInstance() + controller2 = DeadlineUIController.getInstance() + + assert controller1 is not controller2 + + def test_set_config_stores_config(self, qtbot): + """Test that set_config stores the configuration.""" + controller = 
DeadlineUIController.getInstance() + + config = ConfigParser() + config["defaults"] = {"aws_profile_name": "test-profile"} + + controller.set_config(config) + + assert controller.config is not None + assert controller.config["defaults"]["aws_profile_name"] == "test-profile" + + def test_set_config_none_clears_config(self, qtbot): + """Test that set_config(None) clears the configuration.""" + controller = DeadlineUIController.getInstance() + + config = ConfigParser() + controller.set_config(config) + controller.set_config(None) + + assert controller.config is None + + def test_current_farm_id_initially_empty(self, qtbot): + """Test that current_farm_id is initially empty.""" + controller = DeadlineUIController.getInstance() + + assert controller.current_farm_id == "" + + def test_current_queue_id_initially_empty(self, qtbot): + """Test that current_queue_id is initially empty.""" + controller = DeadlineUIController.getInstance() + + assert controller.current_queue_id == "" + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_refresh_farms_emits_loading_signal(self, mock_api, qtbot): + """Test that refresh_farms emits farms_loading signal.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_farms.return_value = {"farms": []} + + loading_states = [] + controller.farms_loading.connect(lambda x: loading_states.append(x), _QueuedConnection) + + controller.refresh_farms() + + # Wait for signals + qtbot.waitUntil(lambda: len(loading_states) >= 2, timeout=2000) + + assert loading_states[0] is True # Loading started + assert loading_states[-1] is False # Loading finished + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_refresh_farms_emits_farms_updated(self, mock_api, qtbot): + """Test that refresh_farms emits farms_updated with farm list.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_farms.return_value = { + "farms": [ + {"displayName": "Farm A", "farmId": "farm-a"}, + 
{"displayName": "Farm B", "farmId": "farm-b"}, + ] + } + + farms_received = [] + controller.farms_updated.connect(lambda x: farms_received.append(x), _QueuedConnection) + + controller.refresh_farms() + + qtbot.waitUntil(lambda: len(farms_received) > 0, timeout=2000) + + assert len(farms_received) == 1 + farms = farms_received[0] + assert len(farms) == 2 + # Should be sorted by name + # Note: Qt signals may convert tuples to lists + assert list(farms[0]) == ["Farm A", "farm-a"] + assert list(farms[1]) == ["Farm B", "farm-b"] + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_refresh_farms_handles_error(self, mock_api, qtbot): + """Test that refresh_farms handles API errors gracefully.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_farms.side_effect = Exception("API Error") + + farms_received = [] + errors_received = [] + controller.farms_updated.connect(lambda x: farms_received.append(x), _QueuedConnection) + controller.operation_failed.connect( + lambda key, e: errors_received.append((key, e)), _QueuedConnection + ) + + controller.refresh_farms() + + qtbot.waitUntil(lambda: len(farms_received) > 0, timeout=2000) + + # Should emit empty list on error + assert farms_received[0] == [] + # Should emit error signal + assert len(errors_received) == 1 + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_refresh_queues_with_no_farm_emits_empty(self, mock_api, qtbot): + """Test that refresh_queues with no farm emits empty list.""" + controller = DeadlineUIController.getInstance() + + queues_received = [] + controller.queues_updated.connect(lambda x: queues_received.append(x), _QueuedConnection) + + controller.refresh_queues() + + # Should emit immediately without API call + qtbot.waitUntil(lambda: len(queues_received) > 0, timeout=1000) + + assert queues_received[0] == [] + mock_api.list_queues.assert_not_called() + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def 
test_refresh_queues_fetches_for_farm(self, mock_api, qtbot): + """Test that refresh_queues fetches queues for specified farm.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_queues.return_value = { + "queues": [ + {"displayName": "Queue 1", "queueId": "queue-1"}, + ] + } + + queues_received = [] + controller.queues_updated.connect(lambda x: queues_received.append(x), _QueuedConnection) + + controller.refresh_queues(farm_id="farm-123") + + qtbot.waitUntil(lambda: len(queues_received) > 0, timeout=2000) + + mock_api.list_queues.assert_called_once() + # Note: Qt signals may convert tuples to lists + assert list(queues_received[0][0]) == ["Queue 1", "queue-1"] + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_farm_selected_updates_current_farm(self, mock_api, qtbot): + """Test that on_farm_selected updates current_farm_id.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_queues.return_value = {"queues": []} + + controller.on_farm_selected("farm-123") + + assert controller.current_farm_id == "farm-123" + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_farm_selected_clears_queue(self, mock_api, qtbot): + """Test that on_farm_selected clears current_queue_id.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_queues.return_value = {"queues": []} + + controller._current_queue_id = "old-queue" + controller.on_farm_selected("farm-123") + + assert controller.current_queue_id == "" + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_farm_selected_triggers_queue_refresh(self, mock_api, qtbot): + """Test that on_farm_selected triggers queue refresh.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_queues.return_value = {"queues": []} + + queues_received = [] + controller.queues_updated.connect(lambda x: queues_received.append(x), _QueuedConnection) + + controller.on_farm_selected("farm-123") + + 
qtbot.waitUntil(lambda: len(queues_received) > 0, timeout=2000) + + mock_api.list_queues.assert_called_once() + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_farm_selected_clears_dependent_data(self, mock_api, qtbot): + """Test that on_farm_selected clears storage profiles and queue params.""" + controller = DeadlineUIController.getInstance() + + mock_api.list_queues.return_value = {"queues": []} + + storage_profiles_received = [] + queue_params_received = [] + controller.storage_profiles_updated.connect( + lambda x: storage_profiles_received.append(x), _QueuedConnection + ) + controller.queue_parameters_updated.connect( + lambda x: queue_params_received.append(x), _QueuedConnection + ) + + controller.on_farm_selected("farm-123") + + qtbot.waitUntil(lambda: len(storage_profiles_received) > 0, timeout=1000) + qtbot.waitUntil(lambda: len(queue_params_received) > 0, timeout=1000) + + assert storage_profiles_received[0] == [] + assert queue_params_received[0] == [] + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_farm_selected_ignores_same_farm(self, mock_api, qtbot): + """Test that on_farm_selected does nothing if farm unchanged.""" + controller = DeadlineUIController.getInstance() + + controller._current_farm_id = "farm-123" + + controller.on_farm_selected("farm-123") + + mock_api.list_queues.assert_not_called() + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_queue_selected_updates_current_queue(self, mock_api, qtbot): + """Test that on_queue_selected updates current_queue_id.""" + controller = DeadlineUIController.getInstance() + + controller._current_farm_id = "farm-123" + mock_api.list_storage_profiles_for_queue.return_value = {"storageProfiles": []} + mock_api.get_queue_parameter_definitions.return_value = [] + + controller.on_queue_selected("queue-456") + + assert controller.current_queue_id == "queue-456" + + 
@patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_queue_selected_triggers_storage_profile_refresh(self, mock_api, qtbot): + """Test that on_queue_selected triggers storage profile refresh.""" + controller = DeadlineUIController.getInstance() + + controller._current_farm_id = "farm-123" + mock_api.list_storage_profiles_for_queue.return_value = {"storageProfiles": []} + mock_api.get_queue_parameter_definitions.return_value = [] + + storage_received = [] + controller.storage_profiles_updated.connect( + lambda x: storage_received.append(x), _QueuedConnection + ) + + controller.on_queue_selected("queue-456") + + qtbot.waitUntil(lambda: len(storage_received) > 0, timeout=2000) + + mock_api.list_storage_profiles_for_queue.assert_called_once() + + @patch("deadline.client.ui.controllers._deadline_controller.api") + def test_on_queue_selected_triggers_queue_params_refresh(self, mock_api, qtbot): + """Test that on_queue_selected triggers queue parameters refresh.""" + controller = DeadlineUIController.getInstance() + + controller._current_farm_id = "farm-123" + mock_api.list_storage_profiles_for_queue.return_value = {"storageProfiles": []} + mock_api.get_queue_parameter_definitions.return_value = [{"name": "param1"}] + + params_received = [] + controller.queue_parameters_updated.connect( + lambda x: params_received.append(x), _QueuedConnection + ) + + controller.on_queue_selected("queue-456") + + qtbot.waitUntil(lambda: len(params_received) > 0, timeout=2000) + + mock_api.get_queue_parameter_definitions.assert_called_once() + assert params_received[0] == [{"name": "param1"}] + + def test_shutdown_cancels_operations(self, qtbot): + """Test that shutdown cancels pending operations.""" + controller = DeadlineUIController.getInstance() + + # Verify shutdown doesn't raise + controller.shutdown() + + def test_cancel_all_operations(self, qtbot): + """Test that cancel_all_operations cancels pending operations.""" + controller = 
DeadlineUIController.getInstance() + + # Verify cancel_all_operations doesn't raise + controller.cancel_all_operations() diff --git a/test/unit/deadline_client/ui/controllers/test_thread_pool.py b/test/unit/deadline_client/ui/controllers/test_thread_pool.py new file mode 100644 index 000000000..eb26f5de8 --- /dev/null +++ b/test/unit/deadline_client/ui/controllers/test_thread_pool.py @@ -0,0 +1,104 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Tests for DeadlineThreadPool class. +""" + +import pytest + +try: + from deadline.client.ui.controllers._thread_pool import DeadlineThreadPool + from qtpy.QtCore import QThreadPool +except ImportError: + pytest.importorskip("deadline.client.ui.controllers._thread_pool") + + +class TestDeadlineThreadPool: + """Tests for DeadlineThreadPool class.""" + + def setup_method(self): + """Reset the singleton before each test.""" + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineThreadPool.reset() + + def test_instance_returns_qthreadpool(self): + """Test that instance() returns a QThreadPool.""" + pool = DeadlineThreadPool.instance() + + assert isinstance(pool, QThreadPool) + + def test_instance_is_singleton(self): + """Test that instance() returns the same object on multiple calls.""" + pool1 = DeadlineThreadPool.instance() + pool2 = DeadlineThreadPool.instance() + + assert pool1 is pool2 + + def test_default_max_threads(self): + """Test that default max thread count is set correctly.""" + pool = DeadlineThreadPool.instance() + + assert pool.maxThreadCount() == DeadlineThreadPool.DEFAULT_MAX_THREADS + + def test_set_max_threads(self): + """Test that set_max_threads updates the thread count.""" + DeadlineThreadPool.set_max_threads(8) + + pool = DeadlineThreadPool.instance() + assert pool.maxThreadCount() == 8 + + def test_set_max_threads_invalid_value(self): + """Test that set_max_threads raises ValueError for invalid count.""" + with 
pytest.raises(ValueError, match="Thread count must be at least 1"): + DeadlineThreadPool.set_max_threads(0) + + with pytest.raises(ValueError, match="Thread count must be at least 1"): + DeadlineThreadPool.set_max_threads(-1) + + def test_active_thread_count_initially_zero(self): + """Test that active_thread_count is 0 when no tasks are running.""" + # Don't create instance yet + DeadlineThreadPool.reset() + + assert DeadlineThreadPool.active_thread_count() == 0 + + def test_shutdown_returns_true_when_no_pool(self): + """Test that shutdown returns True when no pool exists.""" + DeadlineThreadPool.reset() + + result = DeadlineThreadPool.shutdown() + + assert result is True + + def test_shutdown_clears_instance(self): + """Test that shutdown clears the singleton instance.""" + # Create the instance + pool1 = DeadlineThreadPool.instance() + + # Shutdown + DeadlineThreadPool.shutdown() + + # Get instance again - should be a new one + pool2 = DeadlineThreadPool.instance() + + assert pool1 is not pool2 + + def test_reset_clears_instance(self): + """Test that reset clears the singleton instance.""" + pool1 = DeadlineThreadPool.instance() + + DeadlineThreadPool.reset() + + pool2 = DeadlineThreadPool.instance() + + assert pool1 is not pool2 + + def test_instance_not_global_pool(self): + """Test that our pool is separate from Qt's global pool.""" + our_pool = DeadlineThreadPool.instance() + global_pool = QThreadPool.globalInstance() + + assert our_pool is not global_pool diff --git a/test/unit/deadline_client/ui/dialogs/test_help_dialog.py b/test/unit/deadline_client/ui/dialogs/test_help_dialog.py index 4e0c12012..5476251cd 100644 --- a/test/unit/deadline_client/ui/dialogs/test_help_dialog.py +++ b/test/unit/deadline_client/ui/dialogs/test_help_dialog.py @@ -222,10 +222,13 @@ def test_hard_coded_documentation_links_display(self, qtbot): # Get the layout and find all QLabel widgets layout = help_dialog.layout() labels = [] - for i in range(layout.count()): - widget = 
layout.itemAt(i).widget() - if isinstance(widget, QLabel): - labels.append(widget) + if layout is not None: + for i in range(layout.count()): + item = layout.itemAt(i) + if item is not None: + widget = item.widget() + if isinstance(widget, QLabel): + labels.append(widget) # Filter labels that contain documentation links (have href tags) doc_labels = [label for label in labels if " 0, timeout=2000) + + mock_create_job.assert_called_once() + call_kwargs = mock_create_job.call_args[1] + assert call_kwargs["job_bundle_dir"] == "/path/to/bundle" + assert call_kwargs["submitter_name"] == "TestSubmitter" + assert succeeded_results[0] == "job-123" + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_run_emits_succeeded_on_success(self, mock_create_job, qtbot): + """Test that run() emits succeeded signal on success.""" + worker = JobSubmissionWorker() + mock_create_job.return_value = "job-456" + + succeeded_results = [] + worker.succeeded.connect(lambda x: succeeded_results.append(x), _QueuedConnection) + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(succeeded_results) > 0, timeout=2000) + + assert succeeded_results[0] == "job-456" + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_run_emits_failed_on_exception(self, mock_create_job, qtbot): + """Test that run() emits failed signal on exception.""" + worker = JobSubmissionWorker() + test_error = ValueError("Test error") + mock_create_job.side_effect = test_error + + failed_results = [] + worker.failed.connect(lambda x: failed_results.append(x), _QueuedConnection) + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(failed_results) > 0, timeout=2000) + + assert failed_results[0] is test_error + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def 
test_run_does_not_emit_succeeded_when_canceled(self, mock_create_job, qtbot): + """Test that run() does not emit succeeded when canceled.""" + worker = JobSubmissionWorker() + mock_create_job.return_value = "job-789" + + succeeded_results = [] + worker.succeeded.connect(lambda x: succeeded_results.append(x), _QueuedConnection) + + # Cancel before starting + worker.cancel() + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + worker.wait(2000) + + # Process events to ensure any signals would be delivered + from qtpy.QtCore import QCoreApplication + + QCoreApplication.processEvents() + + assert len(succeeded_results) == 0 + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_run_does_not_emit_failed_when_canceled(self, mock_create_job, qtbot): + """Test that run() does not emit failed when canceled.""" + worker = JobSubmissionWorker() + test_error = ValueError("Test error") + mock_create_job.side_effect = test_error + + failed_results = [] + worker.failed.connect(lambda x: failed_results.append(x), _QueuedConnection) + + # Cancel before starting + worker.cancel() + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + worker.wait(2000) + + # Process events to ensure any signals would be delivered + from qtpy.QtCore import QCoreApplication + + QCoreApplication.processEvents() + + assert len(failed_results) == 0 + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_print_callback_emits_signal(self, mock_create_job, qtbot): + """Test that print callback emits print_message signal.""" + worker = JobSubmissionWorker() + + print_messages = [] + worker.print_message.connect(lambda x: print_messages.append(x), _QueuedConnection) + + def capture_callback(**kwargs): + callback = kwargs.get("print_function_callback") + if callback: + callback("Test message") + return "job-123" + + mock_create_job.side_effect = capture_callback + + 
worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(print_messages) > 0, timeout=2000) + + assert "Test message" in print_messages + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_hashing_callback_emits_signal(self, mock_create_job, qtbot): + """Test that hashing callback emits hashing_progress signal.""" + worker = JobSubmissionWorker() + + progress_reports = [] + worker.hashing_progress.connect(lambda x: progress_reports.append(x), _QueuedConnection) + + mock_metadata = MagicMock() + mock_metadata.progress = 50.0 + mock_metadata.progressMessage = "Hashing..." + + def capture_callback(**kwargs): + callback = kwargs.get("hashing_progress_callback") + if callback: + callback(mock_metadata) + return "job-123" + + mock_create_job.side_effect = capture_callback + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(progress_reports) > 0, timeout=2000) + + assert progress_reports[0] is mock_metadata + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_upload_callback_emits_signal(self, mock_create_job, qtbot): + """Test that upload callback emits upload_progress signal.""" + worker = JobSubmissionWorker() + + progress_reports = [] + worker.upload_progress.connect(lambda x: progress_reports.append(x), _QueuedConnection) + + mock_metadata = MagicMock() + mock_metadata.progress = 75.0 + mock_metadata.progressMessage = "Uploading..." 
+ + def capture_callback(**kwargs): + callback = kwargs.get("upload_progress_callback") + if callback: + callback(mock_metadata) + return "job-123" + + mock_create_job.side_effect = capture_callback + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(progress_reports) > 0, timeout=2000) + + assert progress_reports[0] is mock_metadata + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_check_canceled_callback_returns_not_canceled(self, mock_create_job, qtbot): + """Test that check_canceled callback returns correct value.""" + worker = JobSubmissionWorker() + + callback_results = [] + + def capture_callback(**kwargs): + callback = kwargs.get("create_job_result_callback") + if callback: + callback_results.append(callback()) + return "job-123" + + mock_create_job.side_effect = capture_callback + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(callback_results) > 0, timeout=2000) + + # Should return True (not canceled) + assert callback_results[0] is True + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_hashing_callback_returns_false_when_canceled(self, mock_create_job, qtbot): + """Test that hashing callback returns False when canceled.""" + worker = JobSubmissionWorker() + + callback_results = [] + mock_metadata = MagicMock() + + def capture_callback(**kwargs): + callback = kwargs.get("hashing_progress_callback") + if callback: + # Cancel during callback + worker.cancel() + result = callback(mock_metadata) + callback_results.append(result) + return "job-123" + + mock_create_job.side_effect = capture_callback + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + worker.wait(2000) + + # Should return False (canceled) + assert len(callback_results) > 0 + assert callback_results[0] is False + + 
@patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_confirmation_callback_emits_signal_and_waits(self, mock_create_job, qtbot): + """Test that confirmation callback emits signal and waits for response.""" + worker = JobSubmissionWorker() + + confirmation_requests = [] + worker.confirmation_requested.connect( + lambda msg, default: confirmation_requests.append((msg, default)), _QueuedConnection + ) + + def capture_callback(**kwargs): + callback = kwargs.get("interactive_confirmation_callback") + if callback: + # This will block until set_confirmation_result is called + # We'll set it from the main thread via signal handler + import threading + + def respond_later(): + import time + + time.sleep(0.1) + worker.set_confirmation_result(True) + + threading.Thread(target=respond_later).start() + result = callback("Confirm?", True) + return "job-123" if result else None + return "job-123" + + mock_create_job.side_effect = capture_callback + + succeeded_results = [] + worker.succeeded.connect(lambda x: succeeded_results.append(x), _QueuedConnection) + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + + qtbot.waitUntil(lambda: len(succeeded_results) > 0, timeout=3000) + + assert len(confirmation_requests) > 0 + assert confirmation_requests[0] == ("Confirm?", True) + assert succeeded_results[0] == "job-123" + + @patch("deadline.client.ui.dialogs._job_submission_worker._api.create_job_from_job_bundle") + def test_confirmation_callback_returns_false_when_canceled(self, mock_create_job, qtbot): + """Test that confirmation callback returns False when canceled during wait.""" + worker = JobSubmissionWorker() + + callback_results = [] + + def capture_callback(**kwargs): + callback = kwargs.get("interactive_confirmation_callback") + if callback: + # Cancel while waiting for confirmation + import threading + + def cancel_later(): + import time + + time.sleep(0.1) + worker.cancel() + + 
threading.Thread(target=cancel_later).start() + result = callback("Confirm?", True) + callback_results.append(result) + return "job-123" + + mock_create_job.side_effect = capture_callback + + worker.set_submission_kwargs(job_bundle_dir="/path") + worker.start() + worker.wait(2000) + + # Should return False (canceled) + assert len(callback_results) > 0 + assert callback_results[0] is False diff --git a/test/unit/deadline_client/ui/dialogs/test_submit_job_to_deadline_dialog.py b/test/unit/deadline_client/ui/dialogs/test_submit_job_to_deadline_dialog.py index 5675474bd..478e7e05d 100644 --- a/test/unit/deadline_client/ui/dialogs/test_submit_job_to_deadline_dialog.py +++ b/test/unit/deadline_client/ui/dialogs/test_submit_job_to_deadline_dialog.py @@ -1,67 +1,115 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -from unittest.mock import MagicMock, patch +from configparser import ConfigParser +from unittest.mock import MagicMock, PropertyMock, patch import pytest try: - from deadline.client.ui.dialogs.submit_job_to_deadline_dialog import SubmitJobToDeadlineDialog + from qtpy.QtWidgets import QWidget from deadline.client.ui.dataclasses import JobBundleSettings from deadline.client.job_bundle.submission import AssetReferences except ImportError: pytest.importorskip("deadline.client.ui.dialogs.submit_job_to_deadline_dialog") +class MockJobSettingsWidget(QWidget): + """A mock job settings widget that is a real QWidget.""" + + def __init__(self, initial_settings=None, parent=None): + super().__init__(parent) + self.initial_settings = initial_settings + self.parameter_changed = MagicMock() + self.parameter_changed.connect = MagicMock() + + def update_settings(self, settings): + pass + + @pytest.fixture -def mock_job_settings_widget(): - """Create a mock job settings widget type.""" - widget = MagicMock() - widget.return_value = MagicMock() - widget.return_value.parameter_changed = MagicMock() - widget.return_value.parameter_changed.connect = MagicMock() 
- return widget - - -@patch("deadline.client.ui.dialogs.submit_job_to_deadline_dialog.DeadlineAuthenticationStatus") -def test_load_bundle_button_shown_when_browse_enabled( - mock_auth_status, qtbot, mock_job_settings_widget -): +def mock_auth_status(): + """Create a mock DeadlineAuthenticationStatus that prevents API calls.""" + mock_instance = MagicMock() + # Use PropertyMock to ensure these return None, not MagicMock + type(mock_instance).api_availability = PropertyMock(return_value=None) + type(mock_instance).creds_source = PropertyMock(return_value=None) + type(mock_instance).auth_status = PropertyMock(return_value=None) + # Provide a real ConfigParser for the config attribute + mock_instance.config = ConfigParser() + # Mock the signals + mock_instance.api_availability_changed = MagicMock() + mock_instance.api_availability_changed.connect = MagicMock() + mock_instance.creds_source_changed = MagicMock() + mock_instance.creds_source_changed.connect = MagicMock() + mock_instance.auth_status_changed = MagicMock() + mock_instance.auth_status_changed.connect = MagicMock() + return mock_instance + + +def test_load_bundle_button_shown_when_browse_enabled(qtbot, mock_auth_status): """Test that the Load a different job bundle button is shown when browse_enabled is True.""" - mock_auth_status.getInstance.return_value = MagicMock() + # Reset the singleton + import deadline.client.ui.deadline_authentication_status as auth_module + + auth_module._deadline_authentication_status = mock_auth_status + + # Also patch the widget's getInstance call + with patch( + "deadline.client.ui.widgets.deadline_authentication_status_widget.DeadlineAuthenticationStatus.getInstance", + return_value=mock_auth_status, + ), patch( + "deadline.client.ui.dialogs.submit_job_to_deadline_dialog.DeadlineAuthenticationStatus.getInstance", + return_value=mock_auth_status, + ): + from deadline.client.ui.dialogs.submit_job_to_deadline_dialog import ( + SubmitJobToDeadlineDialog, + ) - settings = 
JobBundleSettings(browse_enabled=True) + settings = JobBundleSettings(browse_enabled=True) - dialog = SubmitJobToDeadlineDialog( - job_setup_widget_type=mock_job_settings_widget, - initial_job_settings=settings, - initial_shared_parameter_values={}, - auto_detected_attachments=AssetReferences(), - attachments=AssetReferences(), - on_create_job_bundle_callback=MagicMock(), - ) - qtbot.addWidget(dialog) + dialog = SubmitJobToDeadlineDialog( + job_setup_widget_type=MockJobSettingsWidget, + initial_job_settings=settings, + initial_shared_parameter_values={}, + auto_detected_attachments=AssetReferences(), + attachments=AssetReferences(), + on_create_job_bundle_callback=MagicMock(), + ) + qtbot.addWidget(dialog) - assert hasattr(dialog, "load_bundle_button") - assert dialog.load_bundle_button.text() == "Load a different job bundle" + assert hasattr(dialog, "load_bundle_button") + assert dialog.load_bundle_button.text() == "Load a different job bundle" -@patch("deadline.client.ui.dialogs.submit_job_to_deadline_dialog.DeadlineAuthenticationStatus") -def test_load_bundle_button_hidden_when_browse_disabled( - mock_auth_status, qtbot, mock_job_settings_widget -): +def test_load_bundle_button_hidden_when_browse_disabled(qtbot, mock_auth_status): """Test that the Load a different job bundle button is not shown when browse_enabled is False.""" - mock_auth_status.getInstance.return_value = MagicMock() + # Reset the singleton + import deadline.client.ui.deadline_authentication_status as auth_module + + auth_module._deadline_authentication_status = mock_auth_status + + # Also patch the widget's getInstance call + with patch( + "deadline.client.ui.widgets.deadline_authentication_status_widget.DeadlineAuthenticationStatus.getInstance", + return_value=mock_auth_status, + ), patch( + "deadline.client.ui.dialogs.submit_job_to_deadline_dialog.DeadlineAuthenticationStatus.getInstance", + return_value=mock_auth_status, + ): + from deadline.client.ui.dialogs.submit_job_to_deadline_dialog 
import ( + SubmitJobToDeadlineDialog, + ) - settings = JobBundleSettings(browse_enabled=False) + settings = JobBundleSettings(browse_enabled=False) - dialog = SubmitJobToDeadlineDialog( - job_setup_widget_type=mock_job_settings_widget, - initial_job_settings=settings, - initial_shared_parameter_values={}, - auto_detected_attachments=AssetReferences(), - attachments=AssetReferences(), - on_create_job_bundle_callback=MagicMock(), - ) - qtbot.addWidget(dialog) + dialog = SubmitJobToDeadlineDialog( + job_setup_widget_type=MockJobSettingsWidget, + initial_job_settings=settings, + initial_shared_parameter_values={}, + auto_detected_attachments=AssetReferences(), + attachments=AssetReferences(), + on_create_job_bundle_callback=MagicMock(), + ) + qtbot.addWidget(dialog) - assert not hasattr(dialog, "load_bundle_button") + assert not hasattr(dialog, "load_bundle_button") diff --git a/test/unit/deadline_client/ui/widgets/test_deadline_list_combo_boxes.py b/test/unit/deadline_client/ui/widgets/test_deadline_list_combo_boxes.py new file mode 100644 index 000000000..1cc1a4de0 --- /dev/null +++ b/test/unit/deadline_client/ui/widgets/test_deadline_list_combo_boxes.py @@ -0,0 +1,295 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +Tests for the controller-based Deadline resource list combo boxes. 
+""" + +import pytest +from unittest.mock import patch +from configparser import ConfigParser + +try: + from deadline.client.ui.widgets._deadline_list_combo_boxes import ( + DeadlineFarmListComboBoxController, + DeadlineQueueListComboBoxController, + DeadlineStorageProfileListComboBoxController, + ) + from deadline.client.ui.controllers._deadline_controller import DeadlineUIController + from deadline.client.ui.controllers._thread_pool import DeadlineThreadPool +except ImportError: + pytest.importorskip("deadline.client.ui.widgets._deadline_list_combo_boxes") + + +class TestDeadlineFarmListComboBoxController: + """Tests for DeadlineFarmListComboBoxController.""" + + def setup_method(self): + """Reset singleton and thread pool before each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.shutdown(wait_for_done=True, timeout_ms=2000) + DeadlineThreadPool.reset() + + def test_init_creates_widget(self, qtbot): + """Test that the widget can be instantiated.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + assert widget.box is not None + assert widget.refresh_button is not None + + def test_set_config_updates_controller(self, qtbot): + """Test that set_config updates the controller.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"aws_profile_name": "test-profile"} + + widget.set_config(config) + + assert widget.config is config + + def test_clear_list_empties_combobox(self, qtbot): + """Test that clear_list empties the combobox.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + # Add some items first + widget.box.addItem("Farm 1", userData="farm-1") + widget.box.addItem("Farm 2", userData="farm-2") + assert widget.count() == 2 + + widget.clear_list() + + assert widget.count() == 0 
+ + def test_handle_list_update_populates_combobox(self, qtbot): + """Test that _handle_list_update populates the combobox.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "farm-a"} # Match one of the items + widget.set_config(config) + + # Simulate list update from controller + items = [["Farm A", "farm-a"], ["Farm B", "farm-b"]] + + with patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") as mock_cf: + mock_cf.get_setting.return_value = "farm-a" + widget._handle_list_update(items) + + assert widget.count() == 2 + assert widget.box.itemText(0) == "Farm A" + assert widget.box.itemData(0) == "farm-a" + assert widget.box.itemText(1) == "Farm B" + assert widget.box.itemData(1) == "farm-b" + + def test_handle_loading_state_shows_refreshing(self, qtbot): + """Test that _handle_loading_state shows refreshing indicator.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "farm-123"} + widget.set_config(config) + + with patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") as mock_cf: + mock_cf.get_setting.return_value = "farm-123" + widget._handle_loading_state(True) + + assert widget.count() == 1 + assert widget.box.itemText(0) == "" + assert widget.box.itemData(0) == "farm-123" + assert widget.refresh_button.isEnabled() is False + + def test_handle_loading_state_enables_button_when_done(self, qtbot): + """Test that _handle_loading_state enables button when loading completes.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + widget._handle_loading_state(True) + assert not widget.refresh_button.isEnabled() + + widget._handle_loading_state(False) + assert widget.refresh_button.isEnabled() + + @patch.object(DeadlineUIController, "refresh_farms") + def test_refresh_list_calls_controller(self, mock_refresh, qtbot): + """Test 
that refresh_list calls the controller.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + widget.refresh_list() + + mock_refresh.assert_called_once() + + def test_refresh_selected_id_selects_configured_farm(self, qtbot): + """Test that refresh_selected_id selects the configured farm.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "farm-b"} + widget.set_config(config) + + # Add items + widget.box.addItem("Farm A", userData="farm-a") + widget.box.addItem("Farm B", userData="farm-b") + + with patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") as mock_cf: + mock_cf.get_setting.return_value = "farm-b" + widget.refresh_selected_id() + + assert widget.box.currentData() == "farm-b" + + def test_refresh_selected_id_adds_none_selected_if_not_found(self, qtbot): + """Test that refresh_selected_id adds none selected if ID not found.""" + widget = DeadlineFarmListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "unknown-farm"} + widget.set_config(config) + + # Add items that don't include the configured ID + widget.box.addItem("Farm A", userData="farm-a") + + widget.refresh_selected_id() + + # Should add "" and select it + assert widget.box.currentText() == "" + assert widget.box.currentData() == "" + + +class TestDeadlineQueueListComboBoxController: + """Tests for DeadlineQueueListComboBoxController.""" + + def setup_method(self): + """Reset singleton and thread pool before each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.shutdown(wait_for_done=True, timeout_ms=2000) + DeadlineThreadPool.reset() + + def test_init_creates_widget(self, qtbot): + """Test that the widget can be instantiated.""" + widget = 
DeadlineQueueListComboBoxController() + qtbot.addWidget(widget) + + assert widget.box is not None + assert widget.resource_name == "Queue" + + @patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") + @patch.object(DeadlineUIController, "refresh_queues") + def test_refresh_list_calls_controller_with_farm_id(self, mock_refresh, mock_cf, qtbot): + """Test that refresh_list calls controller with farm_id.""" + widget = DeadlineQueueListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "farm-123", "queue_id": ""} + widget.set_config(config) + + mock_cf.get_setting.return_value = "farm-123" + widget.refresh_list() + + mock_refresh.assert_called_once_with(farm_id="farm-123") + + def test_handle_list_update_populates_queues(self, qtbot): + """Test that _handle_list_update populates queues.""" + widget = DeadlineQueueListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"queue_id": "queue-1"} + widget.set_config(config) + + items = [["Queue 1", "queue-1"], ["Queue 2", "queue-2"]] + + with patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") as mock_cf: + mock_cf.get_setting.return_value = "queue-1" + widget._handle_list_update(items) + + assert widget.count() == 2 + assert widget.box.itemText(0) == "Queue 1" + + +class TestDeadlineStorageProfileListComboBoxController: + """Tests for DeadlineStorageProfileListComboBoxController.""" + + def setup_method(self): + """Reset singleton and thread pool before each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.reset() + + def teardown_method(self): + """Clean up after each test.""" + DeadlineUIController.resetInstance() + DeadlineThreadPool.shutdown(wait_for_done=True, timeout_ms=2000) + DeadlineThreadPool.reset() + + def test_init_creates_widget(self, qtbot): + """Test that the widget can be instantiated.""" + widget = 
DeadlineStorageProfileListComboBoxController() + qtbot.addWidget(widget) + + assert widget.box is not None + assert widget.resource_name == "Storage profile" + + @patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") + @patch.object(DeadlineUIController, "refresh_storage_profiles") + def test_refresh_list_calls_controller_with_ids(self, mock_refresh, mock_cf, qtbot): + """Test that refresh_list calls controller with farm and queue IDs.""" + widget = DeadlineStorageProfileListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["defaults"] = {"farm_id": "farm-123", "queue_id": "queue-456"} + config["settings"] = {"storage_profile_id": ""} + widget.set_config(config) + + # Return different values for different setting names + def get_setting_side_effect(setting_name, config=None): + if setting_name == "defaults.farm_id": + return "farm-123" + elif setting_name == "defaults.queue_id": + return "queue-456" + return "" + + mock_cf.get_setting.side_effect = get_setting_side_effect + widget.refresh_list() + + mock_refresh.assert_called_once_with(farm_id="farm-123", queue_id="queue-456") + + def test_handle_list_update_populates_profiles(self, qtbot): + """Test that _handle_list_update populates storage profiles.""" + widget = DeadlineStorageProfileListComboBoxController() + qtbot.addWidget(widget) + + config = ConfigParser() + config["settings"] = {"storage_profile_id": "profile-1"} + widget.set_config(config) + + items = [ + ["", ""], + ["Profile 1", "profile-1"], + ["Profile 2", "profile-2"], + ] + + with patch("deadline.client.ui.widgets._deadline_list_combo_boxes.config_file") as mock_cf: + mock_cf.get_setting.return_value = "profile-1" + widget._handle_list_update(items) + + assert widget.count() == 3 + assert widget.box.itemText(0) == "" + assert widget.box.itemText(1) == "Profile 1" diff --git a/test/unit/deadline_client/ui/widgets/test_host_requirements_tab.py 
b/test/unit/deadline_client/ui/widgets/test_host_requirements_tab.py index 6001fcf09..c532deb7f 100644 --- a/test/unit/deadline_client/ui/widgets/test_host_requirements_tab.py +++ b/test/unit/deadline_client/ui/widgets/test_host_requirements_tab.py @@ -59,8 +59,9 @@ def test_input_in_hardware_requirements_widget_should_be_integer_within_range(qt def test_name_in_custom_amount_widget_should_be_truncated(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) invalid_str = "a" * (AMOUNT_NAME_MAX_LENGTH + 1) widget.name_line_edit.setText(invalid_str) @@ -68,17 +69,19 @@ def test_name_in_custom_amount_widget_should_be_truncated(qtbot): def test_name_in_custom_amount_widget_should_not_allow_invalid_chars(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) - invalid_str = "" + invalid_str = "" widget.name_line_edit.setText(invalid_str) assert widget.name_line_edit.hasAcceptableInput() is False def test_name_in_custom_amount_widget_should_allow_identifiers(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) valid_identifier = "a" + (".a" * math.floor((AMOUNT_NAME_MAX_LENGTH - 1) / 2)) widget.name_line_edit.setText(valid_identifier) @@ -86,8 +89,9 @@ def test_name_in_custom_amount_widget_should_allow_identifiers(qtbot): def test_name_in_custom_amount_widget_does_not_allow_invalid_identifiers(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) valid_identifier = "a" invalid_identifier = "a" * 
(IDENTFIER_MAX_LENGTH + 1) @@ -97,8 +101,9 @@ def test_name_in_custom_amount_widget_does_not_allow_invalid_identifiers(qtbot): def test_name_in_custom_amount_widget_should_not_allow_missing_identifiers(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) missing_identifier = "a..a" widget.name_line_edit.setText(missing_identifier) @@ -106,8 +111,9 @@ def test_name_in_custom_amount_widget_should_not_allow_missing_identifiers(qtbot def test_name_in_custom_amount_widget_should_not_allow_reserved_first_identifier(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) for reserved_identifier in RESERVED_FIRST_IDENTIFIERS: widget.name_line_edit.setText(reserved_identifier) @@ -122,8 +128,9 @@ def test_name_in_custom_amount_widget_should_not_allow_reserved_first_identifier def test_value_in_custom_amount_widget_should_be_integer_within_range(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) assert widget.min_spin_box.min == 0 assert widget.min_spin_box.max == MAX_INT_VALUE @@ -144,7 +151,7 @@ def test_name_in_custom_attribute_widget_should_follow_regex_pattern(qtbot): widget = CustomAttributeWidget(MagicMock(), 1, CustomRequirementsWidget()) qtbot.addWidget(widget) - invalid_str = "" + invalid_str = "" widget.name_line_edit.setText(invalid_str) assert widget.name_line_edit.hasAcceptableInput() is False @@ -164,7 +171,7 @@ def test_value_in_custom_attribute_widget_should_follow_regex_pattern(qtbot): widget = CustomAttributeValueWidget(MagicMock(), parent_widget) qtbot.addWidget(widget) - invalid_str = "" + invalid_str = "" 
widget.line_edit.setText(invalid_str) assert widget.line_edit.hasAcceptableInput() is False @@ -235,8 +242,9 @@ def test_name_in_custom_attribute_widget_should_not_allow_reserved_first_identif def test_custom_amount_widget_includes_zero_minimum(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) widget.name_line_edit.setText("test.amount") widget.min_spin_box.setValue(0) @@ -250,8 +258,9 @@ def test_custom_amount_widget_includes_zero_minimum(qtbot): def test_custom_amount_widget_includes_zero_maximum(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) widget.name_line_edit.setText("test.amount") widget.min_spin_box.setValue(0) @@ -265,8 +274,9 @@ def test_custom_amount_widget_includes_zero_maximum(qtbot): def test_custom_amount_widget_includes_zero_only_minimum(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) widget.name_line_edit.setText("test.amount") widget.min_spin_box.setValue(0) @@ -279,8 +289,9 @@ def test_custom_amount_widget_includes_zero_only_minimum(qtbot): def test_custom_amount_widget_includes_zero_only_maximum(qtbot): - widget = CustomAmountWidget(MagicMock(), 1) - qtbot.addWidget(widget) + parent = CustomRequirementsWidget() + qtbot.addWidget(parent) + widget = CustomAmountWidget(MagicMock(), 1, parent) widget.name_line_edit.setText("test.amount") widget.max_spin_box.setValue(0)