diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6f67191c4a11..11b0e90ed3d1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -32,7 +32,7 @@ repos:
--statistics,
--max-complexity, "10",
--max-line-length, "88",
- --extend-ignore, E203 E501 C901 B007 B009 B010 B011 B028,
+ --extend-ignore, E203 E501 C901 B007 B009 B010 B011 B028 F403 F405 F401 E402,
src, doc, examples, tests
]
diff --git a/README.md b/README.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/devel/undocumented_fields/search_undocumented_fields.py b/devel/undocumented_fields/search_undocumented_fields.py
index 17a2c4f1ca01..ea856cb86fab 100644
--- a/devel/undocumented_fields/search_undocumented_fields.py
+++ b/devel/undocumented_fields/search_undocumented_fields.py
@@ -6,7 +6,6 @@
import os
import sys
-from typing import Dict, Set
import ast_comments as ast
@@ -121,7 +120,7 @@ def visit_FunctionDef(self, node):
self.classes[self.current_class].add(child.target.attr)
-def analyze_file(file_path: str) -> Dict[str, Set[str]]:
+def analyze_file(file_path: str) -> dict[str, set[str]]:
"""Analyze a Python file and return classes with their public fields."""
with open(file_path, "r", encoding="utf-8") as file:
try:
@@ -134,7 +133,7 @@ def analyze_file(file_path: str) -> Dict[str, Set[str]]:
return {}
-def analyze_package(package_path: str) -> Dict[str, Dict[str, Set[str]]]:
+def analyze_package(package_path: str) -> dict[str, dict[str, set[str]]]:
"""Analyze all Python files in a package directory."""
result = {}
@@ -160,7 +159,7 @@ def analyze_package(package_path: str) -> Dict[str, Dict[str, Set[str]]]:
return result
-def write_results(results: Dict[str, Dict[str, Set[str]]], f) -> str:
+def write_results(results: dict[str, dict[str, set[str]]], f) -> None:
"""Format the analysis results."""
for module_name, classes in sorted(results.items()):
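These hunks apply PEP 585: since Python 3.9 the builtin `dict` and `set` are subscriptable, so the `typing.Dict`/`typing.Set` aliases and the import carrying them can go. A minimal sketch of the before/after, with a stubbed body:

```python
# Pre-PEP 585 spelling, now removable along with its import:
#   from typing import Dict, Set
#   def analyze_file(file_path: str) -> Dict[str, Set[str]]: ...

# PEP 585 (Python 3.9+): subscript the builtins directly.
def analyze_file(file_path: str) -> dict[str, set[str]]:
    """Map each class found in ``file_path`` to its public fields (stub)."""
    return {"Mixer": {"rpm", "impeller_count"}}

print(analyze_file("example.py"))
```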
diff --git a/doc/changelog.d/4761.added.md b/doc/changelog.d/4761.added.md
new file mode 100644
index 000000000000..8383cf28efa5
--- /dev/null
+++ b/doc/changelog.d/4761.added.md
@@ -0,0 +1 @@
+Typing improvements
diff --git a/doc/modify_html.py b/doc/modify_html.py
index 4f653a650c79..f217ecffa1de 100644
--- a/doc/modify_html.py
+++ b/doc/modify_html.py
@@ -32,7 +32,7 @@ def modify_html(soup: BeautifulSoup) -> None:
/ "settings"
)
for html_file in html_dir.glob("*.html"):
- with open(html_file, "r", encoding="utf-8") as f:
+ with open(html_file, encoding="utf-8") as f:
soup = BeautifulSoup(f, "html.parser", from_encoding="utf-8")
modify_html(soup)
diff --git a/examples/00-fluent/DOE_ML.py b/examples/00-fluent/DOE_ML.py
index 02876c9778fe..a7eca92af93e 100644
--- a/examples/00-fluent/DOE_ML.py
+++ b/examples/00-fluent/DOE_ML.py
@@ -63,7 +63,6 @@
# flake8: noqa: E402
import os
-from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
@@ -264,8 +263,8 @@
def display_scores(scores):
"""Display scores."""
print("\nCross-Validation Scores:", scores)
- print("Mean:%0.2f" % (scores.mean()))
- print("Std. Dev.:%0.2f" % (scores.std()))
+ print(f"Mean:{scores.mean():0.2f}")
+ print(f"Std. Dev.:{scores.std():0.2f}")
def fit_and_predict(model):
@@ -282,8 +281,8 @@ def fit_and_predict(model):
test_predictions = model.predict(X_test)
print(train_predictions.shape[0])
print("\n\nCoefficient Of Determination")
- print("Train Data R2 Score: %0.3f" % (r2_score(train_predictions, y_train)))
- print("Test Data R2 Score: %0.3f" % (r2_score(test_predictions, y_test)))
+ print(f"Train Data R2 Score: {r2_score(train_predictions, y_train):0.3f}")
+ print(f"Test Data R2 Score: {r2_score(test_predictions, y_test):0.3f}")
print(
"\n\nPredictions - Ground Truth (Kelvin): ", (test_predictions - y_test), "\n"
)
@@ -451,8 +450,8 @@ def fit_and_predict(model):
test_predictions = np.ravel(test_predictions.T)
print(test_predictions.shape)
-print("\n\nTrain R2: %0.3f" % (r2_score(train_predictions, y_train)))
-print("Test R2: %0.3f" % (r2_score(test_predictions, y_test)))
+print(f"\n\nTrain R2: {r2_score(train_predictions, y_train):0.3f}")
+print(f"Test R2: {r2_score(test_predictions, y_test):0.3f}")
print("Predictions - Ground Truth (Kelvin): ", (test_predictions - y_test))
fig = plt.figure(figsize=(12, 5))
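The `%`-to-f-string conversions above are behavior-preserving: a printf spec such as `%0.3f` maps one-to-one onto the f-string format spec `:0.3f`. A quick illustration with a made-up value:

```python
score = 0.87654
print("Test R2: %0.3f" % score)   # printf-style -> Test R2: 0.877
print(f"Test R2: {score:0.3f}")   # f-string    -> Test R2: 0.877
```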
diff --git a/pyproject.toml b/pyproject.toml
index 09b5330d7235..80bdf8296919 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,6 +34,7 @@ dependencies = [
"pandas>=1.1.0,<3.0.0",
"pyansys-tools-report>=0.8.1",
"pyyaml>=6.0",
+ "typing-extensions>=4.12"
]
dynamic = ["version"]
@@ -234,3 +235,8 @@ skips = [
"B604",
"B607",
]
+
+[tool.basedpyright]
+reportUnknownMemberType = false
+reportExplicitAny = false
+reportPrivateUsage = false
\ No newline at end of file
diff --git a/src/ansys/fluent/core/__init__.py b/src/ansys/fluent/core/__init__.py
index 5e0a57b74c1d..c5b3dc81d862 100644
--- a/src/ansys/fluent/core/__init__.py
+++ b/src/ansys/fluent/core/__init__.py
@@ -22,64 +22,31 @@
"""A package providing Fluent's Solver and Meshing capabilities in Python."""
-import os
-import pydoc
-import warnings
-
# isort: off
# config must be initialized before logging setup.
-from ansys.fluent.core.module_config import config
+from ansys.fluent.core.module_config import *
# Logging has to be imported before importing other PyFluent modules
-from ansys.fluent.core.logger import set_console_logging_level # noqa: F401
+from ansys.fluent.core.logger import *
# isort: on
-from ansys.fluent.core.field_data_interfaces import ( # noqa: F401
- PathlinesFieldDataRequest,
- ScalarFieldDataRequest,
- SurfaceDataType,
- SurfaceFieldDataRequest,
- VectorFieldDataRequest,
-)
-from ansys.fluent.core.get_build_details import ( # noqa: F401
- get_build_version,
- get_build_version_string,
-)
-from ansys.fluent.core.launcher.launch_options import ( # noqa: F401
- Dimension,
- FluentLinuxGraphicsDriver,
- FluentMode,
- FluentWindowsGraphicsDriver,
- Precision,
- UIMode,
-)
-from ansys.fluent.core.launcher.launcher import ( # noqa: F401
- connect_to_fluent,
- launch_fluent,
-)
-from ansys.fluent.core.parametric import LocalParametricStudy # noqa: F401
-from ansys.fluent.core.pyfluent_warnings import ( # noqa: F401
- PyFluentDeprecationWarning,
- PyFluentUserWarning,
- warning,
-)
-from ansys.fluent.core.search import search # noqa: F401
-from ansys.fluent.core.services.batch_ops import BatchOps # noqa: F401
-from ansys.fluent.core.session import BaseSession as Fluent # noqa: F401
-from ansys.fluent.core.session_utilities import ( # noqa: F401
- Meshing,
- PrePost,
- PureMeshing,
- Solver,
- SolverAero,
- SolverIcing,
-)
-from ansys.fluent.core.streaming_services.events_streaming import * # noqa: F401, F403
-from ansys.fluent.core.utils import fldoc
-from ansys.fluent.core.utils.fluent_version import FluentVersion # noqa: F401
-from ansys.fluent.core.utils.setup_for_fluent import setup_for_fluent # noqa: F401
+from ansys.fluent.core.field_data_interfaces import *
+from ansys.fluent.core.get_build_details import *
+from ansys.fluent.core.launcher.launch_options import *
+from ansys.fluent.core.launcher.launcher import *
+from ansys.fluent.core.parametric import *
+from ansys.fluent.core.pyfluent_warnings import *
+from ansys.fluent.core.search import *
+from ansys.fluent.core.services.batch_ops import *
+from ansys.fluent.core.session import *
+from ansys.fluent.core.session import BaseSession as Fluent
+from ansys.fluent.core.session_utilities import *
+from ansys.fluent.core.streaming_services.events_streaming import *
+from ansys.fluent.core.utils import *
+from ansys.fluent.core.utils.fluent_version import *
+from ansys.fluent.core.utils.setup_for_fluent import *
__version__ = "0.38.dev2"
@@ -89,10 +56,14 @@
Build timestamp and commit hash are added to this variable during packaging.
"""
-_THIS_DIRNAME = os.path.dirname(__file__)
-_README_FILE = os.path.normpath(os.path.join(_THIS_DIRNAME, "docs", "README.rst"))
+import os as _os # noqa: E402
+import warnings as _warnings # noqa: E402
+
+
+_THIS_DIRNAME = _os.path.dirname(__file__)
+_README_FILE = _os.path.normpath(_os.path.join(_THIS_DIRNAME, "docs", "README.rst"))
-if os.path.exists(_README_FILE):
+if _os.path.exists(_README_FILE):
with open(_README_FILE, encoding="utf8") as f:
__doc__ = f.read()
@@ -112,7 +83,10 @@ def version_info() -> str:
return _VERSION_INFO if _VERSION_INFO is not None else __version__
-pydoc.text.docother = fldoc.docother.__get__(pydoc.text, pydoc.TextDoc)
+import pydoc as _pydoc # noqa: E402
+from ansys.fluent.core.utils import fldoc as _fldoc # noqa: E402
+
+_pydoc.text.docother = _fldoc.docother.__get__(_pydoc.text, _pydoc.TextDoc)
_config_by_deprecated_name = {
@@ -144,14 +118,17 @@ def version_info() -> str:
"LAUNCH_FLUENT_SKIP_PASSWORD_CHECK": "launch_fluent_skip_password_check",
}
+from typing import TYPE_CHECKING as _TYPE_CHECKING # noqa: E402
+
+if not _TYPE_CHECKING:
-def __getattr__(name: str) -> str:
- """Get the value of a deprecated configuration variable."""
- if name in _config_by_deprecated_name:
- config_name = _config_by_deprecated_name[name]
- warnings.warn(
- f"'{name}' is deprecated, use 'config.{config_name}' instead.",
- category=PyFluentDeprecationWarning,
- )
- return getattr(config, config_name)
- raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
+ def __getattr__(name: str) -> str:
+ """Get the value of a deprecated configuration variable."""
+ if name in _config_by_deprecated_name:
+ config_name = _config_by_deprecated_name[name]
+ _warnings.warn(
+ f"'{name}' is deprecated, use 'config.{config_name}' instead.",
+ category=PyFluentDeprecationWarning,
+ )
+ return getattr(config, config_name)
+ raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
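The deprecation shim kept at the bottom relies on PEP 562 module-level `__getattr__`; guarding it with `if not TYPE_CHECKING` hides the catch-all from type checkers, so unknown attribute accesses are reported instead of being silently typed as `str`. A self-contained sketch of the pattern, with illustrative (not PyFluent's real) config names:

```python
# mymodule.py -- sketch of the PEP 562 deprecation shim; names are made up.
import warnings


class _Config:
    codegen_outdir = "generated"


config = _Config()
_config_by_deprecated_name = {"CODEGEN_OUTDIR": "codegen_outdir"}


def __getattr__(name: str):
    # Invoked only when normal module attribute lookup fails (PEP 562).
    if name in _config_by_deprecated_name:
        new_name = _config_by_deprecated_name[name]
        warnings.warn(
            f"'{name}' is deprecated, use 'config.{new_name}' instead.",
            category=DeprecationWarning,
        )
        return getattr(config, new_name)
    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
```

Reading `mymodule.CODEGEN_OUTDIR` then warns and forwards to `config.codegen_outdir`; any other unknown name still raises `AttributeError`.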
diff --git a/src/ansys/fluent/core/_types.py b/src/ansys/fluent/core/_types.py
index bf074d4253e0..d7a03ed9e87b 100644
--- a/src/ansys/fluent/core/_types.py
+++ b/src/ansys/fluent/core/_types.py
@@ -31,5 +31,5 @@
import os
from typing import TypeAlias
-PathType: TypeAlias = "os.PathLike[str] | os.PathLike[bytes] | str | bytes"
+PathType: TypeAlias = "os.PathLike[str] | str"
"""Type alias for file system paths."""
diff --git a/src/ansys/fluent/core/codegen/datamodelgen.py b/src/ansys/fluent/core/codegen/datamodelgen.py
index 13ce4b32a433..706d8be3cfe6 100644
--- a/src/ansys/fluent/core/codegen/datamodelgen.py
+++ b/src/ansys/fluent/core/codegen/datamodelgen.py
@@ -28,7 +28,7 @@
from pathlib import Path
import shutil
import string
-from typing import Any, Dict
+from typing import Any
import ansys.fluent.core as pyfluent
from ansys.fluent.core import FluentMode, launch_fluent
@@ -230,7 +230,7 @@ class DataModelGenerator:
def __init__(self, version, static_infos: dict, verbose: bool = False):
self.version = version
self._server_static_infos = static_infos
- self._static_info: Dict[str, DataModelStaticInfo] = {}
+ self._static_info: dict[str, DataModelStaticInfo] = {}
self._verbose = verbose
if StaticInfoType.DATAMODEL_WORKFLOW in static_infos:
self._static_info["workflow"] = DataModelStaticInfo(
diff --git a/src/ansys/fluent/core/codegen/tuigen.py b/src/ansys/fluent/core/codegen/tuigen.py
index f0e8599094c6..ff07e72cab03 100644
--- a/src/ansys/fluent/core/codegen/tuigen.py
+++ b/src/ansys/fluent/core/codegen/tuigen.py
@@ -43,7 +43,7 @@
import shutil
import string
import subprocess
-from typing import Any, Dict
+from typing import Any
import uuid
from defusedxml.ElementTree import parse
@@ -194,7 +194,7 @@ def find_class(self, module, name):
if module == "tuigen":
renamed_module = "ansys.fluent.core.codegen.tuigen"
- return super(_RenameModuleUnpickler, self).find_class(renamed_module, name)
+ return super().find_class(renamed_module, name)
class TUIGenerator:
@@ -217,7 +217,7 @@ def __init__(
self._static_infos = static_infos
self._verbose = verbose
- def _populate_menu(self, menu: _TUIMenu, info: Dict[str, Any]):
+ def _populate_menu(self, menu: _TUIMenu, info: dict[str, Any]):
for child_menu_name, child_menu_info in sorted(info["menus"].items()):
if _is_valid_tui_menu_name(child_menu_name):
child_menu = _TUIMenu(
diff --git a/src/ansys/fluent/core/codegen/walk_api.py b/src/ansys/fluent/core/codegen/walk_api.py
index 56f405d991f7..136e5fcffd4e 100644
--- a/src/ansys/fluent/core/codegen/walk_api.py
+++ b/src/ansys/fluent/core/codegen/walk_api.py
@@ -35,13 +35,12 @@
"""
from inspect import signature
-from typing import List
import ansys.fluent.core.solver.flobject as flobject
def walk_api(
- api_cls, on_each_path, current_path: str | List[str] = "", api_item_type: str = ""
+ api_cls, on_each_path, current_path: str | list[str] = "", api_item_type: str = ""
):
"""
Recursively traverse the API hierarchy, calling `on_each_path` for each item.
diff --git a/src/ansys/fluent/core/data_model_cache.py b/src/ansys/fluent/core/data_model_cache.py
index e59275f5fd54..700c1ad620c0 100644
--- a/src/ansys/fluent/core/data_model_cache.py
+++ b/src/ansys/fluent/core/data_model_cache.py
@@ -27,7 +27,7 @@
import copy
from enum import Enum
from threading import RLock
-from typing import Any, Dict, List, Optional
+from typing import Any
from ansys.api.fluent.v0.variant_pb2 import Variant
from ansys.fluent.core.utils.fluent_version import FluentVersion
@@ -37,12 +37,12 @@
| int
| float
| str
- | List[bool]
- | List[int]
- | List[float]
- | List[str]
- | List["StateType"]
- | Dict[str, "StateType"]
+ | list[bool]
+ | list[int]
+ | list[float]
+ | list[str]
+ | list["StateType"]
+ | dict[str, "StateType"]
)
@@ -225,7 +225,7 @@ def set_config(self, rules: str, name: str, value: Any):
def _update_cache_from_variant_state(
self,
rules: str,
- source: Dict[str, StateType],
+ source: dict[str, StateType],
key: str,
state: Variant,
updater_fn,
@@ -309,7 +309,7 @@ def update_source_with_state(state_field):
def _determine_key(
self,
- source: Dict[str, StateType],
+ source: dict[str, StateType],
internal_names_as_keys: bool,
key: str,
state: Variant,
@@ -338,7 +338,7 @@ def _determine_key(
return new_key
def update_cache(
- self, rules: str, state: Variant, deleted_paths: List[str], version=None
+ self, rules: str, state: Variant, deleted_paths: list[str], version=None
):
"""Update datamodel cache from streamed state.
@@ -377,8 +377,8 @@ def update_cache(
def _process_deleted_paths(
self,
- cache: Dict[str, Any],
- deleted_paths: List[str],
+ cache: dict[str, Any],
+ deleted_paths: list[str],
internal_names_as_keys: bool,
):
"""Process and delete paths from the cache based on the deleted paths list."""
@@ -387,7 +387,7 @@ def _process_deleted_paths(
self._delete_from_cache(cache, comps, internal_names_as_keys)
def _delete_from_cache(
- self, sub_cache: Dict[str, Any], comps: List[str], internal_names_as_keys: bool
+ self, sub_cache: dict[str, Any], comps: list[str], internal_names_as_keys: bool
):
"""Recursively delete components from the cache."""
for i, comp in enumerate(comps):
@@ -407,12 +407,12 @@ def _delete_from_cache(
def _find_key_to_delete(
self,
- sub_cache: Dict[str, Any],
+ sub_cache: dict[str, Any],
comp: str,
iname: str,
is_last_component: bool,
internal_names_as_keys: bool,
- ) -> Optional[str]:
+ ) -> str | None:
"""Find the key to delete from the sub-cache."""
for k, v in sub_cache.items():
if (internal_names_as_keys and k == comp) or (
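`StateType` above is a recursive alias: the quoted `"StateType"` occurrences are forward references, which lets the alias describe arbitrarily nested state trees. For instance:

```python
StateType = (
    bool | int | float | str
    | list[bool] | list[int] | list[float] | list[str]
    | list["StateType"] | dict[str, "StateType"]
)

# Arbitrarily nested trees check against the alias:
state: "StateType" = {"zones": [{"id": 1, "active": True}], "name": "elbow"}
print(state)
```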
diff --git a/src/ansys/fluent/core/exceptions.py b/src/ansys/fluent/core/exceptions.py
index 7437e16ae08a..795fdc56ef9f 100644
--- a/src/ansys/fluent/core/exceptions.py
+++ b/src/ansys/fluent/core/exceptions.py
@@ -21,7 +21,8 @@
# SOFTWARE.
"""Custom common higher level exceptions."""
-from typing import Any, Iterable
+from collections.abc import Iterable
+from typing import Any
from ansys.fluent.core.solver.error_message import allowed_name_error_message
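Importing `Iterable` from `collections.abc` rather than `typing` is the forward-compatible spelling; the `typing` aliases have been deprecated since Python 3.9 while behaving identically in annotations. A tiny sketch with an invented function:

```python
from collections.abc import Iterable


def format_allowed(values: Iterable[str]) -> str:
    # collections.abc generics are subscriptable since Python 3.9.
    return "Allowed values: " + ", ".join(sorted(values))


print(format_allowed({"pressure", "velocity"}))
```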
diff --git a/src/ansys/fluent/core/field_data_interfaces.py b/src/ansys/fluent/core/field_data_interfaces.py
index 1928ec4d3d7e..2b2b9f4bb90c 100644
--- a/src/ansys/fluent/core/field_data_interfaces.py
+++ b/src/ansys/fluent/core/field_data_interfaces.py
@@ -22,8 +22,9 @@
"""Common interfaces for field data."""
from abc import ABC, abstractmethod
+from collections.abc import Callable
from enum import Enum
-from typing import Callable, Dict, List, NamedTuple
+from typing import NamedTuple
import warnings
import numpy as np
@@ -35,6 +36,14 @@
FluentFieldDataNamingStrategy as naming_strategy,
)
+__all__ = (
+ "PathlinesFieldDataRequest",
+ "ScalarFieldDataRequest",
+ "SurfaceDataType",
+ "SurfaceFieldDataRequest",
+ "VectorFieldDataRequest",
+)
+
_to_field_name_str = naming_strategy().to_string
@@ -50,8 +59,8 @@ class SurfaceDataType(Enum):
class SurfaceFieldDataRequest(NamedTuple):
"""Container storing parameters for surface data request."""
- data_types: List[SurfaceDataType] | List[str]
- surfaces: List[int | str | object]
+ data_types: list[SurfaceDataType] | list[str]
+ surfaces: list[int | str | object]
overset_mesh: bool | None = False
flatten_connectivity: bool = False
@@ -60,7 +69,7 @@ class ScalarFieldDataRequest(NamedTuple):
"""Container storing parameters for scalar field data request."""
field_name: str
- surfaces: List[int | str | object]
+ surfaces: list[int | str | object]
node_value: bool | None = True
boundary_value: bool | None = True
@@ -69,14 +78,14 @@ class VectorFieldDataRequest(NamedTuple):
"""Container storing parameters for vector field data request."""
field_name: str
- surfaces: List[int | str | object]
+ surfaces: list[int | str | object]
class PathlinesFieldDataRequest(NamedTuple):
"""Container storing parameters for path-lines field data request."""
field_name: str
- surfaces: List[int | str | object]
+ surfaces: list[int | str | object]
additional_field_name: str = ""
provide_particle_time_field: bool | None = False
node_value: bool | None = True
@@ -111,8 +120,8 @@ class BaseFieldInfo(ABC):
@abstractmethod
def get_scalar_field_range(
- self, field: str, node_value: bool = False, surface_ids: List[int] = None
- ) -> List[float]:
+ self, field: str, node_value: bool = False, surface_ids: list[int] | None = None
+ ) -> list[float]:
"""
Retrieve the range (minimum and maximum values) of a scalar field.
@@ -129,7 +138,7 @@ def get_scalar_field_range(
pass
@abstractmethod
- def get_scalar_fields_info(self) -> Dict[str, Dict]:
+ def get_scalar_fields_info(self) -> dict[str, dict]:
"""
Retrieve information about available scalar fields.
@@ -142,7 +151,7 @@ def get_scalar_fields_info(self) -> Dict[str, Dict]:
pass
@abstractmethod
- def get_vector_fields_info(self) -> Dict[str, Dict]:
+ def get_vector_fields_info(self) -> dict[str, dict]:
""" "
Retrieve information about available vector fields.
@@ -155,7 +164,7 @@ def get_vector_fields_info(self) -> Dict[str, Dict]:
pass
@abstractmethod
- def get_surfaces_info(self) -> Dict[str, Dict]:
+ def get_surfaces_info(self) -> dict[str, dict]:
"""
Retrieve information about available surfaces.
@@ -184,7 +193,7 @@ class BaseFieldDataSource(ABC):
"""
@abstractmethod
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Retrieve a list of surface IDs based on input surface names or numerical identifiers."""
pass
@@ -197,7 +206,7 @@ def get_field_data(
| VectorFieldDataRequest
| PathlinesFieldDataRequest
),
- ) -> Dict[int | str, Dict | np.array]:
+ ) -> dict[int | str, dict | np.ndarray]:
"""
Retrieve the field data for a given request.
@@ -253,7 +262,7 @@ class FieldBatch(ABC):
"""
@abstractmethod
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Retrieve a list of surface IDs based on input surface names or numerical identifiers."""
pass
@@ -298,7 +307,7 @@ def allowed_values(self):
"""Lists available surface names."""
return list(self._allowed_surface_names())
- def validate(self, surfaces: List[str]) -> bool:
+ def validate(self, surfaces: list[str]) -> bool:
"""
Validate that the given surfaces are in the list of allowed surface names.
@@ -331,7 +340,7 @@ def allowed_values(self):
"""Lists available surface ids."""
return self._allowed_surface_ids()
- def validate(self, surface_ids: List[int]) -> bool:
+ def validate(self, surface_ids: list[int]) -> bool:
"""
Validate that the given surface IDs are in the list of allowed surface IDs.
@@ -450,7 +459,7 @@ def valid_name(self, field_name):
class _AllowedSurfaceNames(_AllowedNames):
- def __call__(self, respect_data_valid: bool = True) -> List[str]:
+ def __call__(self, respect_data_valid: bool = True) -> list[str]:
return self._info if self._info else self._field_info._get_surfaces_info()
def valid_name(self, surface_name: str) -> str:
@@ -475,7 +484,7 @@ def valid_name(self, surface_name: str) -> str:
class _AllowedSurfaceIDs(_AllowedNames):
- def __call__(self, respect_data_valid: bool = True) -> List[int]:
+ def __call__(self, respect_data_valid: bool = True) -> list[int]:
try:
return [
info["surface_id"][0]
@@ -495,7 +504,7 @@ class _AllowedScalarFieldNames(_AllowedFieldNames):
_field_name_error = DisallowedValuesError
_field_unavailable_error = FieldUnavailable
- def __call__(self, respect_data_valid: bool = True) -> List[str]:
+ def __call__(self, respect_data_valid: bool = True) -> list[str]:
field_dict = (
self._info if self._info else self._field_info._get_scalar_fields_info()
)
@@ -514,7 +523,7 @@ class _AllowedVectorFieldNames(_AllowedFieldNames):
_field_name_error = DisallowedValuesError
_field_unavailable_error = FieldUnavailable
- def __call__(self, respect_data_valid: bool = True) -> List[str]:
+ def __call__(self, respect_data_valid: bool = True) -> list[str]:
return (
self._info
if self._info
@@ -602,10 +611,10 @@ class _ReturnFieldData:
@staticmethod
def _scalar_data(
field_name: str,
- surfaces: List[int | str | object],
- surface_ids: List[int],
- scalar_field_data: np.array,
- ) -> Dict[int | str, np.array]:
+ surfaces: list[int | str | object],
+ surface_ids: list[int],
+ scalar_field_data: np.ndarray,
+ ) -> dict[int | str, np.ndarray]:
surfaces = get_surfaces_from_objects(surfaces)
return {
surface: scalar_field_data[surface_ids[count]][field_name]
@@ -614,13 +623,13 @@ def _scalar_data(
@staticmethod
def _surface_data(
- data_types: List[SurfaceDataType],
- surfaces: List[int | str | object],
- surface_ids: List[int],
- surface_data: np.array | List[np.array],
+ data_types: list[SurfaceDataType],
+ surfaces: list[int | str | object],
+ surface_ids: list[int],
+ surface_data: np.ndarray | list[np.ndarray],
deprecated_flag: bool | None = False,
flatten_connectivity: bool = False,
- ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+ ) -> dict[int | str, dict[SurfaceDataType, np.ndarray | list[np.ndarray]]]:
surfaces = get_surfaces_from_objects(surfaces)
ret_surf_data = {}
for count, surface in enumerate(surfaces):
@@ -656,10 +665,10 @@ def _surface_data(
@staticmethod
def _vector_data(
field_name: str,
- surfaces: List[int | str | object],
- surface_ids: List[int],
- vector_field_data: np.array,
- ) -> Dict[int | str, np.array]:
+ surfaces: list[int | str | object],
+ surface_ids: list[int],
+ vector_field_data: np.ndarray,
+ ) -> dict[int | str, np.ndarray]:
surfaces = get_surfaces_from_objects(surfaces)
return {
surface: vector_field_data[surface_ids[count]][field_name].reshape(-1, 3)
@@ -669,12 +678,12 @@ def _vector_data(
@staticmethod
def _pathlines_data(
field_name: str,
- surfaces: List[int | str | object],
- surface_ids: List[int],
- pathlines_data: Dict,
+ surfaces: list[int | str | object],
+ surface_ids: list[int],
+ pathlines_data: dict,
deprecated_flag: bool | None = False,
flatten_connectivity: bool = False,
- ) -> Dict:
+ ) -> dict:
surfaces = get_surfaces_from_objects(surfaces)
path_lines_dict = {}
for count, surface in enumerate(surfaces):
@@ -711,7 +720,7 @@ def _pathlines_data(
return path_lines_dict
-def get_surfaces_from_objects(surfaces: List[int | str | object]):
+def get_surfaces_from_objects(surfaces: list[int | str | object]):
"""
Extract surface names or identifiers from a list of surfaces.
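Because `__init__.py` now re-exports with `from ... import *`, each module's `__all__` tuple becomes the public-API contract: a star import binds exactly the names listed there and nothing else. A runnable demonstration using a fabricated module:

```python
import sys
import types

# Fabricate a module with an explicit export list.
mod = types.ModuleType("fake_exports")
exec("__all__ = ('Circle',)\nclass Circle: ...\nclass Square: ...", mod.__dict__)
sys.modules["fake_exports"] = mod

ns: dict = {}
exec("from fake_exports import *", ns)
print("Circle" in ns, "Square" in ns)  # True False
```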
diff --git a/src/ansys/fluent/core/file_session.py b/src/ansys/fluent/core/file_session.py
index 8c7690a699d4..bd02ac50b59c 100644
--- a/src/ansys/fluent/core/file_session.py
+++ b/src/ansys/fluent/core/file_session.py
@@ -22,7 +22,6 @@
"""Provides a module for file session."""
-from typing import Dict, List
import warnings
from deprecated.sphinx import deprecated
@@ -95,7 +94,7 @@ class BatchFieldData:
def __init__(
self,
- data: Dict,
+ data: dict,
field_info,
allowed_surface_names,
allowed_scalar_field_names,
@@ -107,7 +106,7 @@ def __init__(
self._allowed_scalar_field_names = allowed_scalar_field_names
self._returned_data = _ReturnFieldData()
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Get a list of surface ids based on surfaces provided as inputs."""
return _get_surface_ids(
field_info=self._field_info,
@@ -117,7 +116,7 @@ def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
def _get_scalar_field_data(
self,
**kwargs,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
scalar_field_data = self.data[
(
("type", "scalar-field"),
@@ -135,7 +134,7 @@ def _get_scalar_field_data(
def _get_surface_data(
self,
**kwargs,
- ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+ ) -> dict[int | str, dict[SurfaceDataType, np.ndarray | list[np.ndarray]]]:
surface_data = self.data[(("type", "surface-data"),)]
return self._returned_data._surface_data(
kwargs.get("data_types"),
@@ -148,7 +147,7 @@ def _get_surface_data(
def _get_vector_field_data(
self,
**kwargs,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
vector_field_data = self.data[(("type", "vector-field"),)]
return self._returned_data._vector_data(
_to_vector_field_name(kwargs.get("field_name")),
@@ -160,7 +159,7 @@ def _get_vector_field_data(
def _get_pathlines_field_data(
self,
**kwargs,
- ) -> Dict:
+ ) -> dict:
if kwargs.get("zones") is None:
zones = []
del zones
@@ -188,7 +187,7 @@ def get_field_data(
| VectorFieldDataRequest
| PathlinesFieldDataRequest
),
- ) -> Dict[int | str, Dict | np.array]:
+ ) -> dict[int | str, dict | np.ndarray]:
"""Get the surface, scalar, vector or path-lines field data on a surface.
Returns
@@ -249,7 +248,7 @@ def __init__(self, file_session, field_info):
self._field_info = field_info
self._cache_requests = []
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Get a list of surface ids based on surfaces provided as inputs."""
return _get_surface_ids(
field_info=self._field_info,
@@ -275,8 +274,8 @@ def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
@deprecate_function(version="v0.25.0", new_func="add_requests")
def add_surfaces_request(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
) -> None:
"""Add request to get surface data (vertices, face connectivity, centroids, and
normals).
@@ -296,8 +295,8 @@ def add_surfaces_request(
def _add_surfaces_request(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
) -> None:
updated_types = []
for d_type in data_types:
@@ -327,7 +326,7 @@ def _add_surfaces_request(
def add_scalar_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
) -> None:
@@ -365,7 +364,7 @@ def add_scalar_fields_request(
def _add_scalar_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
) -> None:
@@ -397,7 +396,7 @@ def _add_scalar_fields_request(
def add_vector_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
) -> None:
"""Add request to get vector field data on surfaces.
@@ -422,7 +421,7 @@ def add_vector_fields_request(
def _add_vector_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
) -> None:
surface_ids = self.get_surface_ids(surfaces)
if len(self._file_session._data_file.get_phases()) > 1:
@@ -452,7 +451,7 @@ def _add_vector_fields_request(
def add_pathlines_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
):
"""Add request to get pathlines field on surfaces.
@@ -472,7 +471,7 @@ def add_pathlines_fields_request(
def _add_pathlines_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
):
raise NotImplementedError("Pathlines are not supported.")
@@ -633,7 +632,7 @@ def new_transaction(self):
"""Create a new field transaction."""
return self.new_batch()
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Get a list of surface ids based on surfaces provided as inputs."""
return _get_surface_ids(
field_info=self._field_info,
@@ -653,8 +652,8 @@ def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
@deprecate_function(version="v0.25.0", new_func="get_field_data")
def get_surface_data(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
overset_mesh: bool | None = False,
flatten_connectivity: bool = False,
):
@@ -687,8 +686,8 @@ def get_surface_data(
def _get_surface_data(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
overset_mesh: bool | None = False,
flatten_connectivity: bool = False,
):
@@ -745,7 +744,7 @@ def _get_surface_data(
def get_scalar_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
):
@@ -786,7 +785,7 @@ def get_scalar_field_data(
def _get_scalar_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
):
@@ -824,7 +823,7 @@ def _get_scalar_field_data(
def get_vector_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
):
"""Get vector field data on a surface.
@@ -857,7 +856,7 @@ def get_vector_field_data(
def _get_vector_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
):
field_name = _to_vector_field_name(field_name)
surface_ids = self.get_surface_ids(surfaces=surfaces)
@@ -898,7 +897,7 @@ def _get_vector_field_data(
def get_pathlines_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
):
"""Get the pathlines field data on a surface.
@@ -920,7 +919,7 @@ def get_pathlines_field_data(
def _get_pathlines_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
**kwargs,
):
raise NotImplementedError("Pathlines are not supported.")
@@ -933,7 +932,7 @@ def get_field_data(
| VectorFieldDataRequest
| PathlinesFieldDataRequest
),
- ) -> Dict[int | str, Dict | np.array]:
+ ) -> dict[int | str, dict | np.ndarray]:
"""Get the surface, scalar, vector or path-lines field data on a surface.
Returns
@@ -966,8 +965,8 @@ def __init__(self, file_session):
self._file_session = file_session
def get_scalar_field_range(
- self, field: str, node_value: bool = False, surface_ids: List[int] = None
- ) -> List[float]:
+ self, field: str, node_value: bool = False, surface_ids: list[int] | None = None
+ ) -> list[float]:
"""Get the range (minimum and maximum values) of the field.
Parameters
@@ -990,8 +989,8 @@ def get_scalar_field_range(
return self._get_scalar_field_range(field, node_value, surface_ids)
def _get_scalar_field_range(
- self, field: str, node_value: bool = False, surface_ids: List[int] = None
- ) -> List[float]:
+ self, field: str, node_value: bool = False, surface_ids: list[int] | None = None
+ ) -> list[float]:
minimum = None
maximum = None
if not surface_ids:
@@ -1179,8 +1178,8 @@ def field_data(self):
def _get_surface_ids(
field_info: FileFieldInfo,
- surfaces: List[int | str],
-) -> List[int]:
+ surfaces: list[int | str],
+) -> list[int]:
"""Get surface IDs based on surface names or IDs.
Parameters
diff --git a/src/ansys/fluent/core/filereader/case_file.py b/src/ansys/fluent/core/filereader/case_file.py
index 2842a2f32354..65f293a04c6b 100644
--- a/src/ansys/fluent/core/filereader/case_file.py
+++ b/src/ansys/fluent/core/filereader/case_file.py
@@ -43,7 +43,6 @@
import os
from os.path import dirname
from pathlib import Path
-from typing import Dict, List
import defusedxml.ElementTree as ET
import numpy as np
@@ -73,7 +72,7 @@ class InputParameterOld:
a string, qualified by units
"""
- def __init__(self, raw_data: List) -> None:
+ def __init__(self, raw_data: list) -> None:
"""Initialize InputParameter.
Parameters
@@ -118,7 +117,7 @@ class InputParameter:
a string, qualified by units
"""
- def __init__(self, raw_data: Dict[str, str]) -> None:
+ def __init__(self, raw_data: dict[str, str]) -> None:
"""Initialize InputParameter.
Parameters
@@ -338,7 +337,7 @@ def _get_nodes(self, surface_id):
nodes = nodes[previous : previous + np.sum(nnodes)]
return [nodes, nnodes]
- def get_connectivity(self, surface_id) -> np.array:
+ def get_connectivity(self, surface_id) -> np.ndarray:
"""Returns numpy array of face connectivity data for a particular surface."""
nodes, nnodes = self._get_nodes(surface_id)
key = nodes.copy()
@@ -354,7 +353,7 @@ def get_connectivity(self, surface_id) -> np.array:
nodes = np.insert(nodes, obj, nnodes)
return nodes
- def get_vertices(self, surface_id) -> np.array:
+ def get_vertices(self, surface_id) -> np.ndarray:
"""Returns numpy array of vertices data for a particular surface."""
nodes, nnodes = self._get_nodes(surface_id)
nodes = np.unique(nodes)
@@ -420,7 +419,7 @@ def __init__(
self._config_vars = {v[0]: v[1] for v in self._rp_vars["case-config"]}
- def input_parameters(self) -> List[InputParameter] | List[InputParameterOld]:
+ def input_parameters(self) -> list[InputParameter] | list[InputParameterOld]:
"""Get the input parameters.
Returns
@@ -443,7 +442,7 @@ def input_parameters(self) -> List[InputParameter] | List[InputParameterOld]:
except ValueError:
return [InputParameterOld(param) for param in rp_var_params]
- def output_parameters(self) -> List[OutputParameter]:
+ def output_parameters(self) -> list[OutputParameter]:
"""Get the output parameters.
Returns
@@ -573,7 +572,7 @@ def __init__(self, settings_file_name: str | None = None) -> None:
"""
if settings_file_name:
try:
- with open(settings_file_name, "r") as file:
+ with open(settings_file_name) as file:
rp_vars_str = file.read()
if not rp_vars_str.startswith("(rp ("):
raise RuntimeError("Not a valid settings file.")
@@ -733,7 +732,7 @@ def _get_processed_string(input_string: bytes) -> str:
def _get_case_file_name_from_flprj(flprj_file):
- with open(flprj_file, "r") as file:
+ with open(flprj_file) as file:
content = file.read()
content = remove_unsupported_xml_chars(content)
root = ET.fromstring(content)
diff --git a/src/ansys/fluent/core/filereader/data_file.py b/src/ansys/fluent/core/filereader/data_file.py
index 92ecf3e5aef0..362ef163a7e8 100644
--- a/src/ansys/fluent/core/filereader/data_file.py
+++ b/src/ansys/fluent/core/filereader/data_file.py
@@ -178,7 +178,7 @@ def get_cell_variables(self, phase_name) -> list:
def get_face_scalar_field_data(
self, phase_name: str, field_name: str, surface_id: int
- ) -> np.array:
+ ) -> np.ndarray:
"""Gets scalar field data for face.
Parameters
@@ -209,7 +209,9 @@ def get_face_scalar_field_data(
return field_array[min_id - array_min_id : max_id + 1 - array_min_id]
return np.zeros(max_id + 1 - min_id)
- def get_face_vector_field_data(self, phase_name: str, surface_id: int) -> np.array:
+ def get_face_vector_field_data(
+ self, phase_name: str, surface_id: int
+ ) -> np.ndarray:
"""Gets vector field data for face.
Parameters
diff --git a/src/ansys/fluent/core/fluent_connection.py b/src/ansys/fluent/core/fluent_connection.py
index c7f12b354d09..5eabc10fe153 100644
--- a/src/ansys/fluent/core/fluent_connection.py
+++ b/src/ansys/fluent/core/fluent_connection.py
@@ -35,7 +35,7 @@
import socket
import subprocess
import threading
-from typing import Any, Callable, List, Tuple, TypeVar
+from collections.abc import Callable
+from typing import Any, TypeVar
import warnings
import weakref
@@ -51,13 +51,15 @@
)
from ansys.fluent.core.launcher.launcher_utils import ComposeConfig
from ansys.fluent.core.pyfluent_warnings import InsecureGrpcWarning
-from ansys.fluent.core.services import service_creator
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.app_utilities import (
+ AppUtilities,
AppUtilitiesOld,
AppUtilitiesService,
AppUtilitiesV252,
)
-from ansys.fluent.core.services.scheme_eval import SchemeEvalService
+from ansys.fluent.core.services.health_check import HealthCheckService
+from ansys.fluent.core.services.scheme_eval import SchemeEval, SchemeEvalService
from ansys.fluent.core.utils.execution import timeout_exec, timeout_loop
from ansys.fluent.core.utils.file_transfer_service import ContainerFileTransferStrategy
from ansys.fluent.core.utils.networking import get_uds_path, is_localhost
@@ -125,7 +127,7 @@ class MonitorThread(threading.Thread):
def __init__(self):
"""Initialize MonitorThread."""
super().__init__(daemon=True)
- self.cbs: List[Callable] = []
+ self.cbs: list[Callable] = []
def run(self) -> None:
"""Run monitor thread."""
@@ -331,9 +333,7 @@ def _get_channel(
class _ConnectionInterface:
def __init__(self, create_grpc_service, error_state):
self._scheme_eval_service = create_grpc_service(SchemeEvalService, error_state)
- self.scheme_eval = service_creator("scheme_eval").create(
- self._scheme_eval_service
- )
+ self.scheme_eval = SchemeEval(self._scheme_eval_service)
self._app_utilities_service = create_grpc_service(
AppUtilitiesService, error_state
)
@@ -347,9 +347,7 @@ def __init__(self, create_grpc_service, error_state):
)
case _:
- self._app_utilities = service_creator("app_utilities").create(
- self._app_utilities_service
- )
+ self._app_utilities = AppUtilities(self._app_utilities_service)
@property
def product_build_info(self) -> str:
@@ -411,6 +409,9 @@ def _pid_exists(pid):
return True
+S = TypeVar("S", bound=ServiceProtocol)
+
+
class FluentConnection:
"""Encapsulates a Fluent connection.
@@ -420,7 +421,7 @@ class FluentConnection:
Close the Fluent connection and exit Fluent.
"""
- _on_exit_cbs: List[Callable] = []
+ _on_exit_cbs: list[Callable] = []
_id_iter = itertools.count()
_monitor_thread: MonitorThread | None = None
@@ -526,11 +527,11 @@ def __init__(
insecure_mode=insecure_mode,
inside_container=inside_container,
)
- self._metadata: List[Tuple[str, str]] = (
+ self._metadata: list[tuple[str, str]] = (
[("password", password)] if password else []
)
- self._health_check = service_creator("health_check").create(
+ self._health_check = HealthCheckService(
self._channel, self._metadata, self._error_state
)
# At this point, the server must be running. If the following check_health()
@@ -731,7 +732,7 @@ def register_finalizer_cb(self, cb, at_start=False):
else:
self.finalizer_cbs.append(cb)
- def create_grpc_service(self, service, *args):
+ def create_grpc_service(self, service: type[S], *args) -> S:
"""Create a gRPC service.
Parameters
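The new `create_grpc_service` signature uses a `TypeVar` bound to the service protocol so the static return type tracks whichever class the caller passes in. A reduced sketch; the protocol body and service class here are stand-ins, not the real `services/_protocols.py` definitions:

```python
from typing import Protocol, TypeVar


class ServiceProtocol(Protocol):
    """Stand-in for the real service protocol (assumed shape)."""


S = TypeVar("S", bound=ServiceProtocol)


def create_grpc_service(service: type[S], channel: object) -> S:
    # The return type follows the class object passed in, so call sites
    # get e.g. SchemeEvalService back instead of an untyped object.
    return service(channel)


class SchemeEvalService:
    def __init__(self, channel: object) -> None:
        self.channel = channel


svc = create_grpc_service(SchemeEvalService, object())  # inferred: SchemeEvalService
```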
diff --git a/src/ansys/fluent/core/get_build_details.py b/src/ansys/fluent/core/get_build_details.py
index 7d24afd24741..2f6cff319e2e 100644
--- a/src/ansys/fluent/core/get_build_details.py
+++ b/src/ansys/fluent/core/get_build_details.py
@@ -27,6 +27,11 @@
import ansys.fluent.core as pyfluent
+__all__ = (
+ "get_build_version",
+ "get_build_version_string",
+)
+
def get_build_version():
"""Get build version."""
diff --git a/src/ansys/fluent/core/launcher/container_launcher.py b/src/ansys/fluent/core/launcher/container_launcher.py
index ba0944a3c1a4..5a20fc390da7 100644
--- a/src/ansys/fluent/core/launcher/container_launcher.py
+++ b/src/ansys/fluent/core/launcher/container_launcher.py
@@ -35,11 +35,12 @@
>>> container_solver_session = container_solver_launcher()
"""
-import inspect
import logging
import os
import time
-from typing import Any
+from typing import Any, TypedDict
+
+from typing_extensions import Unpack
from ansys.fluent.core.fluent_connection import FluentConnection
from ansys.fluent.core.launcher.error_warning_messages import (
@@ -68,6 +69,39 @@
from ansys.fluent.core.session import _parse_server_info_file
from ansys.fluent.core.utils.fluent_version import FluentVersion
+
+class ContainerArgsWithoutDryRun(
+ TypedDict, total=False
+): # pylint: disable=missing-class-docstring
+ ui_mode: UIMode | str | None
+ graphics_driver: (
+ FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
+ )
+ product_version: FluentVersion | str | float | int | None
+ dimension: Dimension | int | None
+ precision: Precision | str | None
+ processor_count: int | None
+ start_timeout: int
+ additional_arguments: str
+ container_dict: dict[str, Any] | None
+ cleanup_on_exit: bool
+ start_transcript: bool
+ py: bool | None
+ gpu: bool | None
+ start_watchdog: bool | None
+ file_transfer_service: Any | None
+ use_docker_compose: bool | None
+ use_podman_compose: bool | None
+ certificates_folder: str | None
+ insecure_mode: bool
+
+
+class ContainerArgs(
+ ContainerArgsWithoutDryRun, total=False
+): # pylint: disable=missing-class-docstring
+ dry_run: bool
+
+
_THIS_DIR = os.path.dirname(__file__)
_OPTIONS_FILE = os.path.join(_THIS_DIR, "fluent_launcher_options.json")
logger = logging.getLogger("pyfluent.launcher")
@@ -93,29 +127,8 @@ class DockerLauncher:
def __init__(
self,
- mode: FluentMode | str | None = None,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- container_dict: dict | None = None,
- dry_run: bool = False,
- cleanup_on_exit: bool = True,
- start_transcript: bool = True,
- py: bool | None = None,
- gpu: bool | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
- use_docker_compose: bool | None = None,
- use_podman_compose: bool | None = None,
- certificates_folder: str | None = None,
- insecure_mode: bool = False,
+ mode: FluentMode | str,
+ **kwargs: Unpack[ContainerArgs],
):
"""
Launch a Fluent session in container mode.
@@ -200,25 +213,23 @@ def __init__(
# GitHub Actions runs to indicate that insecure mode should be used.
insecure_mode_env = os.getenv("PYFLUENT_CONTAINER_INSECURE_MODE") == "1"
certificates_folder, insecure_mode = get_remote_grpc_options(
- certificates_folder, insecure_mode or insecure_mode_env
+ kwargs.get("certificates_folder"),
+ kwargs.get("insecure_mode") or insecure_mode_env,
)
if certificates_folder is None and not insecure_mode:
raise ValueError(CERTIFICATES_FOLDER_NOT_PROVIDED_AT_LAUNCH)
- locals_ = locals().copy()
- argvals = {
- arg: locals_.get(arg)
- for arg in inspect.getargvalues(inspect.currentframe()).args
- }
- self.argvals, self.new_session = _get_argvals_and_session(argvals)
- if self.argvals["start_timeout"] is None:
+ self.argvals, self.new_session = _get_argvals_and_session(
+ {**kwargs, "mode": mode}
+ )
+ if self.argvals.get("start_timeout") is None:
self.argvals["start_timeout"] = 60
- self.file_transfer_service = file_transfer_service
+ self.file_transfer_service = kwargs.get("file_transfer_service")
if self.argvals["mode"] == FluentMode.SOLVER_ICING:
self.argvals["fluent_icing"] = True
- if self.argvals["container_dict"] is None:
+ if self.argvals.get("container_dict") is None:
self.argvals["container_dict"] = {}
- if self.argvals["product_version"]:
+ if "product_version" in self.argvals:
self.argvals["container_dict"][
"image_tag"
] = f"v{FluentVersion(self.argvals['product_version']).value}"
@@ -226,6 +237,9 @@ def __init__(
self._args = _build_fluent_launch_args_string(**self.argvals).split()
if FluentMode.is_meshing(self.argvals["mode"]):
self._args.append(" -meshing")
+
+ use_docker_compose = kwargs.get("use_docker_compose")
+ use_podman_compose = kwargs.get("use_podman_compose")
self._compose_config = ComposeConfig(use_docker_compose, use_podman_compose)
fluent_image_tag = os.getenv("FLUENT_IMAGE_TAG")
# There is an issue in passing gRPC arguments to Fluent image version 24.1.0 during github runs.
@@ -242,7 +256,6 @@ def __init__(
self._args.append(" -grpc-certs-folder=/tmp/certs")
def __call__(self):
-
if self.argvals["dry_run"]:
config_dict, *_ = configure_container_dict(
self._args,
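`**kwargs: Unpack[ContainerArgs]` is PEP 692 typing: the checker validates keyword names and value types against the TypedDict, while the runtime still receives a plain `dict`, which is why the body reads options with `.get()`. A toy version of the pattern:

```python
from typing import TypedDict

from typing_extensions import Unpack  # typing.Unpack on Python 3.12+


class ContainerArgs(TypedDict, total=False):
    processor_count: int
    cleanup_on_exit: bool


def launch(mode: str, **kwargs: Unpack[ContainerArgs]) -> None:
    # With total=False every key may be absent, hence .get() rather than [].
    print(mode, kwargs.get("processor_count", 1), kwargs.get("cleanup_on_exit", True))


launch("solver", processor_count=4)      # OK
# launch("solver", procesor_count=4)     # typo: rejected by the type checker
```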
diff --git a/src/ansys/fluent/core/launcher/fluent_container.py b/src/ansys/fluent/core/launcher/fluent_container.py
index 4be35d9d3bea..7a7507b62869 100644
--- a/src/ansys/fluent/core/launcher/fluent_container.py
+++ b/src/ansys/fluent/core/launcher/fluent_container.py
@@ -76,7 +76,7 @@
from pathlib import Path, PurePosixPath
from pprint import pformat
import tempfile
-from typing import Any, List
+from typing import Any
import warnings
import ansys.fluent.core as pyfluent
@@ -150,7 +150,7 @@ def dict_to_str(dict: dict) -> str:
version="v0.23.0",
)
def configure_container_dict(
- args: List[str],
+ args: list[str],
mount_source: str | Path | None = None,
mount_target: str | Path | None = None,
timeout: int | None = None,
@@ -472,7 +472,7 @@ def configure_container_dict(
def start_fluent_container(
- args: List[str],
+ args: list[str],
container_dict: dict | None = None,
start_timeout: int = 60,
compose_config: ComposeConfig | None = None,
diff --git a/src/ansys/fluent/core/launcher/launch_options.py b/src/ansys/fluent/core/launcher/launch_options.py
index f963168e9f89..7808e84b7175 100644
--- a/src/ansys/fluent/core/launcher/launch_options.py
+++ b/src/ansys/fluent/core/launcher/launch_options.py
@@ -42,6 +42,15 @@
from ansys.fluent.core.utils.fluent_version import FluentVersion
import ansys.platform.instancemanagement as pypim
+__all__ = (
+ "FluentMode",
+ "UIMode",
+ "Dimension",
+ "Precision",
+ "FluentWindowsGraphicsDriver",
+ "FluentLinuxGraphicsDriver",
+)
+
class FluentEnum(Enum):
"""Provides the base class for Fluent-related enums.
@@ -382,13 +391,12 @@ def _validate_gpu(gpu: bool | list, dimension: int):
raise exceptions.GPUSolverSupportError()
-def _get_argvals_and_session(argvals):
- _validate_gpu(argvals["gpu"], argvals["dimension"])
+def _get_argvals_and_session(argvals) -> tuple[dict, type]:
+ _validate_gpu(argvals.get("gpu"), argvals.get("dimension"))
argvals["graphics_driver"] = _get_graphics_driver(
- argvals["graphics_driver"], argvals["ui_mode"]
+ argvals.get("graphics_driver"), argvals.get("ui_mode")
)
- argvals["mode"] = FluentMode(argvals["mode"])
- del argvals["self"]
+ argvals["mode"] = FluentMode(argvals.get("mode"))
new_session = argvals["mode"].get_fluent_value()
return argvals, new_session
diff --git a/src/ansys/fluent/core/launcher/launcher.py b/src/ansys/fluent/core/launcher/launcher.py
index b82544eb24f1..d94c32e81e9f 100644
--- a/src/ansys/fluent/core/launcher/launcher.py
+++ b/src/ansys/fluent/core/launcher/launcher.py
@@ -29,9 +29,11 @@
import inspect
import logging
import os
-from typing import Any, Dict
+from typing import Any, Literal, TypedDict, overload
from warnings import warn
+from typing_extensions import Required, Unpack
+
import ansys.fluent.core as pyfluent
from ansys.fluent.core._types import PathType
from ansys.fluent.core.exceptions import DisallowedValuesError
@@ -68,10 +70,16 @@
from ansys.fluent.core.session_meshing import Meshing
from ansys.fluent.core.session_pure_meshing import PureMeshing
from ansys.fluent.core.session_solver import Solver
+from ansys.fluent.core.session_solver_aero import SolverAero
from ansys.fluent.core.session_solver_icing import SolverIcing
from ansys.fluent.core.utils.deprecate import deprecate_arguments
from ansys.fluent.core.utils.fluent_version import FluentVersion
+__all__ = (
+ "create_launcher",
+ "launch_fluent",
+)
+
_THIS_DIR = os.path.dirname(__file__)
_OPTIONS_FILE = os.path.join(_THIS_DIR, "fluent_launcher_options.json")
logger = logging.getLogger("pyfluent.launcher")
@@ -96,7 +104,7 @@ def create_launcher(
Session launcher.
Raises
------
- DisallowedValuesError
+ ValueError
If an unknown Fluent launch mode is passed.
"""
launchers = {
@@ -146,6 +154,111 @@ def _version_to_dimension(old_arg_val):
return None
+class LaunchFluentArgs(
+ TypedDict, total=False
+): # pylint: disable=missing-class-docstring
+ product_version: FluentVersion | str | float | int | None
+ dimension: Dimension | int
+ precision: Precision | str
+ processor_count: int | None
+ journal_file_names: None | str | list[str]
+ start_timeout: int | None
+ additional_arguments: str
+ env: dict[str, Any] | None
+ start_container: bool | None
+ container_dict: dict[str, Any] | None
+ cleanup_on_exit: bool
+ start_transcript: bool
+ ui_mode: UIMode | str | None
+ graphics_driver: (
+ FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
+ )
+ case_file_name: str | None
+ case_data_file_name: str | None
+ lightweight_mode: bool | None
+ py: bool | None
+ gpu: bool | list[int] | None
+ cwd: str | None
+ fluent_path: str | None
+ topy: str | list | None
+ start_watchdog: bool | None
+ file_transfer_service: Any | None
+ use_docker_compose: bool
+ use_podman_compose: bool
+
+
+class SlurmSchedulerOptions(
+ TypedDict, total=False
+): # pylint: disable=missing-class-docstring
+ scheduler: Required[Literal["slurm"]]
+ scheduler_headnode: str
+ scheduler_queue: str
+ scheduler_account: str
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ mode: Literal[FluentMode.MESHING, "meshing"],
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> Meshing: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ mode: Literal[FluentMode.PURE_MESHING, "pure_meshing"],
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> PureMeshing: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ mode: Literal[FluentMode.SOLVER, "solver"] = FluentMode.SOLVER,
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> Solver: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ mode: Literal[FluentMode.SOLVER_ICING, "solver_icing"],
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> SolverIcing: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ mode: Literal[FluentMode.SOLVER_AERO, "solver_aero"],
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> SolverAero: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[False] = False,
+ scheduler_options: SlurmSchedulerOptions,
+ mode: FluentMode | str = FluentMode.SOLVER,
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> SlurmFuture: ...
+
+
+@overload
+def launch_fluent(
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+) -> dict[str, Any]: ...
+
+
def _custom_converter_gui(kwargs):
old_val = kwargs.pop("show_gui", None)
kwargs["ui_mode"] = _show_gui_to_ui_mode(old_val, **kwargs)
@@ -171,14 +284,15 @@ def _custom_converter_dimension(kwargs):
converter=_custom_converter_dimension,
)
def launch_fluent(
+ *,
product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
+ dimension: Dimension | int = Dimension.THREE,
+ precision: Precision | str = Precision.DOUBLE,
processor_count: int | None = None,
journal_file_names: None | str | list[str] = None,
- start_timeout: int = None,
+ start_timeout: int | None = None,
additional_arguments: str = "",
- env: Dict[str, Any] | None = None,
+ env: dict[str, Any] | None = None,
start_container: bool | None = None,
container_dict: dict | None = None,
dry_run: bool = False,
@@ -191,20 +305,28 @@ def launch_fluent(
case_file_name: "PathType | None" = None,
case_data_file_name: "PathType | None" = None,
lightweight_mode: bool | None = None,
- mode: FluentMode | str | None = None,
+ mode: FluentMode | str = FluentMode.SOLVER,
py: bool | None = None,
gpu: bool | list[int] | None = None,
cwd: "PathType | None" = None,
fluent_path: "PathType | None" = None,
topy: str | list | None = None,
start_watchdog: bool | None = None,
- scheduler_options: dict | None = None,
+ scheduler_options: SlurmSchedulerOptions | None = None,
file_transfer_service: Any | None = None,
- use_docker_compose: bool | None = None,
- use_podman_compose: bool | None = None,
+ use_docker_compose: bool = False,
+ use_podman_compose: bool = False,
certificates_folder: str | None = None,
insecure_mode: bool = False,
-) -> Meshing | PureMeshing | Solver | SolverIcing | SlurmFuture | dict:
+) -> (
+ Meshing
+ | PureMeshing
+ | Solver
+ | SolverIcing
+ | SolverAero
+ | SlurmFuture
+ | dict[Any, Any]
+):
"""Launch Fluent locally in server mode or connect to a running Fluent server
instance.
@@ -225,8 +347,7 @@ def launch_fluent(
in which case ``Dimension.THREE`` is used. Options are either the values of the
``Dimension`` enum (``Dimension.TWO`` or ``Dimension.THREE``) or any of ``2`` and ``3``.
precision : Precision or str, optional
- Floating point precision. The default is ``None``, in which case ``Precision.DOUBLE``
- is used. Options are either the values of the ``Precision`` enum (``Precision.SINGLE``
+ Floating point precision. The default is ``Precision.DOUBLE``. Options are either
+ the values of the ``Precision`` enum (``Precision.SINGLE``
or ``Precision.DOUBLE``) or any of ``"double"`` and ``"single"``.
processor_count : int, optional
Number of processors. The default is ``None``, in which case ``1``
@@ -291,7 +412,7 @@ def launch_fluent(
This parameter is used only when ``case_file_name`` is provided. The default is ``False``.
- mode : FluentMode or str or None, optional
+ mode : FluentMode or str, optional
Launch mode of Fluent to point to a specific session type. Can be a
- ``FluentMode`` enum member or a string. The default value is ``None``.
+ ``FluentMode`` enum member or a string. The default value is ``FluentMode.SOLVER``.
Valid string options include ``"meshing"``, ``"pure-meshing"``, and
``"solver"``.
py : bool, optional
@@ -411,6 +532,7 @@ def _mode_to_launcher_type(fluent_launch_mode: LaunchMode):
def connect_to_fluent(
+ *,
ip: str | None = None,
port: int | None = None,
address: str | None = None,
@@ -423,7 +545,7 @@ def connect_to_fluent(
insecure_mode: bool = False,
start_watchdog: bool | None = None,
file_transfer_service: Any | None = None,
-) -> Meshing | PureMeshing | Solver | SolverIcing:
+) -> Meshing | PureMeshing | Solver | SolverIcing | SolverAero:
"""Connect to an existing Fluent server instance.
Parameters
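The `@overload` stack gives `launch_fluent` mode-dependent return types: a `Literal` mode value narrows the result to the matching session class, and `dry_run=True` selects the `dict` overload. The same technique in miniature:

```python
from typing import Literal, overload


class Solver: ...
class Meshing: ...


@overload
def launch(mode: Literal["meshing"]) -> Meshing: ...
@overload
def launch(mode: Literal["solver"] = "solver") -> Solver: ...
def launch(mode: str = "solver") -> Solver | Meshing:
    # One runtime implementation; the overloads exist purely for the checker.
    return Meshing() if mode == "meshing" else Solver()


s = launch()           # checker infers Solver
m = launch("meshing")  # checker infers Meshing
```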
diff --git a/src/ansys/fluent/core/launcher/launcher_utils.py b/src/ansys/fluent/core/launcher/launcher_utils.py
index 76df08511780..0581272dd4c1 100644
--- a/src/ansys/fluent/core/launcher/launcher_utils.py
+++ b/src/ansys/fluent/core/launcher/launcher_utils.py
@@ -29,7 +29,7 @@
import socket
import subprocess
import time
-from typing import Any, Dict
+from typing import Any
import warnings
from ansys.fluent.core.exceptions import InvalidArgument
@@ -92,12 +92,12 @@ def is_windows():
return platform.system() == "Windows"
-def _get_subprocess_kwargs_for_fluent(env: Dict[str, Any], argvals) -> Dict[str, Any]:
+def _get_subprocess_kwargs_for_fluent(env: dict[str, Any], argvals) -> dict[str, Any]:
import ansys.fluent.core as pyfluent
scheduler_options = argvals.get("scheduler_options")
is_slurm = scheduler_options and scheduler_options["scheduler"] == "slurm"
- kwargs: Dict[str, Any] = {}
+ kwargs: dict[str, Any] = {}
if is_slurm:
kwargs.update(stdout=subprocess.PIPE)
else:
diff --git a/src/ansys/fluent/core/launcher/pim_launcher.py b/src/ansys/fluent/core/launcher/pim_launcher.py
index 15d1ffe8f07b..1cba8328b380 100644
--- a/src/ansys/fluent/core/launcher/pim_launcher.py
+++ b/src/ansys/fluent/core/launcher/pim_launcher.py
@@ -35,12 +35,13 @@
>>> pim_solver_session = pim_solver_launcher()
"""
-import inspect
import logging
import os
import tempfile
import time
-from typing import Any, Dict
+from typing import Any, TypedDict
+
+from typing_extensions import Unpack
from ansys.fluent.core.fluent_connection import FluentConnection, _get_max_c_int_limit
from ansys.fluent.core.launcher.launch_options import (
@@ -61,6 +62,29 @@
from ansys.fluent.core.utils.fluent_version import FluentVersion
import ansys.platform.instancemanagement as pypim
+
+class PIMArgs(TypedDict, total=False): # pylint: disable=missing-class-docstring
+ ui_mode: UIMode | str | None
+ graphics_driver: (
+ FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
+ )
+ product_version: FluentVersion | str | float | int | None
+ dimension: Dimension | int | None
+ precision: Precision | str | None
+ processor_count: int | None
+ start_timeout: int
+ additional_arguments: str
+ cleanup_on_exit: bool
+ start_transcript: bool
+ gpu: bool | None
+ start_watchdog: bool | None
+ file_transfer_service: Any | None
+
+
+class PIMArgsWithMode(PIMArgs, total=False): # pylint: disable=missing-class-docstring
+ mode: FluentMode | str | None
+
+
_THIS_DIR = os.path.dirname(__file__)
_OPTIONS_FILE = os.path.join(_THIS_DIR, "fluent_launcher_options.json")
logger = logging.getLogger("pyfluent.launcher")
@@ -71,22 +95,7 @@ class PIMLauncher:
def __init__(
self,
- mode: FluentMode | str | None = None,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int = Dimension.THREE,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- cleanup_on_exit: bool = True,
- start_transcript: bool = True,
- gpu: bool | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
+ **kwargs: Unpack[PIMArgsWithMode],
):
"""
Launch a Fluent session in `PIM `_ mode.
@@ -149,6 +158,9 @@ def __init__(
In job scheduler environments (e.g., SLURM, LSF, PBS), resources and compute nodes are allocated,
and core counts are queried from these environments before being passed to Fluent.
"""
+ additional_arguments = kwargs.get("additional_arguments", "")
+ start_watchdog = kwargs.get("start_watchdog")
+ file_transfer_service = kwargs.get("file_transfer_service")
if additional_arguments:
logger.warning(
@@ -161,14 +173,9 @@ def __init__(
"'start_watchdog' argument for 'launch_fluent()' method is not supported "
"when starting a remote Fluent PyPIM client."
)
- locals_ = locals().copy()
- argvals = {
- arg: locals_.get(arg)
- for arg in inspect.getargvalues(inspect.currentframe()).args
- }
- self.argvals, self.new_session = _get_argvals_and_session(argvals)
+ self.argvals, self.new_session = _get_argvals_and_session(kwargs)
self.file_transfer_service = file_transfer_service
- if self.argvals["start_timeout"] is None:
+ if self.argvals.get("start_timeout") is None:
self.argvals["start_timeout"] = 60
def __call__(self):
@@ -249,7 +256,7 @@ def launch_remote_fluent(
cleanup_on_exit: bool = True,
mode: FluentMode = FluentMode.SOLVER,
dimensionality: Dimension | int = Dimension.THREE,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
file_transfer_service: Any | None = None,
) -> Meshing | PureMeshing | Solver | SolverIcing:
"""Launch Fluent remotely using `PyPIM `.
@@ -270,8 +277,8 @@ def launch_remote_fluent(
Whether to clean up and exit Fluent when Python exits. Default is ``True``.
mode : FluentMode, optional
Launch Fluent in meshing mode. Default is ``FluentMode.SOLVER``.
- dimensionality : str, optional
- Geometric dimensionality of the Fluent simulation. Default is ``None`` (3D).
+ dimensionality : Dimension | int, optional
+ Geometric dimensionality of the Fluent simulation. The default is ``Dimension.THREE``.
file_transfer_service : optional
Service for uploading/downloading files to/from the server.
launcher_args : Any
@@ -355,7 +362,7 @@ def create_fluent_connection(
channel,
cleanup_on_exit: bool,
instance,
- launcher_args: Dict[str, Any] | None,
+ launcher_args: dict[str, Any] | None,
):
"""Create a Fluent connection."""
diff --git a/src/ansys/fluent/core/launcher/process_launch_string.py b/src/ansys/fluent/core/launcher/process_launch_string.py
index 5c5f080393bf..2973aea17c62 100644
--- a/src/ansys/fluent/core/launcher/process_launch_string.py
+++ b/src/ansys/fluent/core/launcher/process_launch_string.py
@@ -85,12 +85,12 @@ def _build_fluent_launch_args_string(**kwargs) -> str:
json_key = json.dumps(argval)
argval = fluent_map[json_key]
launch_args_string += v["fluent_format"].replace("{}", str(argval))
- additional_arguments = kwargs["additional_arguments"]
+ additional_arguments = kwargs.get("additional_arguments", "")
if additional_arguments:
launch_args_string += " " + additional_arguments
if "-t" not in additional_arguments and "-cnf=" not in additional_arguments:
parallel_options = build_parallel_options(
- load_machines(ncores=kwargs["processor_count"])
+ load_machines(ncores=kwargs.get("processor_count"))
)
if parallel_options:
launch_args_string += " " + parallel_options
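
With ``total=False`` TypedDict kwargs, keys may be legitimately absent, which is why indexing is replaced by ``.get()`` with an explicit default. A two-line illustration:

kwargs: dict = {}
assert kwargs.get("additional_arguments", "") == ""   # safe when the key is absent
# kwargs["additional_arguments"]                      # would raise KeyError
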
diff --git a/src/ansys/fluent/core/launcher/slurm_launcher.py b/src/ansys/fluent/core/launcher/slurm_launcher.py
index 4f25aa0e2415..3145bde285ee 100644
--- a/src/ansys/fluent/core/launcher/slurm_launcher.py
+++ b/src/ansys/fluent/core/launcher/slurm_launcher.py
@@ -61,6 +61,7 @@
>>> slurm_solver_session = slurm_solver_launcher()
"""
+from collections.abc import Callable
from concurrent.futures import Future, ThreadPoolExecutor
import inspect
import logging
@@ -68,7 +69,9 @@
import shutil
import subprocess
import time
-from typing import Any, Callable, Dict
+from typing import Any, Generic
+
+from typing_extensions import TypeVar
from ansys.fluent.core import config
from ansys.fluent.core._types import PathType
@@ -208,7 +211,14 @@ def cancel(job_id: int) -> None:
subprocess.run(["scancel", f"{job_id}"])
-class SlurmFuture:
+SessionT = TypeVar(
+ "SessionT",
+ bound="Meshing | PureMeshing | Solver | SolverIcing",
+ default="Meshing | PureMeshing | Solver | SolverIcing",
+)
+
+
+class SlurmFuture(Generic[SessionT]):
"""Encapsulates asynchronous launch of Fluent within a Slurm environment.
The interface is similar to Python's
@@ -320,9 +330,7 @@ def done(self) -> bool:
)
return self._get_state() in ["", "CANCELLED", "COMPLETED"]
- def result(
- self, timeout: int = None
- ) -> Meshing | PureMeshing | Solver | SolverIcing:
+ def result(self, timeout: int | None = None) -> SessionT:
"""Return the session instance corresponding to the Fluent launch. If Fluent
hasn't yet launched, then this method will wait up to timeout seconds. If Fluent
hasn't launched in timeout seconds, then a TimeoutError will be raised. If
@@ -345,7 +353,7 @@ def result(
"""
return self._future.result(timeout)
- def exception(self, timeout: int = None) -> Exception:
+ def exception(self, timeout: int | None = None) -> Exception:
"""Return the exception raised by the Fluent launch. If Fluent hasn't yet
launched, then this method will wait up to timeout seconds. If Fluent hasn't
launched in timeout seconds, then a TimeoutError will be raised. If timeout is
@@ -394,7 +402,7 @@ def __init__(
journal_file_names: None | str | list[str] = None,
start_timeout: int = -1,
additional_arguments: str = "",
- env: Dict[str, Any] | None = None,
+ env: dict[str, Any] | None = None,
cleanup_on_exit: bool = True,
start_transcript: bool = True,
case_file_name: "PathType | None" = None,
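
``SessionT`` above uses the PEP 696 ``TypeVar`` default from ``typing_extensions``, so an unparametrized ``SlurmFuture`` still resolves to the full session union for type checkers. A minimal sketch with illustrative names:

from concurrent.futures import Future
from typing import Generic

from typing_extensions import TypeVar

SessionT = TypeVar("SessionT", default=object)  # default applies when unparametrized


class DemoFuture(Generic[SessionT]):
    def __init__(self, future: "Future[SessionT]") -> None:
        self._future = future

    def result(self, timeout: float | None = None) -> SessionT:
        # For checkers, DemoFuture[int]().result() is an int; a bare
        # DemoFuture falls back to the declared default.
        return self._future.result(timeout)


f: "Future[int]" = Future()
f.set_result(42)
assert DemoFuture(f).result() == 42
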
diff --git a/src/ansys/fluent/core/launcher/standalone_launcher.py b/src/ansys/fluent/core/launcher/standalone_launcher.py
index 7812a2534586..b3957ec5a901 100644
--- a/src/ansys/fluent/core/launcher/standalone_launcher.py
+++ b/src/ansys/fluent/core/launcher/standalone_launcher.py
@@ -35,14 +35,14 @@
>>> standalone_solver_session = standalone_solver_launcher()
"""
-import inspect
import logging
import os
from pathlib import Path
import subprocess
-from typing import Any, Dict
+from typing import TYPE_CHECKING, Any, TypedDict
+
+from typing_extensions import Unpack
-from ansys.fluent.core._types import PathType
from ansys.fluent.core.launcher.error_handler import (
LaunchFluentError,
)
@@ -69,8 +69,48 @@
_get_server_info_file_names,
)
import ansys.fluent.core.launcher.watchdog as watchdog
+from ansys.fluent.core.session import BaseSession
from ansys.fluent.core.utils.fluent_version import FluentVersion
+if TYPE_CHECKING:
+ from ansys.fluent.core.launcher.launcher import LaunchFluentArgs
+
+
+class StandaloneArgsWithoutDryRun(
+ TypedDict, total=False
+): # pylint: disable=missing-class-docstring
+ product_version: FluentVersion | str | float | int | None
+ dimension: Dimension | int
+ precision: Precision | str
+ processor_count: int | None
+ journal_file_names: None | str | list[str]
+ start_timeout: int
+ additional_arguments: str
+ env: dict[str, Any] | None
+ cleanup_on_exit: bool
+ start_transcript: bool
+ ui_mode: UIMode | str | None
+ graphics_driver: (
+ FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
+ )
+ case_file_name: str | None
+ case_data_file_name: str | None
+ lightweight_mode: bool | None
+ py: bool | None
+ gpu: bool | list[int] | None
+ cwd: str | None
+ fluent_path: str | None
+ topy: str | list[Any] | None
+ start_watchdog: bool | None
+ file_transfer_service: Any | None
+
+
+class StandaloneArgs(
+ StandaloneArgsWithoutDryRun, total=False
+): # pylint: disable=missing-class-docstring
+ dry_run: bool | None
+
+
logger = logging.getLogger("pyfluent.launcher")
@@ -79,32 +119,10 @@ class StandaloneLauncher:
def __init__(
self,
- mode: FluentMode | str | None = None,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- journal_file_names: None | str | list[str] = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- env: Dict[str, Any] | None = None,
- cleanup_on_exit: bool = True,
+ *,
+ mode: FluentMode,
dry_run: bool = False,
- start_transcript: bool = True,
- case_file_name: "PathType | None" = None,
- case_data_file_name: "PathType | None" = None,
- lightweight_mode: bool | None = None,
- py: bool | None = None,
- gpu: bool | None = None,
- cwd: "PathType | None" = None,
- fluent_path: "PathType | None" = None,
- topy: str | list | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
+ **kwargs: Unpack["LaunchFluentArgs"],
):
"""
Launch a Fluent session in standalone mode.
@@ -187,26 +205,21 @@ def __init__(
"""
import ansys.fluent.core as pyfluent
- locals_ = locals().copy()
- argvals = {
- arg: locals_.get(arg)
- for arg in inspect.getargvalues(inspect.currentframe()).args
- }
- self.argvals, self.new_session = _get_argvals_and_session(argvals)
- self.file_transfer_service = file_transfer_service
+ self.argvals, self.new_session = _get_argvals_and_session(kwargs)
+ self.file_transfer_service = kwargs.get("file_transfer_service")
if pyfluent.config.show_fluent_gui:
- ui_mode = UIMode.GUI
- self.argvals["ui_mode"] = UIMode(ui_mode)
- if self.argvals["start_timeout"] is None:
+ kwargs["ui_mode"] = UIMode.GUI
+ self.argvals["ui_mode"] = UIMode(kwargs.get("ui_mode"))
+ if self.argvals.get("start_timeout") is None:
self.argvals["start_timeout"] = 60
- if self.argvals["lightweight_mode"] is None:
+ if self.argvals.get("lightweight_mode") is None:
self.argvals["lightweight_mode"] = False
fluent_version = _get_standalone_launch_fluent_version(self.argvals)
if (
fluent_version
and fluent_version >= FluentVersion.v251
- and self.argvals["py"] is None
+ and self.argvals.get("py") is None
):
self.argvals["py"] = True
@@ -224,25 +237,25 @@ def __init__(
self._sifile_last_mtime = Path(self._server_info_file_name).stat().st_mtime
self._kwargs = _get_subprocess_kwargs_for_fluent(
- self.argvals["env"], self.argvals
+ self.argvals.get("env", {}), self.argvals
)
- if self.argvals["cwd"]:
- self._kwargs.update(cwd=self.argvals["cwd"])
+ if "cwd" in self.argvals:
+ self._kwargs.update(cwd=self.argvals.get("cwd"))
self._launch_string += _build_journal_argument(
- self.argvals["topy"], self.argvals["journal_file_names"]
+ self.argvals.get("topy", []), self.argvals.get("journal_file_names")
)
if is_windows():
self._launch_cmd = self._launch_string
else:
- if self.argvals["ui_mode"] not in [UIMode.GUI, UIMode.HIDDEN_GUI]:
+ if self.argvals.get("ui_mode") not in [UIMode.GUI, UIMode.HIDDEN_GUI]:
# Using nohup to hide Fluent output from the current terminal
self._launch_cmd = "nohup " + self._launch_string + " &"
else:
self._launch_cmd = self._launch_string
- def __call__(self):
- if self.argvals["dry_run"]:
+ def __call__(self) -> tuple[str, str] | BaseSession:
+ if self.argvals.get("dry_run"):
print(f"Fluent launch string: {self._launch_string}")
return self._launch_string, self._server_info_file_name
try:
@@ -252,7 +265,7 @@ def __call__(self):
try:
_await_fluent_launch(
self._server_info_file_name,
- self.argvals["start_timeout"],
+ self.argvals.get("start_timeout", 60),
self._sifile_last_mtime,
)
except TimeoutError as ex:
@@ -266,7 +279,7 @@ def __call__(self):
process = subprocess.Popen(launch_cmd, **self._kwargs)
_await_fluent_launch(
self._server_info_file_name,
- self.argvals["start_timeout"],
+ self.argvals.get("start_timeout", 60),
self._sifile_last_mtime,
)
else:
@@ -275,15 +288,15 @@ def __call__(self):
session = self.new_session._create_from_server_info_file(
server_info_file_name=self._server_info_file_name,
file_transfer_service=self.file_transfer_service,
- cleanup_on_exit=self.argvals["cleanup_on_exit"],
- start_transcript=self.argvals["start_transcript"],
+ cleanup_on_exit=self.argvals.get("cleanup_on_exit"),
+ start_transcript=self.argvals.get("start_transcript"),
launcher_args=self.argvals,
inside_container=False,
)
session._process = process
start_watchdog = _confirm_watchdog_start(
- self.argvals["start_watchdog"],
- self.argvals["cleanup_on_exit"],
+ self.argvals.get("start_watchdog"),
+ self.argvals.get("cleanup_on_exit"),
session._fluent_connection,
)
if start_watchdog:
@@ -292,21 +305,21 @@ def __call__(self):
if len(values) == 3:
ip, port, password = values
watchdog.launch(os.getpid(), port, password, ip)
- if self.argvals["case_file_name"]:
- if FluentMode.is_meshing(self.argvals["mode"]):
- session.tui.file.read_case(self.argvals["case_file_name"])
- elif self.argvals["lightweight_mode"]:
- session.read_case_lightweight(self.argvals["case_file_name"])
+ if self.argvals.get("case_file_name"):
+ if FluentMode.is_meshing(self.argvals.get("mode")):
+ session.tui.file.read_case(self.argvals.get("case_file_name"))
+ elif self.argvals.get("lightweight_mode"):
+ session.read_case_lightweight(self.argvals.get("case_file_name"))
else:
session.settings.file.read(
file_type="case",
- file_name=self.argvals["case_file_name"],
+ file_name=self.argvals.get("case_file_name"),
)
- if self.argvals["case_data_file_name"]:
- if not FluentMode.is_meshing(self.argvals["mode"]):
+ if self.argvals.get("case_data_file_name"):
+ if not FluentMode.is_meshing(self.argvals.get("mode")):
session.settings.file.read(
file_type="case-data",
- file_name=self.argvals["case_data_file_name"],
+ file_name=self.argvals.get("case_data_file_name"),
)
else:
raise RuntimeError(
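
The deleted ``inspect.getargvalues(inspect.currentframe())`` block harvested the named parameters into a dict; with ``**kwargs`` that mapping already exists. A runnable comparison of the two styles (both functions are hypothetical):

import inspect


def old_style(a=1, b=2):
    # Fragile: depends on frame introspection and a fixed signature,
    # and is invisible to type checkers.
    locals_ = locals().copy()
    return {
        arg: locals_.get(arg)
        for arg in inspect.getargvalues(inspect.currentframe()).args
    }


def new_style(**kwargs):
    # The kwargs mapping is the argvals dict, and Unpack can type it.
    return dict(kwargs)


assert old_style() == {"a": 1, "b": 2}
assert new_style(a=1, b=2) == {"a": 1, "b": 2}
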
diff --git a/src/ansys/fluent/core/launcher/watchdog.py b/src/ansys/fluent/core/launcher/watchdog.py
index fa65073b07d9..bde47520f397 100644
--- a/src/ansys/fluent/core/launcher/watchdog.py
+++ b/src/ansys/fluent/core/launcher/watchdog.py
@@ -206,7 +206,7 @@ def launch(
else:
if watchdog_err.is_file():
with open(watchdog_err) as f:
- err_content = "Watchdog - %s" % f.read().replace("\n", "")
+ err_content = "Watchdog - {}".format(f.read().replace("\n", ""))
watchdog_err.unlink()
logger.error(err_content)
if pyfluent.config.watchdog_exception_on_error:
diff --git a/src/ansys/fluent/core/logger.py b/src/ansys/fluent/core/logger.py
index e89432241de4..e537876b71f3 100644
--- a/src/ansys/fluent/core/logger.py
+++ b/src/ansys/fluent/core/logger.py
@@ -27,6 +27,8 @@
from ansys.fluent.core import config
+__all__ = ("set_console_logging_level",)
+
_logging_file_enabled = False
@@ -103,7 +105,7 @@ def get_default_config() -> dict:
file_name = os.path.abspath(__file__)
file_dir = os.path.dirname(file_name)
yaml_path = os.path.join(file_dir, "logging_config.yaml")
- with open(yaml_path, "rt") as f:
+ with open(yaml_path) as f:
config = yaml.safe_load(f)
return config
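
The ``__all__`` tuples added in this and later modules declare the star-import surface and signal the supported public API to linters and type checkers. An illustrative module sketch (names are hypothetical):

# demo_logger.py
__all__ = ("set_console_logging_level",)


def set_console_logging_level(level: str) -> None:
    """Public: exported by `from demo_logger import *`."""


def _read_yaml_config() -> dict:
    """Private helper: excluded from star-imports and the documented API."""
    return {}
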
diff --git a/src/ansys/fluent/core/meshing/meshing_workflow.py b/src/ansys/fluent/core/meshing/meshing_workflow.py
index 09c108e1fd7a..4540a749b672 100644
--- a/src/ansys/fluent/core/meshing/meshing_workflow.py
+++ b/src/ansys/fluent/core/meshing/meshing_workflow.py
@@ -27,6 +27,7 @@
from enum import Enum
import os
+from typing import TYPE_CHECKING
from ansys.fluent.core._types import PathType
from ansys.fluent.core.services.datamodel_se import PyMenuGeneric
@@ -84,16 +85,18 @@ def __init__(
self._activate_dynamic_interface(dynamic_interface=True)
self._initialized = True
- def __getattribute__(self, item: str):
- if (
- not item.startswith("_")
- and super().__getattribute__("_initialized")
- and not getattr(self._meshing.GlobalSettings, self._identifier)()
- ):
- raise RuntimeError(
- f"'{self._name}' objects are inaccessible from other workflows."
- )
- return super().__getattribute__(item)
+ if not TYPE_CHECKING:
+
+ def __getattribute__(self, item: str):
+ if (
+ not item.startswith("_")
+ and super().__getattribute__("_initialized")
+ and not getattr(self._meshing.GlobalSettings, self._identifier)()
+ ):
+ raise RuntimeError(
+ f"'{self._name}' objects are inaccessible from other workflows."
+ )
+ return super().__getattribute__(item)
class WatertightMeshingWorkflow(MeshingWorkflow):
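
Wrapping ``__getattribute__`` in ``if not TYPE_CHECKING:`` keeps the runtime access guard while letting static checkers resolve attributes normally; a custom ``__getattribute__`` otherwise makes every attribute access opaque to them. A minimal sketch of the guard:

from typing import TYPE_CHECKING


class Guarded:
    _active = False

    if not TYPE_CHECKING:  # defined at runtime only

        def __getattribute__(self, item: str):
            if not item.startswith("_") and not object.__getattribute__(
                self, "_active"
            ):
                raise RuntimeError(f"{item!r} is inaccessible")
            return object.__getattribute__(self, item)


try:
    Guarded().data
except RuntimeError:
    pass  # blocked, as intended
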
diff --git a/src/ansys/fluent/core/module_config.py b/src/ansys/fluent/core/module_config.py
index 8650c17164fd..6b5dccc2b453 100644
--- a/src/ansys/fluent/core/module_config.py
+++ b/src/ansys/fluent/core/module_config.py
@@ -20,12 +20,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Configuration variables for PyFluent."""
+from collections.abc import Callable
import inspect
import os
from pathlib import Path
-from typing import Any, Callable, Generic, TypeVar
+from typing import Any, Generic, TypeVar
import warnings
+__all__ = ("config",)
+
+
TConfig = TypeVar("TConfig", bound="Config")
diff --git a/src/ansys/fluent/core/parametric.py b/src/ansys/fluent/core/parametric.py
index b2607773fa36..a7dda37b0c51 100644
--- a/src/ansys/fluent/core/parametric.py
+++ b/src/ansys/fluent/core/parametric.py
@@ -52,7 +52,7 @@
from math import ceil
import os
-from typing import Any, Dict
+from typing import Any
from ansys.fluent.core._types import PathType
from ansys.fluent.core.launcher.launcher import launch_fluent
@@ -61,9 +61,12 @@
BASE_DP_NAME = "Base DP"
+__all__ = ("LocalParametricStudy",)
+
+
def convert_design_point_parameter_units(
- value: Dict[str, float | int | str],
-) -> Dict[str, float | int]:
+ value: dict[str, float | int | str],
+) -> dict[str, float | int]:
"""Convert design point parameter units."""
def conv(val):
@@ -211,6 +214,7 @@ def make_input_for_studies(num_servers) -> None:
study_inputs.append(
make_input_for_study(range(range_base, range_base + count))
)
+
return study_inputs
@asynchronous
diff --git a/src/ansys/fluent/core/pyfluent_warnings.py b/src/ansys/fluent/core/pyfluent_warnings.py
index 9d2b20f2009e..77c7cf0c6cd1 100644
--- a/src/ansys/fluent/core/pyfluent_warnings.py
+++ b/src/ansys/fluent/core/pyfluent_warnings.py
@@ -24,6 +24,13 @@
import warnings
+__all__ = (
+ "PyFluentDeprecationWarning",
+ "PyFluentUserWarning",
+ "FluentDevVersionWarning",
+ "warning",
+)
+
class PyFluentDeprecationWarning(FutureWarning):
"""Provides the common warning class for warnings about deprecated PyFluent
diff --git a/src/ansys/fluent/core/rpvars.py b/src/ansys/fluent/core/rpvars.py
index 81032011e6f1..9eca4ed302a3 100644
--- a/src/ansys/fluent/core/rpvars.py
+++ b/src/ansys/fluent/core/rpvars.py
@@ -27,7 +27,7 @@
interfaces: solver settings objects and task-based meshing workflow.
"""
-from typing import Any, List
+from typing import Any
import ansys.fluent.core.filereader.lispy as lispy
from ansys.fluent.core.solver.error_message import allowed_name_error_message
@@ -83,7 +83,7 @@ def __call__(self, var: str | None = None, val: Any | None = None) -> Any:
else (self._get_var(var) if var is not None else self._get_vars())
)
- def allowed_values(self) -> List[str]:
+ def allowed_values(self) -> list[str]:
"""Returns list with the allowed rpvars names.
Returns
diff --git a/src/ansys/fluent/core/scheduler/load_machines.py b/src/ansys/fluent/core/scheduler/load_machines.py
index 188f18616073..5d5acfa11a43 100644
--- a/src/ansys/fluent/core/scheduler/load_machines.py
+++ b/src/ansys/fluent/core/scheduler/load_machines.py
@@ -31,13 +31,12 @@
import os
from pathlib import Path
import subprocess
-from typing import Dict, List
from ansys.fluent.core.scheduler.machine_list import Machine, MachineList
def load_machines(
- machine_info: List[Dict[str, int]] | None = None,
+ machine_info: list[dict[str, int]] | None = None,
host_info: str | None = None,
ncores: int | None = None,
) -> MachineList:
@@ -145,7 +144,7 @@ def _parse_host_info(host_info):
"""
if Path(host_info).is_file():
# Only opens a file if it exists
- with open(host_info, "r") as f:
+ with open(host_info) as f:
host_info = f.read()
sMod = 1 if host_info[0] == "[" else 0
@@ -275,7 +274,7 @@ def _construct_machine_list_uge(host_file_name):
"""Provide private module function to parse the UGE host file."""
csv.register_dialect("pemachines", delimiter=" ", skipinitialspace=True)
machineList = MachineList()
- with open(host_file_name, "r") as peFile:
+ with open(host_file_name) as peFile:
peReader = csv.reader(peFile, dialect="pemachines")
for row in peReader:
if len(row) == 0:
@@ -305,7 +304,7 @@ def _construct_machine_list_pbs(host_file_name):
# It's identical to a Fluent host file format. This code accumulates the total
# core count on each machine.
machineDict = {}
- with open(host_file_name, "r") as pbsFile:
+ with open(host_file_name) as pbsFile:
for hostname in pbsFile:
hostname = hostname.rstrip("\r\n")
if len(hostname) == 0:
diff --git a/src/ansys/fluent/core/scheduler/machine_list.py b/src/ansys/fluent/core/scheduler/machine_list.py
index db566e8ae896..c4bd0d335be9 100644
--- a/src/ansys/fluent/core/scheduler/machine_list.py
+++ b/src/ansys/fluent/core/scheduler/machine_list.py
@@ -37,11 +37,10 @@
when the machine file is loaded.
"""
-from builtins import object
import copy
-class Machine(object):
+class Machine:
"""Provides an interface for a single machine allocated by a queue system."""
def __init__(self, hostName, numberOfCores, queueName=None, coreList=None):
@@ -99,7 +98,7 @@ def core_list(self):
return self._coreList
-class MachineList(object):
+class MachineList:
"""Provides an interface to list of machines allocated by a queue system."""
def __init__(self, machinesIn=None):
diff --git a/src/ansys/fluent/core/search.py b/src/ansys/fluent/core/search.py
index 2a7211617758..5dfdd9931841 100644
--- a/src/ansys/fluent/core/search.py
+++ b/src/ansys/fluent/core/search.py
@@ -40,6 +40,7 @@
get_version_for_file_name,
)
+__all__ = ("search",)
warnings.filterwarnings("ignore", category=UserWarning, module="nltk")
logger = logging.getLogger("pyfluent.general")
@@ -176,7 +177,7 @@ def _get_api_tree_data():
"""Get API tree data."""
api_tree_data_file_path = _get_api_tree_data_file_path()
if api_tree_data_file_path.exists():
- json_file = open(api_tree_data_file_path, "r")
+ json_file = open(api_tree_data_file_path)
api_tree_data = json.load(json_file)
return api_tree_data
@@ -311,6 +312,7 @@ def _search_wildcard(
-------
List of search string matches.
"""
+
api_tree_data = api_tree_data or _get_api_tree_data()
all_names = api_tree_data["all_api_object_names"]
queries = []
diff --git a/src/ansys/fluent/core/services/__init__.py b/src/ansys/fluent/core/services/__init__.py
index be8efb52746c..ad644f204101 100644
--- a/src/ansys/fluent/core/services/__init__.py
+++ b/src/ansys/fluent/core/services/__init__.py
@@ -32,7 +32,11 @@
)
from ansys.fluent.core.services.deprecated_field_data import DeprecatedFieldData
from ansys.fluent.core.services.events import EventsService
-from ansys.fluent.core.services.field_data import LiveFieldData, _FieldInfo
+from ansys.fluent.core.services.field_data import (
+ FieldDataService,
+ LiveFieldData,
+ _FieldInfo,
+)
from ansys.fluent.core.services.health_check import HealthCheckService
from ansys.fluent.core.services.monitor import MonitorsService
from ansys.fluent.core.services.reduction import Reduction
@@ -45,34 +49,23 @@
from ansys.fluent.core.services.transcript import TranscriptService
from ansys.fluent.core.streaming_services.field_data_streaming import FieldDataStreaming
-_service_cls_by_name = {
- "app_utilities": AppUtilities,
- "health_check": HealthCheckService,
- "datamodel": DatamodelService_SE,
- "tui": DatamodelService_TUI,
- "settings": SettingsService,
- "scheme_eval": SchemeEval,
- "events": EventsService,
- "field_data": LiveFieldData,
- "field_data_old": DeprecatedFieldData,
- "field_info": _FieldInfo,
- "monitors": MonitorsService,
- "reduction": Reduction,
- "svar": SolutionVariableService,
- "svar_data": SolutionVariableData,
- "transcript": TranscriptService,
- "batch_ops": BatchOpsService,
- "field_data_streaming": FieldDataStreaming,
-}
-
-
-class service_creator:
- """A gRPC service creator."""
-
- def __init__(self, service_name: str):
- """Initialize service_creator."""
- self._service_cls = _service_cls_by_name[service_name]
-
- def create(self, *args, **kwargs):
- """Create a gRPC service."""
- return self._service_cls(*args, **kwargs)
+__all__ = (
+ "AppUtilities",
+ "BatchOpsService",
+ "DatamodelService_SE",
+ "DatamodelService_TUI",
+ "DeprecatedFieldData",
+ "EventsService",
+ "FieldDataService",
+ "FieldDataStreaming",
+ "HealthCheckService",
+ "LiveFieldData",
+ "MonitorsService",
+ "Reduction",
+ "SchemeEval",
+ "SettingsService",
+ "SolutionVariableData",
+ "SolutionVariableService",
+ "TranscriptService",
+ "_FieldInfo",
+)
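
Removing ``service_creator`` trades a string-keyed factory for direct, checker-visible imports. A runnable analogue of the before/after (``EventsServiceDemo`` is a stand-in, not the real service class):

class EventsServiceDemo:
    def __init__(self, channel: str) -> None:
        self.channel = channel


_by_name = {"events": EventsServiceDemo}   # old style: runtime lookup table

old = _by_name["events"]("ch0")            # return type opaque to checkers
new = EventsServiceDemo("ch0")             # fully typed construction

assert type(old) is type(new)
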
diff --git a/src/ansys/fluent/core/session_solver.pyi b/src/ansys/fluent/core/services/_protocols.py
similarity index 55%
rename from src/ansys/fluent/core/session_solver.pyi
rename to src/ansys/fluent/core/services/_protocols.py
index 0fb2ca3385e7..4e2702e4ac27 100644
--- a/src/ansys/fluent/core/session_solver.pyi
+++ b/src/ansys/fluent/core/services/_protocols.py
@@ -20,28 +20,19 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-from ansys.fluent.core.generated.datamodel_252.preferences import (
- Root as preferences_root,
-)
-from ansys.fluent.core.generated.datamodel_252.workflow import Root as workflow_root
-import ansys.fluent.core.generated.solver.settings_252 as settings_root
-from ansys.fluent.core.generated.solver.tui_252 import main_menu
-from ansys.fluent.core.system_coupling import SystemCoupling
+"""Provides protocol definitions for gRPC services."""
-class Solver:
- @property
- def version(self): ...
- @property
- def tui(self) -> main_menu: ...
- @property
- def workflow(self) -> workflow_root: ...
- @property
- def system_coupling(self) -> SystemCoupling: ...
- @property
- def preferences(self) -> preferences_root: ...
- def read_case_lightweight(self, file_name: str): ...
- def read_case(self, file_name: str): ...
- def write_case(self, file_name: str): ...
- @property
- def settings(self) -> settings_root.root: ...
- def enable_beta_features(self): ...
+from typing import TYPE_CHECKING, Protocol
+
+if TYPE_CHECKING:
+ import grpc
+
+
+class ServiceProtocol(Protocol):
+ """Protocol for gRPC service classes."""
+
+ if TYPE_CHECKING:
+
+ def __init__(
+ self, channel: "grpc.Channel", metadata: list[tuple[str, str]]
+ ) -> None: ...
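
``ServiceProtocol`` is a structural type: a service satisfies it by signature alone, with no inheritance required at runtime. A minimal sketch of the same idea with illustrative names:

from typing import Protocol


class ServiceLike(Protocol):
    def __init__(self, channel: str, metadata: list[tuple[str, str]]) -> None: ...


class PingService:  # satisfies ServiceLike structurally
    def __init__(self, channel: str, metadata: list[tuple[str, str]]) -> None:
        self.channel, self.metadata = channel, metadata


def make(cls: type[ServiceLike], channel: str) -> ServiceLike:
    return cls(channel, [("token", "demo")])


svc = make(PingService, "ch0")
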
diff --git a/src/ansys/fluent/core/services/app_utilities.py b/src/ansys/fluent/core/services/app_utilities.py
index d775094ed55e..98f4146d2be6 100644
--- a/src/ansys/fluent/core/services/app_utilities.py
+++ b/src/ansys/fluent/core/services/app_utilities.py
@@ -25,13 +25,13 @@
from dataclasses import dataclass
from enum import Enum
import os
-from typing import List, Tuple
import grpc
from ansys.api.fluent.v0 import app_utilities_pb2 as AppUtilitiesProtoModule
from ansys.api.fluent.v0 import app_utilities_pb2_grpc as AppUtilitiesGrpcModule
from ansys.fluent.core._types import PathType
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -41,11 +41,11 @@
from ansys.fluent.core.streaming_services.events_streaming import SolverEvent
-class AppUtilitiesService:
+class AppUtilitiesService(ServiceProtocol):
"""AppUtilities Service."""
- def __init__(
- self, channel: grpc.Channel, metadata: List[Tuple[str, str]], fluent_error_state
+ def __init__( # pyright: ignore[reportMissingSuperCall]
+ self, channel: grpc.Channel, metadata: list[tuple[str, str]], fluent_error_state
):
"""__init__ method of AppUtilities class."""
intercept_channel = grpc.intercept_channel(
diff --git a/src/ansys/fluent/core/services/batch_ops.py b/src/ansys/fluent/core/services/batch_ops.py
index d06d43da6047..f9900f620676 100644
--- a/src/ansys/fluent/core/services/batch_ops.py
+++ b/src/ansys/fluent/core/services/batch_ops.py
@@ -33,13 +33,16 @@
import ansys.api.fluent.v0 as api
from ansys.api.fluent.v0 import batch_ops_pb2, batch_ops_pb2_grpc
+from ansys.fluent.core.services._protocols import ServiceProtocol
+
+__all__ = ("BatchOps",)
_TBatchOps = TypeVar("_TBatchOps", bound="BatchOps")
network_logger: logging.Logger = logging.getLogger("pyfluent.networking")
-class BatchOpsService:
+class BatchOpsService(ServiceProtocol):
"""Class wrapping methods in batch RPC service."""
def __init__(self, channel: grpc.Channel, metadata: list[tuple[str, str]]) -> None:
@@ -173,7 +176,7 @@ def update_result(self, status: batch_ops_pb2.ExecuteStatus, data: str) -> None:
def __new__(cls, session) -> _TBatchOps:
if cls.instance() is None:
- instance = super(BatchOps, cls).__new__(cls)
+ instance = super().__new__(cls)
instance._service: BatchOpsService = session._batch_ops_service
instance._ops: list[BatchOps.Op] = []
instance.batching = False
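
``super(BatchOps, cls).__new__(cls)`` and the zero-argument ``super().__new__(cls)`` are equivalent in Python 3; the shorter spelling is simply idiomatic. A sketch of the same singleton-style ``__new__``:

class Singleton:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)  # zero-argument form
        return cls._instance


assert Singleton() is Singleton()
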
diff --git a/src/ansys/fluent/core/services/datamodel_se.py b/src/ansys/fluent/core/services/datamodel_se.py
index 3790edff5015..e274adbd29a8 100644
--- a/src/ansys/fluent/core/services/datamodel_se.py
+++ b/src/ansys/fluent/core/services/datamodel_se.py
@@ -22,13 +22,14 @@
"""Wrappers over StateEngine based datamodel gRPC service of Fluent."""
+from collections.abc import Callable, Iterator, Sequence
from enum import Enum
import functools
import itertools
import logging
import os
from threading import RLock
-from typing import Any, Callable, Iterator, NoReturn, Sequence, TypeVar
+from typing import Any, NoReturn, TypeVar
from google.protobuf.json_format import MessageToDict, ParseDict
import grpc
@@ -38,6 +39,7 @@
from ansys.api.fluent.v0.variant_pb2 import Variant
import ansys.fluent.core as pyfluent
from ansys.fluent.core.data_model_cache import DataModelCache, NameKey
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -176,7 +178,7 @@ def validate_name(name):
return [name for name in names if validate_name(name)]
-class DatamodelServiceImpl:
+class DatamodelServiceImpl(ServiceProtocol):
"""Wraps the StateEngine-based datamodel gRPC service of Fluent."""
def __init__(
@@ -494,7 +496,9 @@ def unsubscribe_all(self) -> None:
v.unsubscribe()
-class DatamodelService(StreamingService):
+class DatamodelService(
+ StreamingService, ServiceProtocol
+): # pyright: ignore[reportUnsafeMultipleInheritance]
"""Pure Python wrapper of DatamodelServiceImpl."""
def __init__(
@@ -1168,7 +1172,6 @@ def add_on_command_attribute_changed(
)
def __dir__(self):
-
all_children = list(self.__dict__) + dir(type(self))
filtered_children = _FilterDatamodelNames(self.service)(self, all_children)
@@ -1688,7 +1691,7 @@ def _get_item(self, key: str) -> PyMenu:
)
else:
raise LookupError(
- f"{key} is not found at path " f"{convert_path_to_se_path(self.path)}"
+ f"{key} is not found at path {convert_path_to_se_path(self.path)}"
)
def _del_item(self, key: str) -> None:
@@ -1711,7 +1714,7 @@ def _del_item(self, key: str) -> None:
self.service.delete_object(self.rules, se_path)
else:
raise LookupError(
- f"{key} is not found at path " f"{convert_path_to_se_path(self.path)}"
+ f"{key} is not found at path {convert_path_to_se_path(self.path)}"
)
def __getitem__(self, key: str) -> PyMenu:
@@ -2054,7 +2057,7 @@ def __del__(self) -> None:
self.path[-1][1],
)
except Exception as exc:
- logger.info("__del__ %s: %s" % (type(exc).__name__, exc))
+ logger.info(f"__del__ {type(exc).__name__}: {exc}")
def get_attr(self, attrib: str) -> Any:
"""Get attribute value of the current object.
@@ -2221,7 +2224,7 @@ def _get_child(self, name: str) -> PyNamedObjectContainer | PyCommand | PyQuery:
return PyQuery(self.service, self.rules, name, self.path)
else:
raise LookupError(
- f"{name} is not found at path " f"{convert_path_to_se_path(self.path)}"
+ f"{name} is not found at path {convert_path_to_se_path(self.path)}"
)
def __dir__(self) -> list[str]:
@@ -2271,5 +2274,5 @@ def _get_item(self, key: str) -> PyMenuGeneric:
return PyMenuGeneric(self.service, self.rules, child_path)
else:
raise LookupError(
- f"{key} is not found at path " f"{convert_path_to_se_path(self.path)}"
+ f"{key} is not found at path {convert_path_to_se_path(self.path)}"
)
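
The single f-string replaces adjacent literal concatenation, which is easy to break silently (a lost space, or a comma that turns the pair into a tuple). Both forms render identically:

key, path = "child", "/root"
a = f"{key} is not found at path " f"{path}"   # implicit concatenation
b = f"{key} is not found at path {path}"       # single literal
assert a == b
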
diff --git a/src/ansys/fluent/core/services/datamodel_tui.py b/src/ansys/fluent/core/services/datamodel_tui.py
index 9ad912e4cacb..f0fd9e250057 100644
--- a/src/ansys/fluent/core/services/datamodel_tui.py
+++ b/src/ansys/fluent/core/services/datamodel_tui.py
@@ -32,6 +32,7 @@
from ansys.api.fluent.v0 import datamodel_tui_pb2 as DataModelProtoModule
from ansys.api.fluent.v0 import datamodel_tui_pb2_grpc as DataModelGrpcModule
from ansys.api.fluent.v0.variant_pb2 import Variant
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.api_upgrade import ApiUpgradeAdvisor
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
@@ -139,7 +140,7 @@ def _convert_gvalue_to_value(gval: Variant) -> Any:
return val
-class DatamodelService:
+class DatamodelService(ServiceProtocol):
"""Pure Python wrapper of DatamodelServiceImpl."""
def __init__(
diff --git a/src/ansys/fluent/core/services/deprecated_field_data.py b/src/ansys/fluent/core/services/deprecated_field_data.py
index 498420db2035..1713c93bd654 100644
--- a/src/ansys/fluent/core/services/deprecated_field_data.py
+++ b/src/ansys/fluent/core/services/deprecated_field_data.py
@@ -22,7 +22,7 @@
"""Deprecated wrappers over FieldData gRPC service of Fluent."""
-from typing import Callable, Dict, List
+from collections.abc import Callable
import warnings
from ansys.api.fluent.v0 import field_data_pb2 as FieldDataProtoModule
@@ -310,10 +310,10 @@ def new_transaction(self):
def get_scalar_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
- ) -> ScalarFieldData | Dict[int, ScalarFieldData]:
+ ) -> ScalarFieldData | dict[int, ScalarFieldData]:
"""Get scalar field data on a surface.
Parameters
@@ -393,15 +393,15 @@ def get_scalar_field_data(
)
def get_surface_data(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
overset_mesh: bool | None = False,
) -> (
Vertices
| FacesConnectivity
| FacesNormal
| FacesCentroid
- | Dict[int, Vertices | FacesConnectivity | FacesNormal | FacesCentroid]
+ | dict[int, Vertices | FacesConnectivity | FacesNormal | FacesCentroid]
):
"""Get surface data (vertices, faces connectivity, centroids, and normals).
@@ -522,8 +522,8 @@ def _get_surfaces_data(parent_class, surf_id, _data_type):
def get_vector_field_data(
self,
field_name: str,
- surfaces: List[int | str],
- ) -> VectorFieldData | Dict[int, VectorFieldData]:
+ surfaces: list[int | str],
+ ) -> VectorFieldData | dict[int, VectorFieldData]:
"""Get vector field data on a surface.
Parameters
@@ -592,7 +592,7 @@ def get_vector_field_data(
def get_pathlines_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
additional_field_name: str = "",
provide_particle_time_field: bool | None = False,
node_value: bool | None = True,
@@ -605,7 +605,7 @@ def get_pathlines_field_data(
coarsen: int | None = 1,
velocity_domain: str | None = "all-phases",
zones: list | None = None,
- ) -> Dict:
+ ) -> dict:
"""Get the pathlines field data on a surface.
Parameters
diff --git a/src/ansys/fluent/core/services/events.py b/src/ansys/fluent/core/services/events.py
index 6860cd2024be..fc67bc47c27f 100644
--- a/src/ansys/fluent/core/services/events.py
+++ b/src/ansys/fluent/core/services/events.py
@@ -22,18 +22,19 @@
"""Wrapper over the events gRPC service of Fluent."""
-from typing import List, Tuple
-
import grpc
from ansys.api.fluent.v0 import events_pb2_grpc as EventsGrpcModule
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.streaming import StreamingService
-class EventsService(StreamingService):
+class EventsService(
+ StreamingService, ServiceProtocol
+): # pyright: ignore[reportUnsafeMultipleInheritance]
"""Class wrapping the events gRPC service of Fluent."""
- def __init__(self, channel: grpc.Channel, metadata: List[Tuple[str, str]]):
+ def __init__(self, channel: grpc.Channel, metadata: list[tuple[str, str]]):
"""__init__ method of EventsService class."""
super().__init__(
stub=EventsGrpcModule.EventsStub(channel),
diff --git a/src/ansys/fluent/core/services/field_data.py b/src/ansys/fluent/core/services/field_data.py
index 6d10da5e365b..3371cbc8f096 100644
--- a/src/ansys/fluent/core/services/field_data.py
+++ b/src/ansys/fluent/core/services/field_data.py
@@ -21,13 +21,13 @@
# SOFTWARE.
"""Wrappers over FieldData gRPC service of Fluent."""
-from collections.abc import Iterable
+
+from collections.abc import Callable, Iterable
from dataclasses import dataclass, field
from enum import Enum
from functools import reduce
import logging
import time
-from typing import Callable, Dict, List, Tuple
import warnings
import weakref
@@ -60,6 +60,7 @@
get_surfaces_from_objects,
)
from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -83,11 +84,13 @@ def override_help_text(func, func_to_be_wrapped):
return func
-class FieldDataService(StreamingService):
+class FieldDataService(
+ StreamingService, ServiceProtocol
+): # pyright: ignore[reportUnsafeMultipleInheritance]
"""FieldData service of Fluent."""
def __init__(
- self, channel: grpc.Channel, metadata: List[Tuple[str, str]], fluent_error_state
+ self, channel: grpc.Channel, metadata: list[tuple[str, str]], fluent_error_state
):
"""__init__ method of FieldDataService class."""
intercept_channel = grpc.intercept_channel(
@@ -176,8 +179,8 @@ def __init__(
self._is_data_valid = is_data_valid
def get_scalar_field_range(
- self, field: str, node_value: bool = False, surface_ids: List[int] = None
- ) -> List[float]:
+ self, field: str, node_value: bool = False, surface_ids: list[int] = None
+ ) -> list[float]:
"""Get the range (minimum and maximum values) of the field.
Parameters
@@ -200,8 +203,8 @@ def get_scalar_field_range(
return self._get_scalar_field_range(field, node_value, surface_ids)
def _get_scalar_field_range(
- self, field: str, node_value: bool = False, surface_ids: List[int] = None
- ) -> List[float]:
+ self, field: str, node_value: bool = False, surface_ids: list[int] = None
+ ) -> list[float]:
if not surface_ids:
surface_ids = []
request = FieldDataProtoModule.GetRangeRequest()
@@ -213,7 +216,7 @@ def _get_scalar_field_range(
response = self._service.get_scalar_field_range(request)
return [response.minimum, response.maximum]
- def get_scalar_fields_info(self) -> Dict[str, Dict]:
+ def get_scalar_fields_info(self) -> dict[str, dict]:
"""Get fields information (field name, domain, and section).
Returns
@@ -227,7 +230,7 @@ def get_scalar_fields_info(self) -> Dict[str, Dict]:
)
return self._get_scalar_fields_info()
- def _get_scalar_fields_info(self) -> Dict[str, Dict]:
+ def _get_scalar_fields_info(self) -> dict[str, dict]:
request = FieldDataProtoModule.GetFieldsInfoRequest()
response = self._service.get_scalar_fields_info(request)
return {
@@ -240,7 +243,7 @@ def _get_scalar_fields_info(self) -> Dict[str, Dict]:
for field_info in response.fieldInfo
}
- def get_vector_fields_info(self) -> Dict[str, Dict]:
+ def get_vector_fields_info(self) -> dict[str, dict]:
"""Get vector fields information (vector components).
Returns
@@ -254,7 +257,7 @@ def get_vector_fields_info(self) -> Dict[str, Dict]:
)
return self._get_vector_fields_info()
- def _get_vector_fields_info(self) -> Dict[str, Dict]:
+ def _get_vector_fields_info(self) -> dict[str, dict]:
request = FieldDataProtoModule.GetVectorFieldsInfoRequest()
response = self._service.get_vector_fields_info(request)
return {
@@ -266,7 +269,7 @@ def _get_vector_fields_info(self) -> Dict[str, Dict]:
for vector_field_info in response.vectorFieldInfo
}
- def get_surfaces_info(self) -> Dict[str, Dict]:
+ def get_surfaces_info(self) -> dict[str, dict]:
"""Get surfaces information (surface name, ID, and type).
Returns
@@ -280,7 +283,7 @@ def get_surfaces_info(self) -> Dict[str, Dict]:
)
return self._get_surfaces_info()
- def _get_surfaces_info(self) -> Dict[str, Dict]:
+ def _get_surfaces_info(self) -> dict[str, dict]:
request = FieldDataProtoModule.GetSurfacesInfoResponse()
response = self._service.get_surfaces_info(request)
info = {
@@ -316,7 +319,7 @@ def validate_vector_fields(self, field_name: str):
self._is_data_valid, info=self._get_vector_fields_info()
).valid_name(field_name)
- def validate_surfaces(self, surfaces: List[str]):
+ def validate_surfaces(self, surfaces: list[str]):
"""Validate surfaces."""
warnings.warn(
"This usage is deprecated and will be removed in a future release. "
@@ -416,11 +419,10 @@ def _data_type_converter(args_dict):
class _FetchFieldData:
-
@staticmethod
def _surface_data(
- data_types: List[SurfaceDataType] | List[str],
- surface_ids: List[int],
+ data_types: list[SurfaceDataType] | list[str],
+ surface_ids: list[int],
overset_mesh: bool | None = False,
):
return [
@@ -438,7 +440,7 @@ def _surface_data(
@staticmethod
def _scalar_data(
field_name: str,
- surface_ids: List[int],
+ surface_ids: list[int],
node_value: bool,
boundary_value: bool,
):
@@ -459,7 +461,7 @@ def _scalar_data(
@staticmethod
def _vector_data(
field_name: str,
- surface_ids: List[int],
+ surface_ids: list[int],
):
return [
FieldDataProtoModule.VectorFieldRequest(
@@ -471,7 +473,7 @@ def _vector_data(
@staticmethod
def _pathlines_data(
field_name: str,
- surface_ids: List[int],
+ surface_ids: list[int],
**kwargs,
):
return [
@@ -489,7 +491,7 @@ class BaseFieldData:
def __init__(
self,
- data: Dict,
+ data: dict,
field_info,
allowed_surface_names,
allowed_scalar_field_names,
@@ -502,7 +504,7 @@ def __init__(
self._returned_data = _ReturnFieldData()
self._deprecated_flag = False
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Get a list of surface ids based on surfaces provided as inputs."""
return _get_surface_ids(
field_info=self._field_info,
@@ -513,7 +515,7 @@ def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
def _get_scalar_field_data(
self,
**kwargs,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
scalar_field_data = self.data[
(
("type", "scalar-field"),
@@ -531,7 +533,7 @@ def _get_scalar_field_data(
def _get_surface_data(
self,
**kwargs,
- ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+ ) -> dict[int | str, dict[SurfaceDataType, np.ndarray | list[np.ndarray]]]:
surface_data = self.data[(("type", "surface-data"),)]
return self._returned_data._surface_data(
kwargs.get("data_types"),
@@ -544,7 +546,7 @@ def _get_surface_data(
def _get_vector_field_data(
self,
**kwargs,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
vector_field_data = self.data[(("type", "vector-field"),)]
return self._returned_data._vector_data(
_to_field_name_str(kwargs.get("field_name")),
@@ -556,7 +558,7 @@ def _get_vector_field_data(
def _get_pathlines_field_data(
self,
**kwargs,
- ) -> Dict:
+ ) -> dict:
if kwargs.get("zones") is None:
zones = []
del zones
@@ -578,7 +580,7 @@ def get_field_data(
| VectorFieldDataRequest
| PathlinesFieldDataRequest
),
- ) -> Dict[int | str, Dict | np.array]:
+ ) -> dict[int | str, dict | np.ndarray]:
"""Get the surface, scalar, vector or path-lines field data on a surface.
Returns
@@ -608,7 +610,7 @@ class BatchFieldData(BaseFieldData, BaseFieldDataSource):
def __init__(
self,
- data: Dict,
+ data: dict,
field_info,
allowed_surface_names,
allowed_scalar_field_names,
@@ -702,7 +704,7 @@ def __init__(
self._pathline_field_data = []
self._cache_requests = []
- def get_surface_ids(self, surfaces: List[str | int]) -> List[int]:
+ def get_surface_ids(self, surfaces: list[str | int]) -> list[int]:
"""Get a list of surface ids based on surfaces provided as inputs."""
return _get_surface_ids(
field_info=self._field_info,
@@ -809,8 +811,8 @@ def _add_pathlines_fields_request(
@deprecate_function(version="v0.23.0", new_func="add_requests")
def add_surfaces_request(
self,
- data_types: List[SurfaceDataType] | List[str],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType] | list[str],
+ surfaces: list[int | str],
overset_mesh: bool | None = False,
) -> None:
"""Add request to get surface data (vertices, face connectivity, centroids, and
@@ -835,7 +837,7 @@ def add_surfaces_request(
def add_scalar_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
) -> None:
@@ -861,7 +863,7 @@ def add_scalar_fields_request(
def add_vector_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
) -> None:
"""Add request to get vector field data on surfaces."""
self._add_vector_fields_request(
@@ -882,7 +884,7 @@ def add_vector_fields_request(
def add_pathlines_fields_request(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
additional_field_name: str = "",
provide_particle_time_field: bool | None = False,
node_value: bool | None = True,
@@ -1049,8 +1051,8 @@ class _FieldDataConstants:
def _get_surface_ids(
field_info: _FieldInfo,
allowed_surface_names,
- surfaces: List[int | str | object],
-) -> List[int]:
+ surfaces: list[int | str | object],
+) -> list[int]:
"""Get surface IDs based on surface names or IDs.
Parameters
@@ -1109,7 +1111,7 @@ def __init__(self, callbacks_provider: object = None):
"""__init__ method of ChunkParser class."""
self._callbacks_provider = callbacks_provider
- def extract_fields(self, chunk_iterator) -> Dict[int, Dict[str, np.array]]:
+ def extract_fields(self, chunk_iterator) -> dict[int, dict[str, np.ndarray]]:
"""Extracts field data received from Fluent.
If callbacks_provider is set, callbacks are triggered with the extracted data.
@@ -1503,7 +1505,7 @@ def _get_scalar_field_data(self, **kwargs):
def _get_surface_data(
self,
**kwargs,
- ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+ ) -> dict[int | str, dict[SurfaceDataType, np.ndarray | list[np.ndarray]]]:
surface_ids = self.get_surface_ids(kwargs.get("surfaces"))
fields_request = get_fields_request()
fields_request.surfaceRequest.extend(
@@ -1537,7 +1539,7 @@ def _get_surface_data(
def _get_vector_field_data(
self,
**kwargs,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
surface_ids = self.get_surface_ids(kwargs.get("surfaces"))
field_name = self._allowed_vector_field_names.valid_name(
kwargs.get("field_name")
@@ -1564,7 +1566,7 @@ def _get_vector_field_data(
def _get_pathlines_field_data(
self,
**kwargs,
- ) -> Dict:
+ ) -> dict:
if kwargs.get("zones") is None:
zones = []
surface_ids = self.get_surface_ids(kwargs.get("surfaces"))
@@ -1625,10 +1627,10 @@ def _get_pathlines_field_data(
def get_scalar_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
node_value: bool | None = True,
boundary_value: bool | None = True,
- ) -> Dict[int | str, np.array]:
+ ) -> dict[int | str, np.ndarray]:
"""Get scalar field data on a surface."""
return self._get_scalar_field_data(
field_name=field_name,
@@ -1640,10 +1642,10 @@ def get_scalar_field_data(
@deprecate_function(version="v0.34.0", new_func="get_field_data")
def get_surface_data(
self,
- data_types: List[SurfaceDataType],
- surfaces: List[int | str],
+ data_types: list[SurfaceDataType],
+ surfaces: list[int | str],
overset_mesh: bool | None = False,
- ) -> Dict[int | str, Dict[SurfaceDataType, np.array | List[np.array]]]:
+ ) -> dict[int | str, dict[SurfaceDataType, np.ndarray | list[np.ndarray]]]:
"""Get surface data (vertices, faces connectivity, centroids, and normals)."""
self._deprecated_flag = True
return self._get_surface_data(
@@ -1654,8 +1656,8 @@ def get_surface_data(
def get_vector_field_data(
self,
field_name: str,
- surfaces: List[int | str],
- ) -> Dict[int | str, np.array]:
+ surfaces: list[int | str],
+ ) -> dict[int | str, np.ndarray]:
"""Get vector field data on a surface."""
return self._get_vector_field_data(
field_name=field_name,
@@ -1666,7 +1668,7 @@ def get_vector_field_data(
def get_pathlines_field_data(
self,
field_name: str,
- surfaces: List[int | str],
+ surfaces: list[int | str],
additional_field_name: str = "",
provide_particle_time_field: bool | None = False,
node_value: bool | None = True,
@@ -1679,7 +1681,7 @@ def get_pathlines_field_data(
coarsen: int | None = 1,
velocity_domain: str | None = "all-phases",
zones: list | None = None,
- ) -> Dict:
+ ) -> dict:
"""Get the pathlines field data on a surface."""
self._deprecated_flag = True
return self._get_pathlines_field_data(
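
The ``np.array`` to ``np.ndarray`` annotation fixes are more than style: ``np.array`` is a function, so the old hints typed the values as a callable; ``np.ndarray`` is the actual array type. Illustrative usage (``scale`` is hypothetical):

import numpy as np


def scale(data: dict[int, np.ndarray], factor: float) -> dict[int, np.ndarray]:
    return {surface_id: values * factor for surface_id, values in data.items()}


assert scale({1: np.arange(3.0)}, 2.0)[1].tolist() == [0.0, 2.0, 4.0]
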
diff --git a/src/ansys/fluent/core/services/health_check.py b/src/ansys/fluent/core/services/health_check.py
index 3010c44f50cb..8f1e5020dc4c 100644
--- a/src/ansys/fluent/core/services/health_check.py
+++ b/src/ansys/fluent/core/services/health_check.py
@@ -31,6 +31,7 @@
from grpc_health.v1 import health_pb2_grpc as HealthCheckGrpcModule
import ansys.fluent.core as pyfluent
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -41,7 +42,7 @@
logger: logging.Logger = logging.getLogger("pyfluent.general")
-class HealthCheckService:
+class HealthCheckService(ServiceProtocol):
"""Class wrapping the health check gRPC service of Fluent.
Methods
diff --git a/src/ansys/fluent/core/services/monitor.py b/src/ansys/fluent/core/services/monitor.py
index a53b2e911d8a..eae390624cfd 100644
--- a/src/ansys/fluent/core/services/monitor.py
+++ b/src/ansys/fluent/core/services/monitor.py
@@ -27,6 +27,7 @@
from ansys.api.fluent.v0 import monitor_pb2 as MonitorModule
from ansys.api.fluent.v0 import monitor_pb2_grpc as MonitorGrpcModule
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -35,7 +36,9 @@
from ansys.fluent.core.services.streaming import StreamingService
-class MonitorsService(StreamingService):
+class MonitorsService(
+ StreamingService, ServiceProtocol
+): # pyright: ignore[reportUnsafeMultipleInheritance]
"""Class wrapping the monitor gRPC service of Fluent."""
def __init__(self, channel: grpc.Channel, metadata, fluent_error_state):
diff --git a/src/ansys/fluent/core/services/reduction.py b/src/ansys/fluent/core/services/reduction.py
index cc807da94b69..6021bebd285f 100644
--- a/src/ansys/fluent/core/services/reduction.py
+++ b/src/ansys/fluent/core/services/reduction.py
@@ -21,8 +21,9 @@
# SOFTWARE.
"""Wrappers over Reduction gRPC service of Fluent."""
+
from collections.abc import Iterable
-from typing import Any, List, Tuple
+from typing import Any
import weakref
import grpc
@@ -30,6 +31,7 @@
from ansys.api.fluent.v0 import reduction_pb2 as ReductionProtoModule
from ansys.api.fluent.v0 import reduction_pb2_grpc as ReductionGrpcModule
from ansys.fluent.core.exceptions import DisallowedValuesError
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.datamodel_se import _convert_variant_to_value
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
@@ -42,14 +44,14 @@
FluentExprNamingStrategy as naming_strategy,
)
-Path = List[Tuple[str, str]]
+Path = list[tuple[str, str]]
-class ReductionService:
+class ReductionService(ServiceProtocol):
"""Reduction Service."""
def __init__(
- self, channel: grpc.Channel, metadata: List[Tuple[str, str]], fluent_error_state
+ self, channel: grpc.Channel, metadata: list[tuple[str, str]], fluent_error_state
):
"""__init__ method of Reduction class."""
intercept_channel = grpc.intercept_channel(
@@ -290,7 +292,7 @@ def _validate_str_location(self, loc: str):
):
raise ValueError(f"Invalid location input: '{loc}'")
- def _get_location_string(self, locations, ctxt) -> List[str]:
+ def _get_location_string(self, locations, ctxt) -> list[str]:
if locations == []:
return []
for loc in locations:
diff --git a/src/ansys/fluent/core/services/scheme_eval.py b/src/ansys/fluent/core/services/scheme_eval.py
index 4428ae66e291..d69a8f7c3916 100644
--- a/src/ansys/fluent/core/services/scheme_eval.py
+++ b/src/ansys/fluent/core/services/scheme_eval.py
@@ -32,7 +32,8 @@
0.7
"""
-from typing import Any, Sequence
+from collections.abc import Sequence
+from typing import Any
from deprecated.sphinx import deprecated
import grpc
@@ -40,6 +41,7 @@
from ansys.api.fluent.v0 import scheme_eval_pb2 as SchemeEvalProtoModule
from ansys.api.fluent.v0 import scheme_eval_pb2_grpc as SchemeEvalGrpcModule
from ansys.api.fluent.v0.scheme_pointer_pb2 import SchemePointer
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -49,7 +51,7 @@
from ansys.fluent.core.utils.fluent_version import FluentVersion
-class SchemeEvalService:
+class SchemeEvalService(ServiceProtocol):
"""Class wrapping the SchemeEval gRPC service of Fluent.
Using the methods from the SchemeEval class is recommended.
diff --git a/src/ansys/fluent/core/services/settings.py b/src/ansys/fluent/core/services/settings.py
index 1cc5923fa72d..72f301e7982f 100644
--- a/src/ansys/fluent/core/services/settings.py
+++ b/src/ansys/fluent/core/services/settings.py
@@ -30,6 +30,7 @@
from ansys.api.fluent.v0 import settings_pb2 as SettingsModule
from ansys.api.fluent.v0 import settings_pb2_grpc as SettingsGrpcModule
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.interceptors import (
BatchInterceptor,
ErrorStateInterceptor,
@@ -38,7 +39,7 @@
)
-class _SettingsServiceImpl:
+class _SettingsServiceImpl(ServiceProtocol):
def __init__(
self, channel: grpc.Channel, metadata: list[tuple[str, str]], fluent_error_state
) -> None:
@@ -134,13 +135,13 @@ def _trace(fn):
def _fn(self, *args, **kwds):
global _indent
if trace:
- print(f"{' '*_indent}fn={fn.__name__}, args={args} {{")
+ print(f"{' ' * _indent}fn={fn.__name__}, args={args} {{")
try:
_indent += 1
ret = fn(self, *args, **kwds)
finally:
_indent -= 1
- print(f"{' '*_indent}fn = {fn.__name__}, ret={ret} }}")
+ print(f"{' ' * _indent}fn = {fn.__name__}, ret={ret} }}")
return ret
else:
return fn(self, *args, **kwds)
diff --git a/src/ansys/fluent/core/services/solution_variables.py b/src/ansys/fluent/core/services/solution_variables.py
index b165d6f7db8e..c89007a98872 100644
--- a/src/ansys/fluent/core/services/solution_variables.py
+++ b/src/ansys/fluent/core/services/solution_variables.py
@@ -23,7 +23,6 @@
"""Wrappers over SVAR gRPC service of Fluent."""
import math
-from typing import Dict, List
import warnings
import grpc
@@ -33,6 +32,7 @@
from ansys.api.fluent.v0 import svar_pb2 as SvarProtoModule
from ansys.api.fluent.v0 import svar_pb2_grpc as SvarGrpcModule
from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.field_data import (
_FieldDataConstants,
override_help_text,
@@ -50,7 +50,7 @@
_to_field_name_str = naming_strategy().to_string
-class SolutionVariableService:
+class SolutionVariableService(ServiceProtocol):
"""SVAR service of Fluent."""
def __init__(self, channel: grpc.Channel, metadata):
@@ -142,12 +142,12 @@ def __getitem__(self, name):
return self._solution_variables_info.get(name, None)
@property
- def solution_variables(self) -> List[str]:
+ def solution_variables(self) -> list[str]:
"""Solution variables."""
return list(self._solution_variables_info.keys())
@property
- def svars(self) -> List[str]:
+ def svars(self) -> list[str]:
"""Solution variables."""
warnings.warn(
"svars is deprecated, use solution_variables instead",
@@ -218,12 +218,12 @@ def zones(self):
return self.zone_names
@property
- def zone_names(self) -> List[str]:
+ def zone_names(self) -> list[str]:
"""Get zone names."""
return list(self._zones_info.keys())
@property
- def domains(self) -> List[str]:
+ def domains(self) -> list[str]:
"""Get domain names."""
return list(self._domains_info.keys())
@@ -239,7 +239,7 @@ def __init__(
self._service = service
def get_variables_info(
- self, zone_names: List[str], domain_name: str | None = "mixture"
+ self, zone_names: list[str], domain_name: str | None = "mixture"
) -> SolutionVariables:
"""Get SVARs info for zones in the domain.
@@ -274,7 +274,7 @@ def get_variables_info(
return solution_variables_info
def get_svars_info(
- self, zone_names: List[str], domain_name: str | None = "mixture"
+ self, zone_names: list[str], domain_name: str | None = "mixture"
) -> SolutionVariables:
"""Get solution variables info."""
warnings.warn(
@@ -303,7 +303,7 @@ def get_zones_info(self) -> ZonesInfo:
class InvalidSolutionVariableNameError(ValueError):
"""Exception class for errors in solution variable name."""
- def __init__(self, variable_name: str, allowed_values: List[str]):
+ def __init__(self, variable_name: str, allowed_values: list[str]):
"""Initialize InvalidSolutionVariableNameError."""
super().__init__(
allowed_name_error_message(
@@ -317,7 +317,7 @@ def __init__(self, variable_name: str, allowed_values: List[str]):
class ZoneError(ValueError):
"""Exception class for errors in Zone name."""
- def __init__(self, zone_name: str, allowed_values: List[str]):
+ def __init__(self, zone_name: str, allowed_values: list[str]):
"""Initialize ZoneError."""
self.zone_name = zone_name
super().__init__(
@@ -338,8 +338,8 @@ def __init__(self, solution_variable_info: SolutionVariableInfo):
self._solution_variable_info = solution_variable_info
def __call__(
- self, zone_names: List[str], domain_name: str | None = "mixture"
- ) -> List[str]:
+ self, zone_names: list[str], domain_name: str | None = "mixture"
+ ) -> list[str]:
return self._solution_variable_info.get_variables_info(
zone_names=zone_names, domain_name=domain_name
).solution_variables
@@ -352,7 +352,7 @@ def __call__(
def is_valid(
self,
variable_name,
- zone_names: List[str],
+ zone_names: list[str],
domain_name: str | None = "mixture",
):
"""Check whether solution variable name is valid or not."""
@@ -366,7 +366,7 @@ def is_valid(
def valid_name(
self,
variable_name,
- zone_names: List[str],
+ zone_names: list[str],
domain_name: str | None = "mixture",
):
"""Get a valid solution variable name.
@@ -391,7 +391,7 @@ class _AllowedZoneNames(_AllowedNames):
def __init__(self, solution_variable_info: SolutionVariableInfo):
self._zones_info = solution_variable_info.get_zones_info()
- def __call__(self) -> List[str]:
+ def __call__(self) -> list[str]:
return self._zones_info.zone_names
def valid_name(self, zone_name):
@@ -414,7 +414,7 @@ class _AllowedDomainNames(_AllowedNames):
def __init__(self, solution_variable_info: SolutionVariableInfo):
self._zones_info = solution_variable_info.get_zones_info()
- def __call__(self) -> List[str]:
+ def __call__(self) -> list[str]:
return self._zones_info.domains
def valid_name(self, domain_name):
@@ -629,7 +629,7 @@ def create_empty_array(
def get_data(
self,
variable_name: str,
- zone_names: List[str],
+ zone_names: list[str],
domain_name: str | None = "mixture",
) -> Data:
"""Get SVAR data on zones.
@@ -679,7 +679,7 @@ def get_data(
def get_svar_data(
self,
variable_name: str,
- zone_names: List[str],
+ zone_names: list[str],
domain_name: str | None = "mixture",
) -> Data:
"""Get solution variable data."""
@@ -701,7 +701,7 @@ def get_svar_data(
def set_data(
self,
variable_name: str,
- zone_names_to_data: Dict[str, np.array],
+ zone_names_to_data: dict[str, np.ndarray],
domain_name: str | None = "mixture",
) -> None:
"""Set SVAR data on zones.
@@ -793,8 +793,7 @@ def generate_set_data_requests():
if solution_variable_data.size > 0
]
- for set_data_request in set_data_requests:
- yield set_data_request
+ yield from set_data_requests
self._service.set_data(generate_set_data_requests())
@@ -806,7 +805,7 @@ def generate_set_data_requests():
def set_svar_data(
self,
variable_name: str,
- zone_names_to_svar_data: Dict[str, np.array],
+ zone_names_to_svar_data: dict[str, np.ndarray],
domain_name: str | None = "mixture",
) -> None:
"""Set solution variable data."""
diff --git a/src/ansys/fluent/core/services/streaming.py b/src/ansys/fluent/core/services/streaming.py
index 1e94326efb4b..b40889e518e4 100644
--- a/src/ansys/fluent/core/services/streaming.py
+++ b/src/ansys/fluent/core/services/streaming.py
@@ -22,7 +22,9 @@
"""Wrapper over the streaming gRPC services of Fluent."""
-from typing import Generator, List, Tuple
+from collections.abc import Generator
+
+from ansys.fluent.core.services._protocols import ServiceProtocol
class _StreamingServiceHelper:
@@ -36,7 +38,7 @@ class _StreamingServiceHelper:
End streaming
"""
- def __init__(self, stub, metadata: List[Tuple[str, str]], stream_begin_method):
+ def __init__(self, stub, metadata: list[tuple[str, str]], stream_begin_method):
"""__init__ method of StreamingService class."""
self._stub = stub
self._metadata = metadata
@@ -61,7 +63,7 @@ def end_streaming(self) -> None:
self._streams.cancel()
-class StreamingService:
+class StreamingService(ServiceProtocol):
"""Class wrapping the streaming gRPC services of Fluent.
Methods
@@ -72,7 +74,7 @@ class StreamingService:
End streaming
"""
- def __init__(self, stub, metadata: List[Tuple[str, str]]):
+ def __init__(self, stub, metadata: list[tuple[str, str]]):
"""__init__ method of StreamingService class."""
self._stub = stub
self._metadata = metadata
diff --git a/src/ansys/fluent/core/services/transcript.py b/src/ansys/fluent/core/services/transcript.py
index 96eb180a18a6..7cbe12c3fab9 100644
--- a/src/ansys/fluent/core/services/transcript.py
+++ b/src/ansys/fluent/core/services/transcript.py
@@ -25,10 +25,13 @@
import grpc
from ansys.api.fluent.v0 import transcript_pb2_grpc as TranscriptGrpcModule
+from ansys.fluent.core.services._protocols import ServiceProtocol
from ansys.fluent.core.services.streaming import StreamingService
-class TranscriptService(StreamingService):
+class TranscriptService(
+ StreamingService, ServiceProtocol
+): # pyright: ignore[reportUnsafeMultipleInheritance]
"""Class wrapping the transcript gRPC service of Fluent."""
def __init__(self, channel: grpc.Channel, metadata: list[tuple[str, str]]) -> None:
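
TranscriptService now inherits a concrete streaming base and ServiceProtocol together, which strict pyright reports as reportUnsafeMultipleInheritance; hence the targeted ignore. A sketch of the pattern, assuming ServiceProtocol is a typing.Protocol (names below are stand-ins):

from typing import Protocol


class Pingable(Protocol):
    def ping(self) -> str: ...


class StreamingBase:
    def ping(self) -> str:
        return "pong"


class Transcript(
    StreamingBase, Pingable
):  # pyright: ignore[reportUnsafeMultipleInheritance]
    # Mixing a concrete base with a protocol is fine at runtime; the
    # ignore only silences pyright's strict multiple-inheritance check.
    pass


assert Transcript().ping() == "pong"
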
diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py
index 423fa213b0c3..6eb0ec667ec3 100644
--- a/src/ansys/fluent/core/session.py
+++ b/src/ansys/fluent/core/session.py
@@ -22,11 +22,12 @@
"""Module containing class encapsulating Fluent connection and the Base Session."""
+from collections.abc import Callable
from enum import Enum
import json
import logging
import os
-from typing import Any, Callable, Dict
+from typing import Any
import warnings
import weakref
@@ -39,14 +40,36 @@
PyFluentDeprecationWarning,
PyFluentUserWarning,
)
-from ansys.fluent.core.services import service_creator
+from ansys.fluent.core.services import (
+ BatchOpsService,
+ EventsService,
+ SettingsService,
+ SolutionVariableData,
+ SolutionVariableService,
+ TranscriptService,
+)
from ansys.fluent.core.services.app_utilities import AppUtilitiesOld
-from ansys.fluent.core.services.field_data import FieldDataService, ZoneInfo
+from ansys.fluent.core.services.datamodel_se import (
+ DatamodelService as DatamodelService_SE,
+)
+from ansys.fluent.core.services.datamodel_tui import (
+ DatamodelService as DatamodelService_TUI,
+)
+from ansys.fluent.core.services.deprecated_field_data import DeprecatedFieldData
+from ansys.fluent.core.services.field_data import (
+ FieldDataService,
+ LiveFieldData,
+ ZoneInfo,
+ _FieldInfo,
+)
from ansys.fluent.core.services.scheme_eval import SchemeEval
from ansys.fluent.core.streaming_services.datamodel_event_streaming import (
DatamodelEvents,
)
from ansys.fluent.core.streaming_services.events_streaming import EventsManager
+from ansys.fluent.core.streaming_services.field_data_streaming import (
+ FieldDataStreaming,
+)
from ansys.fluent.core.streaming_services.transcript_streaming import Transcript
from ansys.fluent.core.utils.fluent_version import FluentVersion
@@ -61,6 +84,9 @@
logger = logging.getLogger("pyfluent.general")
+__all__ = ("BaseSession",)
+
+
def _parse_server_info_file(file_name: str):
"""Parse server info file.
Returns (ip, port, password) or (unix_socket, password)"""
@@ -127,7 +153,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
event_type: Enum | None = None,
get_zones_info: weakref.WeakMethod[Callable[[], list[ZoneInfo]]] | None = None,
):
@@ -179,7 +205,7 @@ def _build_from_fluent_connection(
file_transfer_service: Any | None = None,
event_type=None,
get_zones_info: weakref.WeakMethod[Callable[[], list[ZoneInfo]]] | None = None,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""Build a BaseSession object from fluent_connection object."""
self._fluent_connection = fluent_connection
@@ -192,7 +218,7 @@ def _build_from_fluent_connection(
self.rp_vars = RPVars(self.scheme.string_eval)
self._preferences = None
- self._transcript_service = service_creator("transcript").create(
+ self._transcript_service = TranscriptService(
fluent_connection._channel, fluent_connection._metadata
)
self.transcript = Transcript(self._transcript_service)
@@ -205,7 +231,7 @@ def _build_from_fluent_connection(
self.journal = Journal(self._app_utilities)
- self._datamodel_service_tui = service_creator("tui").create(
+ self._datamodel_service_tui = DatamodelService_TUI(
fluent_connection._channel,
fluent_connection._metadata,
self._error_state,
@@ -213,7 +239,7 @@ def _build_from_fluent_connection(
self.scheme,
)
- self._datamodel_service_se = service_creator("datamodel").create(
+ self._datamodel_service_se = DatamodelService_SE(
fluent_connection._channel,
fluent_connection._metadata,
self.get_fluent_version(),
@@ -224,12 +250,12 @@ def _build_from_fluent_connection(
self._datamodel_events = DatamodelEvents(self._datamodel_service_se)
self._datamodel_events.start()
- self._batch_ops_service = service_creator("batch_ops").create(
+ self._batch_ops_service = BatchOpsService(
fluent_connection._channel, fluent_connection._metadata
)
if event_type:
- events_service = service_creator("events").create(
+ events_service = EventsService(
fluent_connection._channel, fluent_connection._metadata
)
self.events = EventsManager[event_type](
@@ -245,7 +271,7 @@ def _build_from_fluent_connection(
self.fields = Fields(self, get_zones_info)
- self._settings_service = service_creator("settings").create(
+ self._settings_service = SettingsService(
fluent_connection._channel,
fluent_connection._metadata,
self._app_utilities,
@@ -328,7 +354,7 @@ def _create_from_server_info_file(
server_info_file_name: str,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
**connection_kwargs,
):
"""Create a Session instance from server-info file.
@@ -551,21 +577,21 @@ def __init__(
self._is_solution_data_valid = (
_session._app_utilities.is_solution_data_available
)
- self._field_info = service_creator("field_info").create(
+ self._field_info = _FieldInfo(
_session._field_data_service,
self._is_solution_data_valid,
)
- self.field_data = service_creator("field_data").create(
+ self.field_data = LiveFieldData(
_session._field_data_service,
self._field_info,
self._is_solution_data_valid,
_session.scheme,
get_zones_info,
)
- self.field_data_streaming = service_creator("field_data_streaming").create(
+ self.field_data_streaming = FieldDataStreaming(
_session._fluent_connection._id, _session._field_data_service
)
- self.field_data_old = service_creator("field_data_old").create(
+ self.field_data_old = DeprecatedFieldData(
_session._field_data_service,
self._field_info,
self._is_solution_data_valid,
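
session.py drops the string-keyed service_creator(...).create(...) indirection in favor of direct constructor calls, so each service attribute gets a precise inferred type. A toy illustration of why the registry lookup defeats inference; the registry below is hypothetical, not the real service_creator API:

class TranscriptService:
    def __init__(self, channel: object, metadata: list[tuple[str, str]]) -> None:
        self._channel = channel
        self._metadata = metadata


_REGISTRY: dict[str, type] = {"transcript": TranscriptService}


def service_creator(name: str) -> type:
    # The checker only sees ``type`` here, never TranscriptService.
    return _REGISTRY[name]


untyped = service_creator("transcript")(None, [])  # effectively untyped
typed = TranscriptService(None, [])  # inferred as TranscriptService
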
diff --git a/src/ansys/fluent/core/session_base_meshing.py b/src/ansys/fluent/core/session_base_meshing.py
index e84bc9bda5fe..469b092d5061 100644
--- a/src/ansys/fluent/core/session_base_meshing.py
+++ b/src/ansys/fluent/core/session_base_meshing.py
@@ -24,9 +24,16 @@
import logging
import os
+from typing import TYPE_CHECKING, cast
from ansys.fluent.core._types import PathType
from ansys.fluent.core.fluent_connection import FluentConnection
+from ansys.fluent.core.meshing.meshing_workflow import (
+ CreateWorkflow,
+ FaultTolerantMeshingWorkflow,
+ LoadWorkflow,
+ WatertightMeshingWorkflow,
+)
from ansys.fluent.core.meshing.meshing_workflow_new import name_to_identifier_map
from ansys.fluent.core.session_shared import (
_make_datamodel_module,
@@ -37,6 +44,27 @@
get_version_for_file_name,
)
+if TYPE_CHECKING:
+ from ansys.fluent.core.generated.datamodel_252.meshing import Root as meshing_root
+ from ansys.fluent.core.generated.datamodel_252.meshing_utilities import (
+ Root as meshing_utilities_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.meshing_workflow import (
+ Root as meshing_workflow_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.part_management import (
+ Root as partmanagement_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.pm_file_management import (
+ Root as pmfilemanagement_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.preferences import (
+ Root as preferences_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.workflow import Root as workflow_root
+ from ansys.fluent.core.generated.meshing.tui_252 import main_menu
+
+
pyfluent_logger = logging.getLogger("pyfluent.general")
datamodel_logger = logging.getLogger("pyfluent.datamodel")
@@ -90,48 +118,55 @@ def _version(self):
return self._product_version
@property
- def tui(self):
+ def tui(self) -> "main_menu":
"""Instance of ``main_menu`` on which Fluent's SolverTUI methods can be
executed."""
if self._tui is None:
self._tui = _make_tui_module(self, "meshing")
- return self._tui
+ return cast("main_menu", self._tui)
@property
- def meshing(self):
+ def meshing(self) -> "meshing_root":
"""Meshing object."""
if self._meshing is None:
self._meshing = _make_datamodel_module(self, "meshing")
- return self._meshing
+ return cast("meshing_root", self._meshing)
@property
- def _meshing_utilities_root(self):
+ def _meshing_utilities_root(self) -> "meshing_utilities_root":
"""Datamodel root of meshing_utilities."""
- return _make_datamodel_module(self, "MeshingUtilities")
+ return cast(
+ "meshing_utilities_root", _make_datamodel_module(self, "MeshingUtilities")
+ )
@property
- def meshing_utilities(self):
+ def meshing_utilities(self) -> "meshing_utilities_root":
"""A wrapper over the Fluent's meshing queries."""
if self._meshing_utilities is None:
self._meshing_utilities = self._meshing_utilities_root
return self._meshing_utilities
@property
- def workflow(self):
+ def workflow(self) -> "workflow_root":
"""Datamodel root of workflow."""
if self._old_workflow is None:
- self._old_workflow = _make_datamodel_module(self, "workflow")
+ self._old_workflow = cast(
+ "workflow_root", _make_datamodel_module(self, "workflow")
+ )
return self._old_workflow
@property
- def meshing_workflow(self):
+ def meshing_workflow(self) -> "meshing_workflow_root":
"""Full API to meshing and meshing_workflow."""
if self._meshing_workflow is None:
- self._meshing_workflow = _make_datamodel_module(self, "meshing_workflow")
+ self._meshing_workflow = cast(
+ "meshing_workflow_root",
+ _make_datamodel_module(self, "meshing_workflow"),
+ )
return self._meshing_workflow
- def watertight_workflow(self, initialize: bool = True):
+ def watertight_workflow(self, initialize: bool = True) -> WatertightMeshingWorkflow:
"""Datamodel root of workflow."""
if os.getenv("USE_SERVER_MW") == "1":
root_module = "meshing_workflow"
@@ -147,7 +182,9 @@ def watertight_workflow(self, initialize: bool = True):
)
return self._current_workflow
- def fault_tolerant_workflow(self, initialize: bool = True):
+ def fault_tolerant_workflow(
+ self, initialize: bool = True
+ ) -> FaultTolerantMeshingWorkflow:
"""Datamodel root of workflow."""
if os.getenv("USE_SERVER_MW") == "1":
root_module = "meshing_workflow"
@@ -197,7 +234,7 @@ def topology_based_meshing_workflow(self, initialize: bool = True):
)
return self._current_workflow
- def load_workflow(self, file_path: PathType):
+ def load_workflow(self, file_path: PathType) -> LoadWorkflow:
"""Datamodel root of workflow."""
if os.getenv("USE_SERVER_MW") == "1":
root_module = "meshing_workflow"
@@ -213,7 +250,7 @@ def load_workflow(self, file_path: PathType):
)
return self._current_workflow
- def create_workflow(self, initialize: bool = True):
+ def create_workflow(self, initialize: bool = True) -> CreateWorkflow:
"""Datamodel root of the workflow."""
if os.getenv("USE_SERVER_MW") == "1":
root_module = "meshing_workflow"
@@ -237,7 +274,9 @@ def _get_current_workflow(self, name: str):
return self._current_workflow
@property
- def current_workflow(self):
+ def current_workflow(
+ self,
+ ): # TODO(gobot1234): consider making this generic over workflow?
"""Datamodel root of the workflow.
Raises
@@ -269,22 +308,29 @@ def current_workflow(self):
return self.create_workflow(initialize=False)
@property
- def PartManagement(self):
+ def PartManagement(self) -> "partmanagement_root":
"""Datamodel root of ``PartManagement``."""
if self._part_management is None:
- self._part_management = _make_datamodel_module(self, "PartManagement")
+ self._part_management = cast(
+ "partmanagement_root", _make_datamodel_module(self, "PartManagement")
+ )
return self._part_management
@property
- def PMFileManagement(self):
+ def PMFileManagement(self) -> "pmfilemanagement_root":
"""Datamodel root of PMFileManagement."""
if self._pm_file_management is None:
- self._pm_file_management = _make_datamodel_module(self, "PMFileManagement")
+ self._pm_file_management = cast(
+ "pmfilemanagement_root",
+ _make_datamodel_module(self, "PMFileManagement"),
+ )
return self._pm_file_management
@property
- def preferences(self):
+ def preferences(self) -> "preferences_root":
"""Datamodel root of preferences."""
if self._preferences is None:
- self._preferences = _make_datamodel_module(self, "preferences")
+ self._preferences = cast(
+ "preferences_root", _make_datamodel_module(self, "preferences")
+ )
return self._preferences
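
session_base_meshing.py adopts the TYPE_CHECKING-import-plus-cast idiom: the generated Root classes are imported only for the type checker, and cast() narrows the dynamically built datamodel objects at zero runtime cost. A minimal sketch with a stand-in module path:

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Checker-only import; the generated module is never loaded at runtime.
    from my_generated.meshing import Root  # hypothetical path


def _make_datamodel_module(name: str) -> object:
    return object()  # stands in for the dynamic datamodel construction


def meshing() -> "Root":
    # cast() is a runtime no-op; it only tells the checker what this is.
    return cast("Root", _make_datamodel_module("meshing"))
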
diff --git a/src/ansys/fluent/core/session_meshing.py b/src/ansys/fluent/core/session_meshing.py
index e17793b94634..35d11f86a6d1 100644
--- a/src/ansys/fluent/core/session_meshing.py
+++ b/src/ansys/fluent/core/session_meshing.py
@@ -22,7 +22,7 @@
"""Module containing class encapsulating Fluent connection."""
-from typing import Any, Dict
+from typing import TYPE_CHECKING, Any
from ansys.fluent.core.fluent_connection import FluentConnection
from ansys.fluent.core.services import SchemeEval
@@ -47,7 +47,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""Meshing session.
@@ -65,7 +65,7 @@ def __init__(
transcript can be subsequently started and stopped
using method calls on the ``Session`` object.
"""
- super(Meshing, self).__init__(
+ super().__init__(
fluent_connection=fluent_connection,
scheme_eval=scheme_eval,
file_transfer_service=file_transfer_service,
@@ -95,56 +95,21 @@ def switch_to_solver(self) -> Any:
)
return solver_session
- def __getattribute__(self, item: str):
- if item.startswith("__") and item.endswith("__"):
- return super().__getattribute__(item)
- try:
- _connection = super(Meshing, self).__getattribute__("_fluent_connection")
- except AttributeError:
- _connection = False
- if _connection is None and item not in BaseSession._inactive_session_allow_list:
- raise AttributeError(
- f"'{type(self).__name__}' object has no attribute '{item}'"
- )
-
- return super(Meshing, self).__getattribute__(item)
-
- @property
- def tui(self):
- """Meshing TUI root."""
- return super(Meshing, self).tui
-
- @property
- def meshing(self):
- """Meshing datamodel root."""
- return super(Meshing, self).meshing
-
- @property
- def meshing_utilities(self):
- """Meshing utilities datamodel root."""
- return super(Meshing, self).meshing_utilities
-
- @property
- def workflow(self):
- """Workflow datamodel root."""
- return super(Meshing, self).workflow
-
- @property
- def meshing_workflow(self):
- """Full API to meshing and meshing_workflow."""
- return super(Meshing, self).meshing_workflow
-
- @property
- def PartManagement(self):
- """Part management datamodel root."""
- return super(Meshing, self).PartManagement
-
- @property
- def PMFileManagement(self):
- """Part management file management datamodel root."""
- return super(Meshing, self).PMFileManagement
-
- @property
- def preferences(self):
- """Preferences datamodel root."""
- return super(Meshing, self).preferences
+ if not TYPE_CHECKING:
+
+ def __getattribute__(self, item: str):
+ if item.startswith("__") and item.endswith("__"):
+ return super().__getattribute__(item)
+ try:
+ _connection = super().__getattribute__("_fluent_connection")
+ except AttributeError:
+ _connection = False
+ if (
+ _connection is None
+ and item not in BaseSession._inactive_session_allow_list
+ ):
+ raise AttributeError(
+ f"'{type(self).__name__}' object has no attribute '{item}'"
+ )
+
+ return super().__getattribute__(item)
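
Hiding __getattribute__ behind if not TYPE_CHECKING keeps the runtime guard for dead connections while leaving the checker blind to the hook, so attribute access is still validated against the inherited annotations. The shape of the idiom:

from typing import TYPE_CHECKING


class Session:
    connected: bool = True

    if not TYPE_CHECKING:
        # Runtime-only hook; invisible to the type checker, so access is
        # still checked against the declared members above.
        def __getattribute__(self, item: str):
            if item == "forbidden":
                raise AttributeError(item)
            return super().__getattribute__(item)


assert Session().connected
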
diff --git a/src/ansys/fluent/core/session_meshing.pyi b/src/ansys/fluent/core/session_meshing.pyi
deleted file mode 100644
index 348a464da3fe..000000000000
--- a/src/ansys/fluent/core/session_meshing.pyi
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates.
-# SPDX-License-Identifier: MIT
-#
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from ansys.fluent.core.generated.datamodel_252.meshing import Root as meshing_root
-from ansys.fluent.core.generated.datamodel_252.meshing_utilities import (
- Root as meshing_utilities_root,
-)
-from ansys.fluent.core.generated.datamodel_252.part_management import (
- Root as partmanagement_root,
-)
-from ansys.fluent.core.generated.datamodel_252.pm_file_management import (
- Root as pmfilemanagement_root,
-)
-from ansys.fluent.core.generated.datamodel_252.preferences import (
- Root as preferences_root,
-)
-from ansys.fluent.core.generated.datamodel_252.workflow import Root as workflow_root
-from ansys.fluent.core.generated.meshing.tui_252 import main_menu
-
-class Meshing:
- @property
- def tui(self) -> main_menu: ...
- @property
- def meshing(self) -> meshing_root: ...
- @property
- def meshing_utilities(self) -> meshing_utilities_root: ...
- @property
- def workflow(self) -> workflow_root: ...
- @property
- def PartManagement(self) -> partmanagement_root: ...
- @property
- def PMFileManagement(self) -> pmfilemanagement_root: ...
- @property
- def preferences(self) -> preferences_root: ...
diff --git a/src/ansys/fluent/core/session_pure_meshing.py b/src/ansys/fluent/core/session_pure_meshing.py
index d12b50fcc302..91ca7f270f2c 100644
--- a/src/ansys/fluent/core/session_pure_meshing.py
+++ b/src/ansys/fluent/core/session_pure_meshing.py
@@ -24,13 +24,14 @@
import functools
import os
-from typing import Any, Dict
+from typing import TYPE_CHECKING, Any
import ansys.fluent.core as pyfluent
from ansys.fluent.core._types import PathType
from ansys.fluent.core.data_model_cache import DataModelCache, NameKey
from ansys.fluent.core.exceptions import BetaFeaturesNotEnabled
from ansys.fluent.core.fluent_connection import FluentConnection
+from ansys.fluent.core.meshing.meshing_workflow import WatertightMeshingWorkflow
from ansys.fluent.core.services import SchemeEval
from ansys.fluent.core.session import BaseSession
from ansys.fluent.core.session_base_meshing import BaseMeshing
@@ -38,6 +39,26 @@
from ansys.fluent.core.streaming_services.events_streaming import MeshingEvent
from ansys.fluent.core.utils.data_transfer import transfer_case
+if TYPE_CHECKING:
+ from ansys.fluent.core.generated.datamodel_252.meshing import Root as meshing_root
+ from ansys.fluent.core.generated.datamodel_252.meshing_utilities import (
+ Root as meshing_utilities_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.meshing_workflow import (
+ Root as meshing_workflow_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.part_management import (
+ Root as partmanagement_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.pm_file_management import (
+ Root as pmfilemanagement_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.preferences import (
+ Root as preferences_root,
+ )
+ from ansys.fluent.core.generated.datamodel_252.workflow import Root as workflow_root
+ from ansys.fluent.core.generated.meshing.tui_252 import main_menu
+
class PureMeshing(BaseSession):
"""Encapsulates a Fluent meshing session with a meshing-only Python interface.
@@ -67,7 +88,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""PureMeshing session.
@@ -85,7 +106,7 @@ def __init__(
transcript can be subsequently started and stopped
using method calls on the ``Session`` object.
"""
- super(PureMeshing, self).__init__(
+ super().__init__(
fluent_connection=fluent_connection,
scheme_eval=scheme_eval,
file_transfer_service=file_transfer_service,
@@ -130,23 +151,23 @@ def __init__(
self._fluent_connection.register_finalizer_cb(stream.stop)
@property
- def tui(self):
+ def tui(self) -> "main_menu":
"""Instance of ``main_menu`` on which Fluent's SolverTUI methods can be
executed."""
return self._base_meshing.tui
@property
- def meshing(self):
+ def meshing(self) -> "meshing_root":
"""Datamodel root of meshing."""
return self._base_meshing.meshing
@property
- def meshing_utilities(self):
+ def meshing_utilities(self) -> "meshing_utilities_root | None":
"""Datamodel root of meshing_utilities."""
return self._base_meshing.meshing_utilities
@property
- def workflow(self):
+ def workflow(self) -> "workflow_root":
"""Datamodel root of workflow."""
return self._base_meshing.workflow
@@ -155,7 +176,7 @@ def meshing_workflow(self):
"""Full API to meshing and meshing_workflow."""
return self._base_meshing.meshing_workflow
- def watertight(self):
+ def watertight(self) -> WatertightMeshingWorkflow:
"""Get a new watertight workflow."""
return self._base_meshing.watertight_workflow()
@@ -193,17 +214,17 @@ def topology_based(self):
return self._base_meshing.topology_based_meshing_workflow()
@property
- def PartManagement(self):
+ def PartManagement(self) -> "partmanagement_root":
"""Datamodel root of PartManagement."""
return self._base_meshing.PartManagement
@property
- def PMFileManagement(self):
+ def PMFileManagement(self) -> "pmfilemanagement_root":
"""Datamodel root of PMFileManagement."""
return self._base_meshing.PMFileManagement
@property
- def preferences(self):
+ def preferences(self) -> "preferences_root":
"""Datamodel root of preferences."""
return self._base_meshing.preferences
diff --git a/src/ansys/fluent/core/session_pure_meshing.pyi b/src/ansys/fluent/core/session_pure_meshing.pyi
deleted file mode 100644
index 0e944d121961..000000000000
--- a/src/ansys/fluent/core/session_pure_meshing.pyi
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates.
-# SPDX-License-Identifier: MIT
-#
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from ansys.fluent.core.generated.datamodel_252.meshing import Root as meshing_root
-from ansys.fluent.core.generated.datamodel_252.meshing_utilities import (
- Root as meshing_utilities_root,
-)
-from ansys.fluent.core.generated.datamodel_252.part_management import (
- Root as partmanagement_root,
-)
-from ansys.fluent.core.generated.datamodel_252.pm_file_management import (
- Root as pmfilemanagement_root,
-)
-from ansys.fluent.core.generated.datamodel_252.preferences import (
- Root as preferences_root,
-)
-from ansys.fluent.core.generated.datamodel_252.workflow import Root as workflow_root
-from ansys.fluent.core.generated.datamodel_261.meshing_workflow import (
- Root as meshing_workflow_root,
-)
-from ansys.fluent.core.generated.meshing.tui_252 import main_menu
-
-class PureMeshing:
- @property
- def tui(self) -> main_menu: ...
- @property
- def meshing(self) -> meshing_root: ...
- @property
- def meshing_utilities(self) -> meshing_utilities_root: ...
- @property
- def workflow(self) -> workflow_root: ...
- @property
- def meshing_workflow(self) -> meshing_workflow_root: ...
- def watertight(self): ...
- def fault_tolerant(self): ...
- def two_dimensional_meshing(self): ...
- def topology_based(self): ...
- def load_workflow(self, file_path: str): ...
- def create_workflow(self): ...
- @property
- def PartManagement(self) -> partmanagement_root: ...
- @property
- def PMFileManagement(self) -> pmfilemanagement_root: ...
- @property
- def preferences(self) -> preferences_root: ...
- def transfer_mesh_to_solvers(
- self,
- solvers,
- file_type: str = ...,
- file_name_stem: str = ...,
- num_files_to_try: int = ...,
- clean_up_mesh_file: bool = ...,
- overwrite_previous: bool = ...,
- ): ...
- def enable_beta_features(self): ...
diff --git a/src/ansys/fluent/core/session_solver.py b/src/ansys/fluent/core/session_solver.py
index 89178e0cb185..acd87923d18f 100644
--- a/src/ansys/fluent/core/session_solver.py
+++ b/src/ansys/fluent/core/session_solver.py
@@ -24,7 +24,7 @@
import logging
import threading
-from typing import Any, Dict
+from typing import TYPE_CHECKING, Any, cast
import warnings
import weakref
@@ -32,12 +32,14 @@
import ansys.fluent.core as pyfluent
from ansys.fluent.core.exceptions import BetaFeaturesNotEnabled
from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning
-from ansys.fluent.core.services import SchemeEval, service_creator
+from ansys.fluent.core.services import SchemeEval
from ansys.fluent.core.services.field_data import ZoneInfo, ZoneType
-from ansys.fluent.core.services.reduction import ReductionService
+from ansys.fluent.core.services.monitor import MonitorsService
+from ansys.fluent.core.services.reduction import Reduction, ReductionService
from ansys.fluent.core.services.solution_variables import (
SolutionVariableData,
SolutionVariableInfo,
+ SolutionVariableService,
)
from ansys.fluent.core.session import BaseSession
from ansys.fluent.core.session_shared import (
@@ -61,6 +63,14 @@
)
from ansys.fluent.core.workflow import ClassicWorkflow
+if TYPE_CHECKING:
+ from ansys.fluent.core.generated.datamodel_252.preferences import (
+ Root as preferences_root,
+ )
+ import ansys.fluent.core.generated.solver.settings_252 as settings_root
+ from ansys.fluent.core.generated.solver.tui_252 import main_menu
+
+
tui_logger = logging.getLogger("pyfluent.tui")
datamodel_logger = logging.getLogger("pyfluent.datamodel")
@@ -79,7 +89,7 @@ def _set_state_safe(obj: SettingsBase, state: StateType):
datamodel_logger.debug(f"set_state failed at {obj.path}")
-class Solver(BaseSession):
+class Solver(BaseSession, settings_root.root if TYPE_CHECKING else object):
"""Encapsulates a Fluent solver session.
A ``tui`` object for solver TUI
@@ -92,7 +102,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""Solver session.
@@ -110,7 +120,7 @@ def __init__(
transcript can be subsequently started and stopped
using method calls on the ``Session`` object.
"""
- super(Solver, self).__init__(
+ super().__init__(
fluent_connection=fluent_connection,
scheme_eval=scheme_eval,
file_transfer_service=file_transfer_service,
@@ -129,7 +139,7 @@ def _build_from_fluent_connection(
fluent_connection,
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
self._tui_service = self._datamodel_service_tui
self._se_service = self._datamodel_service_se
@@ -139,7 +149,7 @@ def _build_from_fluent_connection(
self._fluent_version = None
self._bg_session_threads = []
self._launcher_args = launcher_args
- self._solution_variable_service = service_creator("svar").create(
+ self._solution_variable_service = SolutionVariableService(
fluent_connection._channel, fluent_connection._metadata
)
self.fields.solution_variable_info = SolutionVariableInfo(
@@ -148,12 +158,10 @@ def _build_from_fluent_connection(
self._reduction_service = self._fluent_connection.create_grpc_service(
ReductionService, self._error_state
)
- self.fields.reduction = service_creator("reduction").create(
- self._reduction_service, self
- )
+ self.fields.reduction = Reduction(self._reduction_service, self)
self.fields.solution_variable_data = self._solution_variable_data()
- monitors_service = service_creator("monitors").create(
+ monitors_service = MonitorsService(
fluent_connection._channel, fluent_connection._metadata, self._error_state
)
#: Manage Fluent's solution monitors.
@@ -174,12 +182,12 @@ def _build_from_fluent_connection(
def _solution_variable_data(self) -> SolutionVariableData:
"""Return the SolutionVariableData handle."""
- return service_creator("svar_data").create(
+ return SolutionVariableData(
self._solution_variable_service, self.fields.solution_variable_info
)
@property
- def settings(self):
+ def settings(self) -> "settings_root.root":
"""Settings root handle."""
if self._settings is None:
#: Root settings object.
@@ -190,7 +198,7 @@ def settings(self):
file_transfer_service=self._file_transfer_service,
scheme_eval=self.scheme.eval,
)
- return self._settings
+ return cast("settings_root.root", self._settings)
@property
def svar_data(self):
@@ -244,16 +252,16 @@ def _version(self):
return self._fluent_version
@property
- def tui(self):
+ def tui(self) -> "main_menu":
"""Instance of ``main_menu`` on which Fluent's SolverTUI methods can be
executed."""
if self._tui is None:
self._tui = _make_tui_module(self, "solver")
- return self._tui
+ return cast("main_menu", self._tui)
@property
- def workflow(self):
+ def workflow(self) -> ClassicWorkflow:
"""Datamodel root for workflow."""
if not self._workflow:
self._workflow = ClassicWorkflow(
@@ -275,18 +283,18 @@ def _interrupt(cls, command):
command._root.solution.run_calculation.interrupt()
@property
- def system_coupling(self):
+ def system_coupling(self) -> SystemCoupling:
"""System coupling object."""
if self._system_coupling is None:
self._system_coupling = SystemCoupling(self)
return self._system_coupling
@property
- def preferences(self):
+ def preferences(self) -> "preferences_root":
"""Datamodel root of preferences."""
if self._preferences is None:
self._preferences = _make_datamodel_module(self, "preferences")
- return self._preferences
+ return cast("preferences_root", self._preferences)
def _start_bg_session_and_sync(self, launcher_args):
"""Start a background session and sync it with the current session."""
@@ -295,7 +303,7 @@ def _start_bg_session_and_sync(self, launcher_args):
except Exception as ex:
raise RuntimeError("Unable to read mesh") from ex
state = self.settings.get_state()
- super(Solver, self)._build_from_fluent_connection(
+ super()._build_from_fluent_connection(
bg_session._fluent_connection,
bg_session._fluent_connection._connection_interface.scheme_eval,
event_type=SolverEvent,
@@ -347,28 +355,33 @@ def set_state(self, state: StateT | None = None, **kwargs):
def __call__(self):
return self.get_state()
- def __getattribute__(self, item: str):
- if item.startswith("__") and item.endswith("__"):
- return super().__getattribute__(item)
- try:
- _connection = super(Solver, self).__getattribute__("_fluent_connection")
- except AttributeError:
- _connection = False
- if _connection is None and item not in BaseSession._inactive_session_allow_list:
- raise AttributeError(
- f"'{type(self).__name__}' object has no attribute '{item}'"
- )
- try:
- return super(Solver, self).__getattribute__(item)
- except AttributeError:
- settings = super(Solver, self).__getattribute__("settings")
- if item in settings.child_names:
- warnings.warn(
- f"'{item}' is deprecated. Use 'settings.{item}' instead.",
- DeprecatedSettingWarning,
+ if not TYPE_CHECKING:
+
+ def __getattribute__(self, item: str):
+ if item.startswith("__") and item.endswith("__"):
+ return super().__getattribute__(item)
+ try:
+ _connection = super().__getattribute__("_fluent_connection")
+ except AttributeError:
+ _connection = False
+ if (
+ _connection is None
+ and item not in BaseSession._inactive_session_allow_list
+ ):
+ raise AttributeError(
+ f"'{type(self).__name__}' object has no attribute '{item}'"
)
- return getattr(settings, item)
- raise
+ try:
+ return super().__getattribute__(item)
+ except AttributeError:
+ settings = super().__getattribute__("settings")
+ if item in settings.child_names:
+ warnings.warn(
+ f"'{item}' is deprecated. Use 'settings.{item}' instead.",
+ DeprecatedSettingWarning,
+ )
+ return getattr(settings, item)
+ raise
def __dir__(self):
dir_list = set(super().__dir__()) - {
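
The new Solver base list relies on type checkers evaluating the conditional expression statically: the generated settings root is a base class for annotation purposes only, while at runtime the expression collapses to object and the generated module is never imported. A sketch with stand-in names:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from my_generated.settings import root as settings_root  # hypothetical


class Solver(settings_root if TYPE_CHECKING else object):
    # Statically: every generated settings member appears on Solver.
    # At runtime: a plain object subclass; members resolve dynamically.
    pass


assert Solver.__bases__ == (object,)
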
diff --git a/src/ansys/fluent/core/session_solver_aero.py b/src/ansys/fluent/core/session_solver_aero.py
index 07a4a0d39923..fcd849010d34 100644
--- a/src/ansys/fluent/core/session_solver_aero.py
+++ b/src/ansys/fluent/core/session_solver_aero.py
@@ -25,7 +25,7 @@
Expose aero capabilities.
"""
-from typing import Any, Dict
+from typing import Any
from ansys.fluent.core.fluent_connection import FluentConnection
from ansys.fluent.core.services import SchemeEval
@@ -46,7 +46,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""SolverAero session.
@@ -64,7 +64,7 @@ def __init__(
transcript can be subsequently started and stopped
using method calls on the ``Session`` object.
"""
- super(SolverAero, self).__init__(
+ super().__init__(
fluent_connection=fluent_connection,
scheme_eval=scheme_eval,
file_transfer_service=file_transfer_service,
@@ -110,6 +110,3 @@ def _flserver(self):
def aero(self):
"""Instance of aero (Case.App) -> root datamodel object."""
return self._flserver.Case.App
-
- def __dir__(self):
- return super(SolverAero, self).__dir__()
diff --git a/src/ansys/fluent/core/session_solver_icing.py b/src/ansys/fluent/core/session_solver_icing.py
index bb964cbc5798..b4ec7982fdd7 100644
--- a/src/ansys/fluent/core/session_solver_icing.py
+++ b/src/ansys/fluent/core/session_solver_icing.py
@@ -26,7 +26,7 @@
"""
import importlib
-from typing import Any, Dict
+from typing import Any
from ansys.fluent.core.fluent_connection import FluentConnection
from ansys.fluent.core.services import SchemeEval
@@ -46,7 +46,7 @@ def __init__(
scheme_eval: SchemeEval,
file_transfer_service: Any | None = None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""SolverIcing session.
@@ -64,7 +64,7 @@ def __init__(
transcript can be subsequently started and stopped
using method calls on the ``Session`` object.
"""
- super(SolverIcing, self).__init__(
+ super().__init__(
fluent_connection=fluent_connection,
scheme_eval=scheme_eval,
file_transfer_service=file_transfer_service,
@@ -90,6 +90,3 @@ def _flserver(self):
def icing(self):
"""Instance of icing (Case.App) -> root datamodel object."""
return self._flserver.Case.App
-
- def __dir__(self):
- return super(SolverIcing, self).__dir__()
diff --git a/src/ansys/fluent/core/session_solver_lite.py b/src/ansys/fluent/core/session_solver_lite.py
index f6075e27a5b1..dfadde08a1de 100644
--- a/src/ansys/fluent/core/session_solver_lite.py
+++ b/src/ansys/fluent/core/session_solver_lite.py
@@ -25,7 +25,7 @@
**********PRESENTLY SAME AS SOLVER WITH A SWITCH TO SOLVER***********
"""
-from typing import Any, Dict
+from typing import Any
from ansys.fluent.core.session_solver import Solver
@@ -40,7 +40,7 @@ def __init__(
fluent_connection=None,
scheme_eval=None,
start_transcript: bool = True,
- launcher_args: Dict[str, Any] | None = None,
+ launcher_args: dict[str, Any] | None = None,
):
"""SolverLite session.
diff --git a/src/ansys/fluent/core/session_utilities.py b/src/ansys/fluent/core/session_utilities.py
index fff2926411f3..f52cf5545cd7 100644
--- a/src/ansys/fluent/core/session_utilities.py
+++ b/src/ansys/fluent/core/session_utilities.py
@@ -1,3 +1,5 @@
+# pyright: reportNoOverloadImplementation=false
+
# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates.
# SPDX-License-Identifier: MIT
#
@@ -22,22 +24,44 @@
"""Session utilities."""
-from typing import Any, Dict
+from typing import TYPE_CHECKING, Any, Literal, overload
+
+from typing_extensions import Unpack, override
-import ansys.fluent.core as pyfluent
-from ansys.fluent.core._types import PathType
-from ansys.fluent.core.launcher.container_launcher import DockerLauncher
+from ansys.fluent.core import (
+ session_meshing,
+ session_pure_meshing,
+ session_solver,
+ session_solver_aero,
+ session_solver_icing,
+)
+from ansys.fluent.core.launcher.container_launcher import (
+ ContainerArgsWithoutDryRun,
+ DockerLauncher,
+)
from ansys.fluent.core.launcher.launch_options import (
- Dimension,
- FluentLinuxGraphicsDriver,
FluentMode,
- FluentWindowsGraphicsDriver,
- Precision,
- UIMode,
)
-from ansys.fluent.core.launcher.pim_launcher import PIMLauncher
-from ansys.fluent.core.launcher.standalone_launcher import StandaloneLauncher
-from ansys.fluent.core.utils.fluent_version import FluentVersion
+from ansys.fluent.core.launcher.launcher import LaunchFluentArgs, connect_to_fluent
+from ansys.fluent.core.launcher.pim_launcher import (
+ PIMArgs,
+ PIMLauncher,
+)
+from ansys.fluent.core.launcher.standalone_launcher import (
+ StandaloneArgsWithoutDryRun,
+ StandaloneLauncher,
+)
+from ansys.fluent.core.session import BaseSession
+
+__all__ = (
+ "Meshing",
+ "PureMeshing",
+ "PrePost",
+ "PureMeshing",
+ "Solver",
+ "SolverAero",
+ "SolverIcing",
+)
class SessionBase:
@@ -48,7 +72,7 @@ class SessionBase:
or `from_pim` functions to create a session.
"""
- _session_mode = {
+ _session_mode: dict[str, FluentMode] = {
"Meshing": FluentMode.MESHING,
"PureMeshing": FluentMode.PURE_MESHING,
"PrePost": FluentMode.PRE_POST,
@@ -57,35 +81,31 @@ class SessionBase:
"SolverIcing": FluentMode.SOLVER_ICING,
}
+ @overload
@classmethod
def from_install(
cls,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- journal_file_names: None | str | list[str] = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- env: Dict[str, Any] = {}, # noqa: B006
- cleanup_on_exit: bool = True,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[StandaloneArgsWithoutDryRun],
+ ) -> BaseSession: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[StandaloneArgsWithoutDryRun],
+ ) -> tuple[str, str]: ...
+
+ @classmethod
+ def from_install( # pylint: disable=missing-param-doc
+ cls,
+ *,
dry_run: bool = False,
- start_transcript: bool = True,
- case_file_name: "PathType | None" = None,
- case_data_file_name: "PathType | None" = None,
- lightweight_mode: bool | None = None,
- py: bool | None = None,
- gpu: bool | None = None,
- cwd: "PathType | None" = None,
- fluent_path: "PathType | None" = None,
- topy: str | list | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
- ):
+ **kwargs: Unpack[StandaloneArgsWithoutDryRun],
+ ) -> BaseSession | tuple[str, str]:
"""
Launch a Fluent session in standalone mode.
@@ -163,38 +183,36 @@ def from_install(
In job scheduler environments (e.g., SLURM, LSF, PBS), resources and compute nodes are allocated,
and core counts are queried from these environments before being passed to Fluent.
"""
- mode = cls._session_mode[cls.__name__]
- argvals = locals().copy()
- argvals.pop("cls", None) # Remove the class reference from the arguments
- launcher = StandaloneLauncher(**argvals)
+ launcher = StandaloneLauncher(
+ **kwargs, dry_run=dry_run, mode=cls._session_mode[cls.__name__]
+ )
return launcher()
+ @overload
@classmethod
def from_container(
cls,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- container_dict: dict | None = None,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> BaseSession: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+ @classmethod
+ def from_container( # pylint: disable=missing-param-doc
+ cls,
+ *,
dry_run: bool = False,
- cleanup_on_exit: bool = True,
- start_transcript: bool = True,
- py: bool | None = None,
- gpu: bool | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
- use_docker_compose: bool | None = None,
- use_podman_compose: bool | None = None,
- certificates_folder: str | None = None,
- insecure_mode: bool = False,
- ):
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> BaseSession | dict[str, Any]:
"""
Launch a Fluent session in container mode.
@@ -272,32 +290,16 @@ def from_container(
In job scheduler environments (e.g., SLURM, LSF, PBS), resources and compute nodes are allocated,
and core counts are queried from these environments before being passed to Fluent.
"""
- mode = cls._session_mode[cls.__name__]
- argvals = locals().copy()
- argvals.pop("cls", None)
- launcher = DockerLauncher(**argvals)
+ launcher = DockerLauncher(
+ **kwargs, dry_run=dry_run, mode=cls._session_mode[cls.__name__]
+ )
return launcher()
@classmethod
- def from_pim(
+ def from_pim( # pylint: disable=missing-param-doc
cls,
- ui_mode: UIMode | str | None = None,
- graphics_driver: (
- FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None
- ) = None,
- product_version: FluentVersion | str | float | int | None = None,
- dimension: Dimension | int | None = None,
- precision: Precision | str | None = None,
- processor_count: int | None = None,
- start_timeout: int = 60,
- additional_arguments: str = "",
- cleanup_on_exit: bool = True,
- dry_run: bool | None = None,
- start_transcript: bool = True,
- gpu: bool | None = None,
- start_watchdog: bool | None = None,
- file_transfer_service: Any | None = None,
- ):
+ **kwargs: Unpack[PIMArgs],
+ ) -> BaseSession:
"""
Launch a Fluent session in PIM mode.
@@ -360,10 +362,9 @@ def from_pim(
In job scheduler environments (e.g., SLURM, LSF, PBS), resources and compute nodes are allocated,
and core counts are queried from these environments before being passed to Fluent.
"""
- mode = cls._session_mode[cls.__name__]
- argvals = locals().copy()
- argvals.pop("cls", None)
- launcher = PIMLauncher(**argvals)
+ kwargs_with_mode = dict(kwargs)
+ kwargs_with_mode["mode"] = cls._session_mode[cls.__name__]
+ launcher = PIMLauncher(**kwargs_with_mode)
return launcher()
@classmethod
@@ -411,10 +412,11 @@ def from_connection(
TypeError
If the session type does not match the expected session type.
"""
- argvals = locals().copy()
- argvals.pop("cls", None)
- session = pyfluent.connect_to_fluent(
- **argvals,
+ session = connect_to_fluent(
+ ip=ip,
+ port=port,
+ server_info_file_name=server_info_file_name,
+ password=password,
)
expected = "Solver" if cls.__name__ == "PrePost" else cls.__name__
@@ -431,34 +433,282 @@ def from_connection(
class Meshing(SessionBase):
"""Encapsulates a Fluent server for meshing session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_meshing.Meshing: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_meshing.Meshing: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+ @override
+ @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_meshing.Meshing: ...
class PureMeshing(SessionBase):
"""Encapsulates a Fluent server for pure meshing session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_pure_meshing.PureMeshing: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_pure_meshing.PureMeshing: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+ @override
+ @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_pure_meshing.PureMeshing: ...
class PrePost(SessionBase):
"""Encapsulates a Fluent server for pre-post session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_solver.Solver: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_solver.Solver: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+        @override
+ @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_solver.Solver: ...
class Solver(SessionBase):
"""Encapsulates a Fluent server for solver session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_solver.Solver: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_solver.Solver: ...
+
+        @overload
+        @classmethod
+        def from_container(
+            cls,
+            *,
+            dry_run: Literal[True],
+            **kwargs: Unpack[ContainerArgsWithoutDryRun],
+        ) -> dict[str, Any]: ...
+
+        @override
+        @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_solver.Solver: ...
class SolverAero(SessionBase):
"""Encapsulates a Fluent server for solver aero session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_solver_aero.SolverAero: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_solver_aero.SolverAero: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+        @override
+ @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_solver_aero.SolverAero: ...
class SolverIcing(SessionBase):
"""Encapsulates a Fluent server for solver icing session connection."""
- pass
+ if TYPE_CHECKING:
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> session_solver_icing.SolverIcing: ...
+
+ @overload
+ @classmethod
+ def from_install(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[LaunchFluentArgs],
+ ) -> tuple[str, str]: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[False] = False,
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> session_solver_icing.SolverIcing: ...
+
+ @overload
+ @classmethod
+ def from_container(
+ cls,
+ *,
+ dry_run: Literal[True],
+ **kwargs: Unpack[ContainerArgsWithoutDryRun],
+ ) -> dict[str, Any]: ...
+
+        @override
+ @classmethod
+ def from_pim(
+ cls,
+ **kwargs: Unpack[PIMArgs],
+ ) -> session_solver_icing.SolverIcing: ...
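
The rewritten launch entry points encode "dry_run flips the return type" as paired @overload signatures on a Literal keyword, with the long option lists moved into TypedDicts consumed via Unpack. A condensed, self-contained sketch; the TypedDict fields below are invented, not the real launcher options:

from typing import Any, Literal, TypedDict, overload

from typing_extensions import Unpack  # typing.Unpack on Python 3.11+


class Session:
    """Stand-in for a live Fluent session."""


class LaunchArgs(TypedDict, total=False):
    processor_count: int
    start_transcript: bool


@overload
def launch(
    *, dry_run: Literal[False] = False, **kwargs: Unpack[LaunchArgs]
) -> Session: ...
@overload
def launch(
    *, dry_run: Literal[True], **kwargs: Unpack[LaunchArgs]
) -> dict[str, Any]: ...
def launch(*, dry_run: bool = False, **kwargs: Unpack[LaunchArgs]):
    if dry_run:
        return dict(kwargs)  # report the resolved configuration only
    return Session()


preview = launch(dry_run=True, processor_count=4)  # checker: dict[str, Any]
live = launch(processor_count=4)  # checker: Session
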
diff --git a/src/ansys/fluent/core/solver/error_message.py b/src/ansys/fluent/core/solver/error_message.py
index 938ade4bbc93..a4b1852a8675 100644
--- a/src/ansys/fluent/core/solver/error_message.py
+++ b/src/ansys/fluent/core/solver/error_message.py
@@ -22,9 +22,10 @@
"""Provides a module to customize exception messages."""
+from collections.abc import Iterable
import difflib
from functools import partial
-from typing import Any, Iterable
+from typing import Any
def closest_allowed_names(trial_name: str, allowed_names: Iterable[str]) -> list[str]:
diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py
index bc035d7487f3..491c0a36275f 100644
--- a/src/ansys/fluent/core/solver/flobject.py
+++ b/src/ansys/fluent/core/solver/flobject.py
@@ -41,6 +41,7 @@
from __future__ import annotations
import collections
+from collections.abc import Callable
from contextlib import contextmanager, nullcontext
import fnmatch
import hashlib
@@ -55,13 +56,9 @@
import types
from typing import (
Any,
- Callable,
- Dict,
ForwardRef,
Generic,
- List,
NewType,
- Tuple,
TypeVar,
Union,
_eval_type,
@@ -127,12 +124,12 @@ class _InlineConstants:
# Type hints
-RealType = NewType("real", Union[float, str]) # constant or expression
-RealListType = List[RealType]
-RealVectorType = Tuple[RealType, RealType, RealType]
-IntListType = List[int]
-StringListType = List[str]
-BoolListType = List[bool]
+RealType = NewType("real", float | str) # constant or expression
+RealListType = list[RealType]
+RealVectorType = tuple[RealType, RealType, RealType]
+IntListType = list[int]
+StringListType = list[str]
+BoolListType = list[bool]
PrimitiveStateType = Union[
str,
RealType,
@@ -143,8 +140,8 @@ class _InlineConstants:
StringListType,
BoolListType,
]
-DictStateType = Dict[str, "StateType"]
-ListStateType = List["StateType"]
+DictStateType = dict[str, "StateType"]
+ListStateType = list["StateType"]
StateType = Union[PrimitiveStateType, DictStateType, ListStateType]
@@ -411,7 +408,7 @@ def get_attrs(self, attrs, recursive=False) -> Any:
def get_attr(
self,
attr: str,
- attr_type_or_types: type | Tuple[type] | None = None,
+ attr_type_or_types: type | tuple[type] | None = None,
) -> Any:
"""Get the requested attribute for the object.
@@ -530,7 +527,7 @@ def __eq__(self, other):
return False
return self.flproxy == other.flproxy and self.path == other.path
- def get_completer_info(self, prefix="", excluded=None) -> List[List[str]]:
+ def get_completer_info(self, prefix="", excluded=None) -> list[list[str]]:
"""Get completer info of all children.
Returns
@@ -2019,8 +2016,7 @@ def __iter__(self):
"""Iterator for child named objects."""
for cname in self.child_names:
try:
- for item in getattr(self, cname):
- yield item
+ yield from getattr(self, cname)
except Exception:
continue
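
Beyond the mechanical `List` → `list` swaps, this file shows two details worth keeping in mind when modernizing aliases: PEP 604 `|` unions can appear inside `NewType`, and recursive aliases still need string forward references on the recursive arm. A compressed, runnable restatement of the pattern (names mirror the diff; the values at the end are illustrative):

```python
from typing import NewType, Union

RealType = NewType("real", float | str)  # constant or expression
RealVectorType = tuple[RealType, RealType, RealType]

# The recursive arms must be strings, since StateType is not yet bound;
# Union[...] is the conservative spelling for a runtime-evaluated
# recursive alias.
PrimitiveStateType = Union[str, RealType, int, float, bool]
DictStateType = dict[str, "StateType"]
ListStateType = list["StateType"]
StateType = Union[PrimitiveStateType, DictStateType, ListStateType]

origin: RealVectorType = (RealType(0.0), RealType(0.0), RealType("x_min + 1"))
state: StateType = {"origin": list(origin), "enabled": True}
print(state)
```

Strict checkers may flag `NewType` over a union (the supertype is normally expected to be a class); the diff preserves the original's choice and only modernizes the spelling.
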
diff --git a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py
index 2cefc28c55fd..da5c8a63992a 100644
--- a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py
+++ b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py
@@ -22,9 +22,9 @@
"""Provides a module for datamodel event streaming."""
+from collections.abc import Callable
import logging
import threading
-from typing import Callable
from google.protobuf.json_format import MessageToDict
diff --git a/src/ansys/fluent/core/streaming_services/events_streaming.py b/src/ansys/fluent/core/streaming_services/events_streaming.py
index a73c30aa0008..1afe111787e2 100644
--- a/src/ansys/fluent/core/streaming_services/events_streaming.py
+++ b/src/ansys/fluent/core/streaming_services/events_streaming.py
@@ -22,12 +22,13 @@
"""Module for events management."""
+from collections.abc import Callable, Sequence
from dataclasses import dataclass, field, fields
from enum import Enum
from functools import partial
import inspect
import logging
-from typing import Callable, Generic, Literal, Sequence, Type, TypeVar
+from typing import Generic, Literal, TypeVar
import warnings
from google.protobuf.json_format import MessageToDict
@@ -382,7 +383,7 @@ class EventsManager(Generic[TEvent]):
def __init__(
self,
- event_type: Type[TEvent],
+ event_type: type[TEvent],
session_events_service,
fluent_error_state,
session,
diff --git a/src/ansys/fluent/core/streaming_services/field_data_streaming.py b/src/ansys/fluent/core/streaming_services/field_data_streaming.py
index 6cf594f17168..fde44f2cba9e 100644
--- a/src/ansys/fluent/core/streaming_services/field_data_streaming.py
+++ b/src/ansys/fluent/core/streaming_services/field_data_streaming.py
@@ -22,7 +22,7 @@
"""Module for Field data streaming."""
-from typing import Callable, Dict, List
+from collections.abc import Callable
from ansys.api.fluent.v0 import field_data_pb2 as FieldDataProtoModule
from ansys.fluent.core.services.field_data import ChunkParser
@@ -58,6 +58,6 @@ def _process_streaming(self, id, stream_begin_method, started_evt, *args, **kwar
)
)
- def callbacks(self) -> List[List[Callable | List | Dict]]:
+ def callbacks(self) -> list[list[Callable | list | dict]]:
"""Get list of callbacks along with arguments and keyword arguments."""
return self._service_callbacks.values()
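
One nit this hunk leaves untouched: the annotation now reads `list[...]`, but `dict.values()` returns a view, not a list. If the hint is meant literally, a follow-up could materialize the view. A minimal stand-in showing the mismatch and one possible fix (the `_CallbackRegistry` class is hypothetical, not PyFluent's):

```python
from collections.abc import Callable


class _CallbackRegistry:
    """Hypothetical stand-in for the streaming service's bookkeeping."""

    def __init__(self) -> None:
        self._service_callbacks: dict[int, list[Callable | list | dict]] = {}

    def register(self, cb_id: int, fn: Callable, args: list, kwargs: dict) -> None:
        self._service_callbacks[cb_id] = [fn, args, kwargs]

    def callbacks(self) -> list[list[Callable | list | dict]]:
        # dict.values() is a dict_values view; wrapping it in list() makes
        # the runtime value actually match the annotation.
        return list(self._service_callbacks.values())


reg = _CallbackRegistry()
reg.register(1, print, ["chunk received"], {"flush": True})
assert isinstance(reg.callbacks(), list)
```
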
diff --git a/src/ansys/fluent/core/streaming_services/monitor_streaming.py b/src/ansys/fluent/core/streaming_services/monitor_streaming.py
index 5644a11c8e40..82d6086fb739 100644
--- a/src/ansys/fluent/core/streaming_services/monitor_streaming.py
+++ b/src/ansys/fluent/core/streaming_services/monitor_streaming.py
@@ -23,7 +23,6 @@
"""Module for monitors management."""
import threading
-from typing import Dict, List, Tuple
import numpy as np
@@ -60,7 +59,7 @@ def __init__(self, session_id: str, service):
self._monitors_info = None
self._data_frames = {}
- def get_monitor_set_names(self) -> List[str]:
+ def get_monitor_set_names(self) -> list[str]:
"""Get monitor set names.
Parameters
@@ -120,7 +119,7 @@ def get_monitor_set_data(
monitor_set_name,
start_index: int = 0,
end_index: int | None = None,
- ) -> Tuple[np.array, Dict[str, np.array]]:
+ ) -> tuple[np.ndarray, dict[str, np.ndarray]]:
"""Get monitor set data.
Parameters
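
The `Tuple[np.array, Dict[str, np.array]]` → `tuple[np.ndarray, dict[str, np.ndarray]]` change fixes more than style: `np.array` is a factory function, not a type, so the old annotation never named a real type; `np.ndarray` is the actual array class. A toy illustration of the corrected shape (the data is invented for the example):

```python
import numpy as np


def monitor_set_data() -> tuple[np.ndarray, dict[str, np.ndarray]]:
    # Illustrative only: iteration numbers plus one series per monitor.
    x = np.arange(5, dtype=float)
    return x, {"continuity-residual": np.exp(-x)}


xdata, series = monitor_set_data()
assert isinstance(xdata, np.ndarray)
assert isinstance(series["continuity-residual"], np.ndarray)
```
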
diff --git a/src/ansys/fluent/core/streaming_services/streaming.py b/src/ansys/fluent/core/streaming_services/streaming.py
index 13cad5577dca..539b27eea469 100644
--- a/src/ansys/fluent/core/streaming_services/streaming.py
+++ b/src/ansys/fluent/core/streaming_services/streaming.py
@@ -22,10 +22,10 @@
"""Provides a module for streaming services."""
+from collections.abc import Callable
import itertools
import logging
import threading
-from typing import Callable
logger = logging.getLogger("pyfluent.networking")
diff --git a/src/ansys/fluent/core/system_coupling.py b/src/ansys/fluent/core/system_coupling.py
index d8aaad917233..2adfa603046e 100644
--- a/src/ansys/fluent/core/system_coupling.py
+++ b/src/ansys/fluent/core/system_coupling.py
@@ -24,7 +24,6 @@
from dataclasses import dataclass
import os
-from typing import List
import xml.etree.ElementTree as XmlET
from defusedxml.ElementTree import fromstring
@@ -52,8 +51,8 @@ class Region:
name: str
display_name: str
topology: str
- input_variables: List[str]
- output_variables: List[str]
+ input_variables: list[str]
+ output_variables: list[str]
class SystemCoupling:
@@ -86,7 +85,7 @@ def participant_type(self) -> str:
"""Get participant type."""
return "FLUENT"
- def get_variables(self) -> List[Variable]:
+ def get_variables(self) -> list[Variable]:
"""Get variables."""
if self._solver.get_fluent_version() >= FluentVersion.v251:
@@ -131,7 +130,7 @@ def get_variables(self) -> List[Variable]:
# maintains back-compatibility for 24.1 and 24.2
return self.__get_syc_setup()["variables"]
- def get_regions(self) -> List[Region]:
+ def get_regions(self) -> list[Region]:
"""Get regions."""
if self._solver.get_fluent_version() >= FluentVersion.v251:
@@ -257,7 +256,7 @@ def get_scp_string() -> str:
scp_file_name
), f"ERROR: could not create System Coupling SCP file: {scp_file_name}"
- with open(scp_file_name, "r") as f:
+ with open(scp_file_name) as f:
xml_string = f.read()
os.remove(scp_file_name)
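
The `Region` dataclass change is again a pure spelling update, but dataclasses with `list[str]` fields are a common tripwire: if defaults are ever added, mutable ones must go through `field(default_factory=...)`. A sketch mirroring the diff's `Region`, with hypothetical defaults added only to show the pattern:

```python
from dataclasses import dataclass, field


@dataclass
class Region:
    name: str
    display_name: str
    topology: str
    # default_factory avoids sharing one list across all instances;
    # these defaults are illustrative, not part of the diff.
    input_variables: list[str] = field(default_factory=list)
    output_variables: list[str] = field(default_factory=list)


r = Region(name="wall", display_name="Wall", topology="Surface")
r.input_variables.append("temperature")
print(r)
```
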
diff --git a/src/ansys/fluent/core/ui/standalone_web_ui.py b/src/ansys/fluent/core/ui/standalone_web_ui.py
index c3f6385b96ca..98f6beff9abd 100644
--- a/src/ansys/fluent/core/ui/standalone_web_ui.py
+++ b/src/ansys/fluent/core/ui/standalone_web_ui.py
@@ -24,7 +24,8 @@
from __future__ import annotations
-from typing import Any, Callable, Dict, List
+from collections.abc import Callable
+from typing import Any
try:
import panel as pn
@@ -72,13 +73,13 @@ class Refresh(param.Parameterized):
def _render_widget_from_props(
- settings_obj, label: str, props: Dict[str, Any]
+ settings_obj, label: str, props: dict[str, Any]
) -> pn.viewable.Viewable:
"""Produce a Panel widget from type+props. No backend mutation here."""
return _render_widget_from_props_generic(settings_obj, label, props, pn.widgets)
-def _param_view(settings_obj, props: Dict[str, Any]) -> pn.viewable.Viewable:
+def _param_view(settings_obj, props: dict[str, Any]) -> pn.viewable.Viewable:
label = props["python_name"].replace("_", " ").capitalize()
def get_fn():
@@ -144,14 +145,14 @@ def _commit(event):
)
-def _command_view(func, props: Dict[str, Any]) -> pn.viewable.Viewable:
+def _command_view(func, props: dict[str, Any]) -> pn.viewable.Viewable:
"""Render command arguments (on demand) and execute only on click."""
# Safely fetch argument names (does NOT execute the command)
if not hasattr(func, "argument_names"):
return pn.pane.HTML("Command has no 'argument_names()'.")
arg_names = func.argument_names
- arg_widgets: Dict[str, Any] = {}
- controls: List[pn.viewable.Viewable] = []
+ arg_widgets: dict[str, Any] = {}
+ controls: list[pn.viewable.Viewable] = []
# Build argument widgets immediately when this view is created
for name in arg_names:
@@ -261,7 +262,7 @@ def _settings_view(obj, indent: int = 0) -> pn.viewable.Viewable:
else:
return _param_view(obj, props) if props["is_active"] else pn.pane.HTML("")
- sections: List[pn.viewable.Viewable] = []
+ sections: list[pn.viewable.Viewable] = []
for child_name in child_names:
# Build a lazy loader that only resolves the child on expand
diff --git a/src/ansys/fluent/core/utils/__init__.py b/src/ansys/fluent/core/utils/__init__.py
index 76afa5e837b7..772c33b7a6bd 100644
--- a/src/ansys/fluent/core/utils/__init__.py
+++ b/src/ansys/fluent/core/utils/__init__.py
@@ -27,11 +27,15 @@
from pathlib import Path
import sys
-from ansys.fluent.core.search import search # noqa: F401
-
logger = logging.getLogger("pyfluent.general")
+__all__ = (
+ "load_module",
+ "get_user_data_dir",
+)
+
+
def load_module(module_name, file_path):
"""Load a module from a file path."""
spec = importlib.util.spec_from_file_location(module_name, file_path)
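
Two things happen in this `__init__.py` hunk: the `search` re-export goes away, and an explicit `__all__` takes over the job of defining the module's public surface. `__all__` controls both what `from ... import *` binds and what tooling treats as public; a hypothetical module illustrating the effect:

```python
# mytools.py -- hypothetical module, shown only to illustrate __all__.
__all__ = (
    "load_module",
    "get_user_data_dir",
)


def load_module():
    """Public: listed in __all__, picked up by star-imports."""


def get_user_data_dir():
    """Public: listed in __all__."""


def _scan_cache():
    """Private by convention and excluded from star-imports."""
```

With this in place, `from mytools import *` binds only the two listed names; by the same logic, the removed `search` re-export must now be imported from its defining module, `ansys.fluent.core.search`.
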
diff --git a/src/ansys/fluent/core/utils/deprecate.py b/src/ansys/fluent/core/utils/deprecate.py
index 60cde218b29c..3bb75c68d541 100644
--- a/src/ansys/fluent/core/utils/deprecate.py
+++ b/src/ansys/fluent/core/utils/deprecate.py
@@ -22,9 +22,10 @@
"""Deprecate Arguments."""
+from collections.abc import Callable
import functools
import inspect
-from typing import Any, Callable
+from typing import Any
import warnings
from deprecated.sphinx import deprecated
diff --git a/src/ansys/fluent/core/utils/execution.py b/src/ansys/fluent/core/utils/execution.py
index 07f1b34e2102..fc3aa0a8c353 100644
--- a/src/ansys/fluent/core/utils/execution.py
+++ b/src/ansys/fluent/core/utils/execution.py
@@ -22,12 +22,13 @@
"""Module providing additional execution methods."""
+from collections.abc import Callable
from concurrent.futures import ThreadPoolExecutor
import functools
from multiprocessing.context import TimeoutError
import multiprocessing.pool
import time
-from typing import Any, Callable
+from typing import Any
from ansys.fluent.core.exceptions import InvalidArgument
diff --git a/src/ansys/fluent/core/utils/fldoc.py b/src/ansys/fluent/core/utils/fldoc.py
index 9b45be2ac2dc..dbf71dfe476f 100644
--- a/src/ansys/fluent/core/utils/fldoc.py
+++ b/src/ansys/fluent/core/utils/fldoc.py
@@ -24,7 +24,6 @@
import pprint
import pydoc
-import sys
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
@@ -41,13 +40,8 @@ def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=No
if chop < 0:
rep = rep[:chop] + "..."
line = (name and self.bold(name) + " = " or "") + rep
- # The source have been changed in 3.9, cpython commit id fbf2786c4c89430e2067016603078cf3500cfe94
- if sys.version_info < (3, 9):
- if doc is not None:
- line += "\n" + self.indent(str(doc))
- else:
- if not doc:
- doc = pydoc.getdoc(object)
- if doc:
- line += "\n" + self.indent(str(doc)) + "\n"
+ if not doc:
+ doc = pydoc.getdoc(object)
+ if doc:
+ line += "\n" + self.indent(str(doc)) + "\n"
return line
diff --git a/src/ansys/fluent/core/utils/fluent_version.py b/src/ansys/fluent/core/utils/fluent_version.py
index 6f298c63917f..a825eead15cb 100644
--- a/src/ansys/fluent/core/utils/fluent_version.py
+++ b/src/ansys/fluent/core/utils/fluent_version.py
@@ -32,6 +32,8 @@
import ansys.fluent.core as pyfluent
+__all__ = ("FluentVersion",)
+
class AnsysVersionNotFound(RuntimeError):
"""Raised when Ansys version is not found."""
diff --git a/src/ansys/fluent/core/utils/setup_for_fluent.py b/src/ansys/fluent/core/utils/setup_for_fluent.py
index b5d3cc2165f0..9d2b13002e61 100644
--- a/src/ansys/fluent/core/utils/setup_for_fluent.py
+++ b/src/ansys/fluent/core/utils/setup_for_fluent.py
@@ -25,6 +25,8 @@
from ansys.fluent.core.launcher.launcher import launch_fluent
from ansys.fluent.core.session_solver import Solver
+__all__ = ("setup_for_fluent",)
+
def setup_for_fluent(*args, **kwargs):
"""Returns global PyConsole objects."""
diff --git a/src/ansys/fluent/core/workflow.py b/src/ansys/fluent/core/workflow.py
index 22f1dff6f45d..f4907c968ba4 100644
--- a/src/ansys/fluent/core/workflow.py
+++ b/src/ansys/fluent/core/workflow.py
@@ -24,10 +24,11 @@
from __future__ import annotations
+from collections.abc import Iterable, Iterator
import logging
import re
import threading
-from typing import Any, Iterable, Iterator, Tuple
+from typing import Any
import warnings
from ansys.fluent.core.pyfluent_warnings import (
@@ -1534,7 +1535,7 @@ def __call__(self):
def _workflow_state(self):
return self._workflow()
- def _workflow_and_task_list_state(self) -> Tuple[dict, dict]:
+ def _workflow_and_task_list_state(self) -> tuple[dict, dict]:
workflow_state = self._workflow_state()
prefix = "TaskObject:"
task_list = [
diff --git a/tests/fluent_fixtures.py b/tests/fluent_fixtures.py
index e9257c987ab7..af4f2a4de985 100644
--- a/tests/fluent_fixtures.py
+++ b/tests/fluent_fixtures.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-from typing import Callable
+from collections.abc import Callable
import pytest
diff --git a/tests/test_builtin_settings.py b/tests/test_builtin_settings.py
index 79781420b19f..0f1654073ef5 100644
--- a/tests/test_builtin_settings.py
+++ b/tests/test_builtin_settings.py
@@ -20,12 +20,9 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-from pathlib import Path
-import tempfile
import pytest
-import ansys.fluent.core as pyfluent
from ansys.fluent.core.examples import download_file
from ansys.fluent.core.solver import * # noqa: F401, F403
from ansys.fluent.core.solver.flobject import InactiveObjectError
diff --git a/tests/test_meshingmode/test_meshing_launch.py b/tests/test_meshingmode/test_meshing_launch.py
index f466672a10d0..80e943b23e4a 100644
--- a/tests/test_meshingmode/test_meshing_launch.py
+++ b/tests/test_meshingmode/test_meshing_launch.py
@@ -182,12 +182,12 @@ def __init__(self) -> None:
def __getattribute__(self, item: str):
if item == "_switched":
- return super(fake_session, self).__getattribute__(item)
+ return super().__getattribute__(item)
if self._switched:
return None
- return super(fake_session, self).__getattribute__(item)
+ return super().__getattribute__(item)
def foo(self):
return 42
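
The `super(fake_session, self)` → `super()` change relies on Python 3's zero-argument form, which the compiler expands using the enclosing `__class__` cell; it is equivalent to the explicit two-argument call but survives class renames. A standalone sketch of the same idiom in a `__getattribute__` override:

```python
class Base:
    def __getattribute__(self, item: str):
        print(f"lookup: {item}")
        return super().__getattribute__(item)


class Child(Base):
    def __init__(self) -> None:
        self.value = 42

    def __getattribute__(self, item: str):
        # Zero-argument super() resolves to super(Child, self) implicitly.
        return super().__getattribute__(item)


print(Child().value)  # prints "lookup: value", then 42
```
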
diff --git a/tests/test_new_meshing_workflow.py b/tests/test_new_meshing_workflow.py
index d49cf23f7ed6..0637fe3c27d7 100644
--- a/tests/test_new_meshing_workflow.py
+++ b/tests/test_new_meshing_workflow.py
@@ -20,8 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
+from collections.abc import Iterable
import time
-from typing import Iterable
import pytest
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
index 6109e6f00633..57a972627d2f 100644
--- a/tests/test_scheduler.py
+++ b/tests/test_scheduler.py
@@ -23,7 +23,6 @@
"""Provide a module to test the algorithms which parse job scheduler environments for
machines to run on."""
-from builtins import range
import os
import socket
import tempfile
diff --git a/tests/test_scheme_eval_231.py b/tests/test_scheme_eval_231.py
index 7f7a4f340ae6..9841c26fd694 100644
--- a/tests/test_scheme_eval_231.py
+++ b/tests/test_scheme_eval_231.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-from typing import Any, Dict
+from typing import Any
from google.protobuf.json_format import MessageToDict, ParseDict
import pytest
@@ -90,7 +90,7 @@
],
)
def test_convert_py_value_to_scheme_pointer(
- py_value: Any, json_dict: Dict[str, Any]
+ py_value: Any, json_dict: dict[str, Any]
) -> None:
p = SchemePointer()
_convert_py_value_to_scheme_pointer(py_value, p, "23.1.0")
@@ -181,7 +181,7 @@ def test_convert_py_value_to_scheme_pointer(
],
)
def test_convert_scheme_pointer_to_py_value(
- py_value: Any, json_dict: Dict[str, Any]
+ py_value: Any, json_dict: dict[str, Any]
) -> None:
p = SchemePointer()
ParseDict(json_dict, p)