Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions doc/changelog.d/4925.fixed.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Type hints for meshing workflows
57 changes: 35 additions & 22 deletions src/ansys/fluent/core/codegen/datamodelgen.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
"""Module to generate Fluent datamodel API classes."""

import argparse
from io import FileIO, StringIO
from io import StringIO, TextIOWrapper
import os
from pathlib import Path
import shutil
Expand Down Expand Up @@ -116,16 +116,16 @@
return name


def _write_command_query_stub(name: str, info: Any, f: FileIO):
def _write_command_query_stub(name: str, info: Any, f: TextIOWrapper):
signature = StringIO()
indent = " "
signature.write(f"(\n{indent}self,\n")
if info.get("args"):
for arg in info.get("args"):
signature.write(
f'{indent}{arg["name"]}: {_PY_TYPE_BY_DM_TYPE[arg["type"]]} | None = None,\n'
f"{indent}{arg['name']}: {_PY_TYPE_BY_DM_TYPE[arg['type']]} | None = None,\n"
)
signature.write(f'{indent}) -> {_PY_TYPE_BY_DM_TYPE[info["returntype"]]}: ...')
signature.write(f"{indent}) -> {_PY_TYPE_BY_DM_TYPE[info['returntype']]}: ...")
f.write(f"\n def {name}{signature.getvalue()}\n")


Expand Down Expand Up @@ -181,17 +181,17 @@
doc.write(f"\n{indent}Parameters\n")
doc.write(f"{indent}{'-' * len('Parameters')}\n")
for arg in info.get("args"):
doc.write(f'{indent}{arg["name"]} : {_PY_TYPE_BY_DM_TYPE[arg["type"]]}\n')
doc.write(f"{indent}{arg['name']} : {_PY_TYPE_BY_DM_TYPE[arg['type']]}\n")
arg_help = _get_api_help_text(arg, "")
if arg_help:
for line in arg_help.splitlines():
doc.write(f"{indent} {line}\n")
doc.write("\n")
elif arg.get("docstring"):
doc.write(f'{indent} {arg["docstring"]}\n\n')
doc.write(f"{indent} {arg['docstring']}\n")
doc.write(f"\n{indent}Returns\n")
doc.write(f"{indent}{'-' * len('Returns')}\n")
doc.write(f'{indent}{_PY_TYPE_BY_DM_TYPE[info["returntype"]]}\n')
doc.write(f"{indent}{_PY_TYPE_BY_DM_TYPE[info['returntype']]}\n")
if meshing_utility_examples.get(name):
doc.write(f"\n{indent}Examples\n")
doc.write(f"{indent}{'-' * len('Examples')}\n")
Expand Down Expand Up @@ -368,7 +368,7 @@
"Information: Problem accessing flserver datamodel for icing settings\n"
)

def _write_arg_class(self, f: FileIO, arg_info, indent: str):
def _write_arg_class(self, f: TextIOWrapper, arg_info, indent: str):

Check warning on line 371 in src/ansys/fluent/core/codegen/datamodelgen.py

View workflow job for this annotation

GitHub Actions / Code style

Type annotation is missing for parameter "arg_info" (reportMissingParameterType)

Check warning on line 371 in src/ansys/fluent/core/codegen/datamodelgen.py

View workflow job for this annotation

GitHub Actions / Code style

Type of parameter "arg_info" is unknown (reportUnknownParameterType)
arg_name = arg_info["name"]
arg_type = arg_info["type"]
arg_doc = _get_api_help_text(arg_info, f"Argument {arg_name}.")
Expand Down Expand Up @@ -400,7 +400,9 @@
f, parameter_info | {"name": name}, f"{indent} "
)

def _write_static_info(self, name: str, info: Any, f: FileIO, level: int = 0):
def _write_static_info(
self, name: str, info: Any, f: TextIOWrapper, level: int = 0
):
api_tree = {}
# preferences contains a deprecated object Meshing Workflow (with a space)
# which migrates to MeshingWorkflow automatically. Simplest thing to do is
Expand All @@ -409,7 +411,7 @@
return api_tree
indent = " " * level * 4
singleton_doc = _get_api_help_text(info, _build_singleton_docstring(name))
f.write(f"{indent}class {name}(PyMenu):\n")
f.write(f"{indent}class {'_' if name != 'Root' else ''}{name}(PyMenu):\n")
f.write(f'{indent} """\n')
for line in singleton_doc.splitlines():
f.write(f"{indent} {escape_wildcards(line)}\n")
Expand All @@ -423,41 +425,41 @@
for k in named_objects:
f.write(
f"{indent} self.{k} = "
f'self.__class__.{k}(service, rules, path + [("{k}", "")])\n'
+ f'self.__class__._{k}(service, rules, path + [("{k}", "")])\n'
)
for k in singletons:
# This is where filtering these names out really matters (see comment above)
if k.isidentifier():
f.write(
f"{indent} self.{k} = "
f'self.__class__.{k}(service, rules, path + [("{k}", "")])\n'
+ f'self.__class__._{k}(service, rules, path + [("{k}", "")])\n'
)
for k in parameters:
f.write(
f"{indent} self.{k} = "
f'self.__class__.{k}(service, rules, path + [("{k}", "")])\n'
+ f'self.__class__._{k}(service, rules, path + [("{k}", "")])\n'
)
for k in commands:
f.write(
f"{indent} self.{k} = "
f'self.__class__.{k}(service, rules, "{k}", path)\n'
+ f'self.__class__._{k}(service, rules, "{k}", path)\n'
)
for k in queries:
f.write(
f"{indent} self.{k} = "
f'self.__class__.{k}(service, rules, "{k}", path)\n'
+ f'self.__class__._{k}(service, rules, "{k}", path)\n'
)
f.write(f"{indent} super().__init__(service, rules, path)\n\n")
for k in named_objects:
f.write(f"{indent} class {k}(PyNamedObjectContainer):\n")
f.write(f"{indent} class _{k}(PyNamedObjectContainer):\n")
f.write(f'{indent} """\n')
f.write(f"{indent} .\n")
f.write(f'{indent} """\n')
api_tree[f"{k}:<name>"] = self._write_static_info(
f"_{k}", info["namedobjects"][k], f, level + 2
)
# Specify the concrete named object type for __getitem__
f.write(f"{indent} def __getitem__(self, key: str) -> " f"_{k}:\n")
f.write(f"{indent} def __getitem__(self, key: str) -> _{k}:\n")
f.write(f"{indent} return super().__getitem__(key)\n\n")
for k in singletons:
if k.isidentifier():
Expand Down Expand Up @@ -500,7 +502,7 @@
file.write(
"from ansys.fluent.core.services.datamodel_se import PyMenu\n"
)
file.write("from typing import Any\n")
file.write("from typing import Any, TYPE_CHECKING\n")
file.write("\n\n")
file.write("class Root(PyMenu):\n")
for k in commands:
Expand All @@ -517,10 +519,10 @@
)

def _write_static_command_and_query_info(
actions, class_name: str, st_info_key: tuple[str], is_command: bool
actions, class_name: str, st_info_key: tuple[str, ...], is_command: bool
):
for k in actions:
f.write(f"{indent} class {k}({class_name}):\n")
f.write(f"{indent} class _{k}({class_name}):\n")
f.write(f'{indent} """\n')
actions_static_info = info[st_info_key[0]][k]
f.write(
Expand All @@ -529,14 +531,24 @@
)
)
f.write(f'{indent} """\n')
args_info = actions_static_info[st_info_key[1]].get("args", [])

if is_command:
f.write(f"{indent} if TYPE_CHECKING:\n")
py_names = [
f"{_convert_to_py_name(arg['name'])}: {_PY_TYPE_BY_DM_TYPE[arg['type']]}"
for arg in args_info
]
f.write(
f"{indent} def __call__(self, {'*, ' if py_names else ''}{', '.join(py_names)}): ...\n"
)
f.write(f"{indent} class _{k}Arguments(PyArguments):\n")
f.write(
f"{indent} def __init__(self, service, rules, command, path, id):\n"
)
f.write(
f"{indent} super().__init__(service, rules, command, path, id)\n"
)
args_info = actions_static_info[st_info_key[1]].get("args", [])
for arg_info in args_info:
arg_name = arg_info["name"]
py_name = _convert_to_py_name(arg_name)
Expand All @@ -548,7 +560,7 @@
self._write_arg_class(f, arg_info, f"{indent} ")

f.write(
f"{indent} def create_instance(self) -> _{k}Arguments:\n"
f"{indent} def create_instance(self) -> _{k}Arguments | None:\n"
)
f.write(
f"{indent} args = self._get_create_instance_args()\n"
Expand Down Expand Up @@ -579,6 +591,7 @@
f.write("# This is an auto-generated file. DO NOT EDIT!\n")
f.write("#\n")
f.write("# pylint: disable=line-too-long\n\n")
f.write("from typing import Any, TYPE_CHECKING\n\n")
f.write("from ansys.fluent.core.services.datamodel_se import (\n")
f.write(" PyMenu,\n")
f.write(" PyParameter,\n")
Expand Down
5 changes: 4 additions & 1 deletion src/ansys/fluent/core/meshing/meshing_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
from typing import TYPE_CHECKING

from ansys.fluent.core._types import PathType
from ansys.fluent.core.generated.datamodel_261.meshing import Root
from ansys.fluent.core.services.datamodel_se import PyMenuGeneric
from ansys.fluent.core.utils.fluent_version import FluentVersion
from ansys.fluent.core.workflow import Workflow
Expand All @@ -42,7 +43,9 @@
}


class MeshingWorkflow(Workflow):
class MeshingWorkflow(

Check failure on line 46 in src/ansys/fluent/core/meshing/meshing_workflow.py

View workflow job for this annotation

GitHub Actions / Code style

Multiple inheritance is not allowed because the following base classes contain `__init__` or `__new__` methods that may not get called: Root (reportUnsafeMultipleInheritance)
Workflow, Root if TYPE_CHECKING else object
): # pyright: ignore[reportUnsafeMultipleInheritance]

Check warning on line 48 in src/ansys/fluent/core/meshing/meshing_workflow.py

View workflow job for this annotation

GitHub Actions / Code style

Unnecessary "# pyright: ignore" rule: "reportUnsafeMultipleInheritance" (reportUnnecessaryTypeIgnoreComment)
"""Provides meshing specialization of the workflow wrapper that extends the core
functionality in an object-oriented manner."""

Expand Down
26 changes: 20 additions & 6 deletions src/ansys/fluent/core/meshing/meshing_workflow_new.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,10 @@

from enum import Enum
import os
from typing import TYPE_CHECKING, override

Check warning on line 30 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Type of "override" is unknown (reportUnknownVariableType)

Check failure on line 30 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

"override" is unknown import symbol (reportAttributeAccessIssue)

from ansys.fluent.core._types import PathType
from ansys.fluent.core.generated.datamodel_261.meshing_workflow import Root
from ansys.fluent.core.services.datamodel_se import PyMenuGeneric
from ansys.fluent.core.utils.fluent_version import FluentVersion
from ansys.fluent.core.workflow_new import Workflow
Expand Down Expand Up @@ -71,7 +73,9 @@
self._initialized = True


class WatertightMeshingWorkflow(MeshingWorkflow):
class WatertightMeshingWorkflow( # pyright: ignore[reportUnsafeMultipleInheritance]
MeshingWorkflow, Root if TYPE_CHECKING else object
):
"""Provides watertight meshing specialization of the workflow wrapper."""

def __init__(
Expand Down Expand Up @@ -103,7 +107,7 @@
)


class FaultTolerantMeshingWorkflow(MeshingWorkflow):
class FaultTolerantMeshingWorkflow(MeshingWorkflow, Root if TYPE_CHECKING else object):

Check failure on line 110 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Multiple inheritance is not allowed because the following base classes contain `__init__` or `__new__` methods that may not get called: Root (reportUnsafeMultipleInheritance)
"""Provides fault-tolerant meshing specialization of the workflow wrapper."""

def __init__(
Expand Down Expand Up @@ -144,7 +148,8 @@
self._pm_file_management = pm_file_management

@property
@override

Check warning on line 151 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Untyped function decorator obscures type of function; ignoring decorator (reportUntypedFunctionDecorator)
def parts(self) -> PyMenuGeneric | None:

Check warning on line 152 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Method "parts" is not marked as override but is overriding a method in class "Root" (reportImplicitOverride)
"""Access part-management in fault-tolerant mode.

Returns
Expand All @@ -155,7 +160,8 @@
return self._parent_workflow.parts

@property
@override

Check warning on line 163 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Untyped function decorator obscures type of function; ignoring decorator (reportUntypedFunctionDecorator)
def parts_files(self):

Check warning on line 164 in src/ansys/fluent/core/meshing/meshing_workflow_new.py

View workflow job for this annotation

GitHub Actions / Code style

Method "parts_files" is not marked as override but is overriding a method in class "Root" (reportImplicitOverride)
"""Access the part-management file-management object in fault-tolerant mode.

Returns
Expand Down Expand Up @@ -190,7 +196,9 @@
return self._pm_file_management


class TwoDimensionalMeshingWorkflow(MeshingWorkflow):
class TwoDimensionalMeshingWorkflow( # pyright: ignore[reportUnsafeMultipleInheritance]
MeshingWorkflow, Root if TYPE_CHECKING else object
):
"""Provides 2D meshing specialization of the workflow wrapper."""

def __init__(
Expand Down Expand Up @@ -222,7 +230,9 @@
)


class TopologyBasedMeshingWorkflow(MeshingWorkflow):
class TopologyBasedMeshingWorkflow( # pyright: ignore[reportUnsafeMultipleInheritance]
MeshingWorkflow, Root if TYPE_CHECKING else object
):
"""Provides topology-based meshing specialization of the workflow wrapper."""

def __init__(
Expand Down Expand Up @@ -263,7 +273,9 @@
TOPOLOGY_BASED_MESHING_MODE = TopologyBasedMeshingWorkflow


class LoadWorkflow(Workflow):
class LoadWorkflow( # pyright: ignore[reportUnsafeMultipleInheritance]
Workflow, Root if TYPE_CHECKING else object
):
"""Provides a specialization of the workflow wrapper for a loaded workflow."""

def __init__(
Expand Down Expand Up @@ -297,7 +309,9 @@
self._load_workflow(file_path=os.fspath(file_path))


class CreateWorkflow(Workflow):
class CreateWorkflow( # pyright: ignore[reportUnsafeMultipleInheritance]
Workflow, Root if TYPE_CHECKING else object
):
"""Provides a specialization of the workflow wrapper for a newly created
workflow."""

Expand Down
Loading
Loading