Chore/doc linting #13

Merged
merged 7 commits on Jan 3, 2024
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -9,6 +9,11 @@ repos:
# Run the formatter.
- id: ruff-format

- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
- id: blacken-docs

- repo: https://github.com/codespell-project/codespell
rev: v2.2.4
hooks:
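Note on the new hook: blacken-docs runs Black over Python code blocks embedded in documentation and docstrings, so documented examples stay formatted the same way as the source. A minimal sketch of the effect on a hypothetical snippet (not taken from this repository):

# Before blacken-docs, a documented example with inconsistent spacing and quoting:
x = {'a':1,  'b' : 2}

# After blacken-docs applies Black formatting inside the doc block:
x = {"a": 1, "b": 2}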
3 changes: 2 additions & 1 deletion doc/source/conf.py
@@ -37,9 +37,10 @@
import os
from datetime import datetime

from ansys.simai.core import __version__
from ansys_sphinx_theme import ansys_favicon, get_version_match, pyansys_logo_black

from ansys.simai.core import __version__

# -- Project information -----------------------------------------------------

project = "ansys-simai-core"
425 changes: 325 additions & 100 deletions pdm.lock

Large diffs are not rendered by default.

20 changes: 16 additions & 4 deletions pyproject.toml
@@ -1,3 +1,4 @@
# Using PDM with flit backend
[build-system]
requires = ["flit_core>=3.2,<4"]
build-backend = "flit_core.buildapi"
@@ -58,7 +59,13 @@ test = [
doc = [
"sphinx>=7.2.0",
"autodoc-pydantic>=2.0.0",
"ansys-sphinx-theme>=0.12.5"
"ansys-sphinx-theme>=0.12.5",
"blacken-docs>=1.16.0"
]
linting = [
"ruff",
"blacken-docs",
"pre-commit"
]

[tool.pdm.scripts]
@@ -84,7 +91,7 @@ select = [
"C4", # flake8-comprehensions
# "C90", # mccabe
"CPY", # flake8-copyright
# "D", # pydocstyle # TODO: Deal with it
"D", # pydocstyle
# "DJ", # flake8-django
# "DTZ", # flake8-datetimez
"E", # pycodestyle errors
@@ -135,24 +142,29 @@ select = [
ignore = [
"D100", # pydocstyle - missing docstring in public module
"D101", # pydocstyle - missing docstring in public class
"D102", # pydocstyle - missing docstring in public method
"D103", # pydocstyle - missing docstring in public function
"D104", # pydocstyle - missing docstring in public package
"D105", # pydocstyle - missing docstring in magic method
"D106", # pydocstyle - missing docstring in public nested class
"D107", # pydocstyle - missing docstring in __init__
"D202", # pydocstyle - no blank lines allowed after function docstring
"D205", # pydocstyle - 1 blank line required between summary line and description
"D415", # pydocstyle - first line should end with a period, question mark, or exclamation point
"E501", # pycodestyle line too long, handled by formatting
"ISC001", # Ruff formatter incompatible
"S101", # flake8-bandit - use of assert
]
target-version = "py39"

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["D"]
"src/ansys/simai/core/{api,utils}/*" = ["D102"]

[tool.ruff.pydocstyle]
convention = "google"

[tool.ruff.lint.isort]
known-first-party = ["ansys.simai.core"]

[tool.coverage.run]
source = ["ansys.simai.core"]

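With the "D" (pydocstyle) rules now enabled, the Google convention selected, and the listed codes ignored, public docstrings outside tests/ (and outside the api/utils D102 carve-outs) need Google-style sections. A rough sketch of a docstring shape that passes this configuration; the function name and arguments are illustrative, not from the codebase:

def download_result(result_id: str, file: str) -> None:
    """Download a result into the file at the given path.

    Args:
        result_id: ID of the result to download.
        file: Path of the file to write the content into.

    Raises:
        ConnectionError: If an error occurred during the download.
    """
    ...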
8 changes: 5 additions & 3 deletions src/ansys/simai/core/__init__.py
@@ -19,6 +19,8 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# ruff: noqa: F401

from importlib.metadata import version

@@ -27,12 +29,12 @@
except Exception:
__version__ = "n/a"

from ansys.simai.core.client import SimAIClient, from_config # noqa
from ansys.simai.core.data.post_processings import ( # noqa
import ansys.simai.core.errors
from ansys.simai.core.client import SimAIClient, from_config
from ansys.simai.core.data.post_processings import (
GlobalCoefficients,
Slice,
SurfaceEvol,
SurfaceVTP,
VolumeVTU,
)
import ansys.simai.core.errors # noqa
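
The per-import "# noqa" comments are replaced by a single module-level "# ruff: noqa: F401", which tells Ruff that the unused-import warnings are intentionally suppressed for the whole file because these imports are re-exports. Downstream code can then pull the public entry points from the package root, roughly like this (a sketch; the available names are exactly those re-exported above):

# Names re-exported in __init__.py are importable from the package root.
from ansys.simai.core import SimAIClient, from_config, GlobalCoefficients, errors

# Instantiating a client requires valid credentials or configuration (not shown here);
# from_config() is assumed to read an existing on-disk configuration file.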
4 changes: 1 addition & 3 deletions src/ansys/simai/core/api/client.py
@@ -44,6 +44,4 @@ class ApiClient(
TrainingDataPartClientMixin,
WorkspaceClientMixin,
):
"""
Low-level client that handles direct communication with the server.
"""
"""Low-level client that handles direct communication with the server."""
9 changes: 4 additions & 5 deletions src/ansys/simai/core/api/design_of_experiments.py
@@ -30,21 +30,20 @@


class DesignOfExperimentsMixin(ApiClientMixin):
"""
Client for the design of experiments ("/design-of-experiments/") part of the API.
"""
"""Client for the design of experiments ("/design-of-experiments/") part of the API."""

def download_design_of_experiments(
self,
file: Optional[File],
format: str,
workspace_id: str,
) -> Union[None, BinaryIO]:
"""
Downloads the design of experiments into the file at the given path.
"""Downloads the design of experiments into the file at the given path.

Args:
file: A binary file-object or the path of the file to put the content into.
format: the format to download, ``xlsx`` or ``csv``
workspace_id: id of the workspace for which to download the DoE

Return:
None if a file is provided, a BytesIO with the design of experiments's content otherwise
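For reference, a rough usage sketch of the mixin method shown above. It assumes direct access to an already-configured ApiClient instance (here called api), which is an assumption; the public SDK normally goes through higher-level wrappers:

# Write the design of experiments to a file on disk:
api.download_design_of_experiments("doe.xlsx", format="xlsx", workspace_id="my-workspace-id")

# Or pass file=None to get the content back as an in-memory BytesIO object:
buffer = api.download_design_of_experiments(None, format="csv", workspace_id="my-workspace-id")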
31 changes: 11 additions & 20 deletions src/ansys/simai/core/api/geometry.py
@@ -22,20 +22,17 @@

import json
import logging
from pathlib import Path
from typing import Any, BinaryIO, Dict, List, Optional, Union
from urllib.parse import quote

from ansys.simai.core.api.mixin import ApiClientMixin
from ansys.simai.core.data.types import MonitorCallback
from ansys.simai.core.data.types import File, MonitorCallback

logger = logging.getLogger(__name__)


class GeometryClientMixin(ApiClientMixin):
"""
Client for the Geometry ("/geometries/") part of the API.
"""
"""Client for the Geometry ("/geometries/") part of the API."""

def geometries(self, workspace_id: str, filters: Optional[Dict[str, Any]] = None):
"""Get list of all geometries."""
@@ -46,17 +43,15 @@ def geometries(self, workspace_id: str, filters: Optional[Dict[str, Any]] = None
return self._get("geometries/", params=params)

def get_geometry(self, geometry_id: str):
"""
Get information on a single geometry.
"""Get information on a single geometry.

Args:
geometry_id: The id of the geometry to get
"""
return self._get(f"geometries/{geometry_id}")

def get_geometry_by_name(self, name: str, workspace_id: str):
"""
Get information on a single geometry, by name instead of id
"""Get information on a single geometry, by name instead of id.

Args:
name: The name of the geometry to get
@@ -65,8 +60,8 @@ def get_geometry_by_name(self, name: str, workspace_id: str):
return self._get(f"geometries/name/{quote(name)}", params={"workspace": workspace_id})

def delete_geometry(self, geometry_id: str):
"""
Delete a single geometry.
"""Delete a single geometry.

All objects associated to that geometry are also deleted.

Args:
@@ -85,8 +80,7 @@ def update_geometry(
name: Optional[str] = None,
metadata: Optional[dict] = None,
):
"""
Update a geometry information
"""Update a geometry information.

Args:
geometry_id: The id of the geometry to update
@@ -107,8 +101,7 @@ def create_geometry(
extension: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
):
"""
Create a new geometry, without pushing the data
"""Create a new geometry, without pushing the data.

Args:
workspace_id: The id of the workspace the geometry should belong to.
@@ -138,11 +131,10 @@ def complete_geometry_upload(self, id: str, upload_id: str, parts: List[Dict[str
def download_geometry(
self,
geometry_id: str,
file: Optional[Union[BinaryIO, str, Path]] = None,
file: Optional[File] = None,
monitor_callback: Optional[MonitorCallback] = None,
) -> Union[None, BinaryIO]:
"""
Downloads the input geometry into the file at the given path.
"""Downloads the input geometry into the file at the given path.

Args:
geometry_id: The id of the geometry to download
@@ -153,8 +145,7 @@ def get_geometry_predictions(self, geometry_id: str):
return self.download_file(f"geometries/{geometry_id}/download", file, monitor_callback)

def get_geometry_predictions(self, geometry_id: str):
"""
Get predictions associated with a geometry.
"""Get predictions associated with a geometry.

Args:
geometry_id: The id of the geometry whose predictions to get
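The download_geometry signature now takes the File alias, which (per the mixin.py changes below) accepts a str, Path, or os.PathLike path as well as an open binary file object, or None for an in-memory result. A hedged usage sketch, again assuming an already-configured ApiClient instance named api and placeholder IDs and file names:

from pathlib import Path

api.download_geometry("geometry-id", file="my_geometry.stl")
api.download_geometry("geometry-id", file=Path("downloads") / "my_geometry.stl")

# Passing an already-open binary file object also works:
with open("my_geometry.stl", "wb") as f:
    api.download_geometry("geometry-id", file=f)

# With file=None (the default), the content is returned as a BytesIO:
buffer = api.download_geometry("geometry-id")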
49 changes: 31 additions & 18 deletions src/ansys/simai/core/api/mixin.py
@@ -24,20 +24,21 @@
import os
from io import BytesIO
from pathlib import Path
from typing import Any, BinaryIO, Callable, Dict, List, Optional, Union
from typing import Any, BinaryIO, Dict, List, Optional, Union
from urllib.parse import urljoin
from urllib.request import getproxies

import requests
from requests.adapters import HTTPAdapter, Retry
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

from ansys.simai.core import __version__
from ansys.simai.core.data.types import APIResponse, File, MonitorCallback
from ansys.simai.core.errors import ConnectionError
from ansys.simai.core.utils.auth import Authenticator
from ansys.simai.core.utils.configuration import ClientConfig
from ansys.simai.core.utils.files import file_path_to_obj_file
from ansys.simai.core.utils.requests import handle_response
from requests.adapters import HTTPAdapter, Retry
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

logger = logging.getLogger(__name__)

@@ -94,21 +95,24 @@ def build_full_url_for_endpoint(self, url) -> str: # noqa: D102

def _request(
self,
method,
method: str,
url,
*args,
return_json: bool = True,
**kwargs,
) -> APIResponse:
"""
Wrap around :py:meth:`requests.Session.request`.
"""Wrap around :py:meth:`requests.Session.request`.

By default this method expects a json response. If you call an endpoint that does
not return a json, specify return_json=False

Args:
method: The HTTP verb of the request
url: The url of the request
*args: Additional args for the request
return_json: Whether the expected response is a json. If yes returns
directly the json, otherwise the Response is returned
**kwargs: Additional kwargs for request

Returns:
The json dict of the response if :py:args:`return_json` is True. The raw
@@ -132,15 +136,15 @@ def download_file(
request_json_body: Optional[Dict[str, Any]] = None,
request_method: str = "GET",
) -> Union[None, BinaryIO]:
"""
Download a file from the given URL into the given file or a :class:`BytesIO`.
"""Download a file from the given URL into the given file or a :class:`BytesIO`.

Args:
download_url: url to GET the file
file: Optional binary file or path onto which to put the downloaded file
monitor_callback: Tuple of two functions or methods used to monitor the download.
The first one will be passed the total size of the file to download.
The second one will be passed the bytes_read delta.
monitor_callback: An optional callback to monitor the progress of the download.
See :obj:`~ansys.simai.core.data.types.MonitorCallback` for details.
request_json_body: Optional JSON to include in the request
request_method: The HTTP verb

Raises:
ConnectionError: If an error occurred during the download
@@ -151,7 +155,7 @@
if file is None:
output_file = BytesIO()
close_file = False
elif isinstance(file, (Path, str)):
elif isinstance(file, (Path, os.PathLike, str)):
output_file = file_path_to_obj_file(file, "wb")
close_file = True
else:
Expand All @@ -163,13 +167,13 @@ def download_file(
request_kwargs.update({"json": request_json_body})
response = self._request(request_method, download_url, **request_kwargs)
if monitor_callback is not None:
monitor_callback[0](int(response.headers.get("Content-Length", 0)))
monitor_callback(int(response.headers.get("Content-Length", 0)))
logger.info("Starting download.")
try:
for chunk in response.iter_content(chunk_size=1024):
bytes_read_delta = output_file.write(chunk)
if monitor_callback is not None:
monitor_callback[1](bytes_read_delta)
monitor_callback(bytes_read_delta)
except requests.exceptions.ConnectionError as e:
logger.debug("Error {e} happened during download stream.")
if close_file is True:
@@ -189,15 +193,25 @@ def upload_file_with_presigned_post(
self,
file: BinaryIO,
presigned_post: Dict[str, Any],
monitor_callback: Optional[Callable[[int], None]] = None,
monitor_callback: Optional[MonitorCallback] = None,
):
upload_form = presigned_post["fields"]
filename = getattr(file, "name", "")
upload_form["file"] = (filename, file, "application/octet-stream")
multipart = MultipartEncoder(upload_form)

if monitor_callback is not None:
multipart = MultipartEncoderMonitor(multipart, monitor_callback)
# Wrap the monitor callback so that it receives only the bytes read
# instead of the full MultipartEncoderMonitor object
def wrap_monitor_callback(monitor_callback):
# FIXME: This ain't gonna work chief
def wrapped_monitor_callback(monitor):
update = monitor_callback(monitor)
return update.bytes_read

return wrapped_monitor_callback

multipart = MultipartEncoderMonitor(multipart, wrap_monitor_callback(monitor_callback))
self._post(
presigned_post["url"],
data=multipart,
@@ -213,8 +227,7 @@ def upload_parts(
part_size: int = int(100e6),
monitor_callback: Optional[MonitorCallback] = None,
) -> List[Dict[str, Any]]:
"""
Upload parts using the given endpoints to get presigned PUT urls
"""Upload parts using the given endpoints to get presigned PUT urls.

Returns:
The list of parts, with their id and their etag
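One functional note on the upload path: requests_toolbelt's MultipartEncoderMonitor invokes its callback with the monitor object, whose bytes_read attribute is cumulative, while the download path above feeds MonitorCallback incremental byte counts. The wrapper committed in this diff (see the FIXME) calls the callback with the monitor and then reads bytes_read off its return value, so it will not forward progress correctly. A hedged sketch of one way to adapt cumulative progress to an int-taking callback; this is an illustration, not the code merged in this PR:

from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

def wrap_monitor_callback(monitor_callback):
    """Adapt an int-taking progress callback to MultipartEncoderMonitor's interface."""
    last_bytes_read = 0

    def wrapped(monitor):
        # monitor.bytes_read is cumulative; forward only the delta since the last call.
        nonlocal last_bytes_read
        monitor_callback(monitor.bytes_read - last_bytes_read)
        last_bytes_read = monitor.bytes_read

    return wrapped

# Usage sketch (upload_form and monitor_callback are assumed to exist):
# multipart = MultipartEncoder(upload_form)
# multipart = MultipartEncoderMonitor(multipart, wrap_monitor_callback(monitor_callback))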