From 32728bec1ee938fd411cfa0c9b760bca7869c377 Mon Sep 17 00:00:00 2001 From: Guillaume Mazoyer Date: Thu, 11 Dec 2025 12:29:17 +0100 Subject: [PATCH 01/27] IHS-173 Detect if a relationship is set from a profile (#686) --- infrahub_sdk/node/constants.py | 1 + infrahub_sdk/node/related_node.py | 17 ++++- infrahub_sdk/node/relationship.py | 8 ++ tests/unit/sdk/test_node.py | 123 ++++++++++++++++++++++++++++++ 4 files changed, 145 insertions(+), 4 deletions(-) diff --git a/infrahub_sdk/node/constants.py b/infrahub_sdk/node/constants.py index d474bdb0..f96fce6a 100644 --- a/infrahub_sdk/node/constants.py +++ b/infrahub_sdk/node/constants.py @@ -20,3 +20,4 @@ HIERARCHY_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = "Hierarchical fields are not supported for this node." HFID_STR_SEPARATOR = "__" +PROFILE_KIND_PREFIX = "Profile" diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py index daa9726b..1bf1307e 100644 --- a/infrahub_sdk/node/related_node.py +++ b/infrahub_sdk/node/related_node.py @@ -1,12 +1,11 @@ from __future__ import annotations +import re from typing import TYPE_CHECKING, Any -from ..exceptions import ( - Error, -) +from ..exceptions import Error from ..protocols_base import CoreNodeBase -from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT +from .constants import PROFILE_KIND_PREFIX, PROPERTIES_FLAG, PROPERTIES_OBJECT if TYPE_CHECKING: from ..client import InfrahubClient, InfrahubClientSync @@ -40,6 +39,7 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, self._display_label: str | None = None self._typename: str | None = None self._kind: str | None = None + self._source_typename: str | None = None if isinstance(data, (CoreNodeBase)): self._peer = data @@ -74,6 +74,8 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, prop_data = properties_data.get(prop, properties_data.get(f"_relation__{prop}", None)) if prop_data and isinstance(prop_data, dict) and "id" in prop_data: setattr(self, prop, prop_data["id"]) + if prop == "source" and "__typename" in prop_data: + self._source_typename = prop_data["__typename"] elif prop_data and isinstance(prop_data, (str, bool)): setattr(self, prop, prop_data) else: @@ -125,6 +127,13 @@ def kind(self) -> str | None: return self._peer.get_kind() return self._kind + @property + def is_from_profile(self) -> bool: + """Return whether this relationship was set from a profile. Done by checking if the source is of a profile kind.""" + if not self._source_typename: + return False + return bool(re.match(rf"^{PROFILE_KIND_PREFIX}[A-Z]", self._source_typename)) + def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]: data: dict[str, Any] = {} diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py index 6dcc66ce..757bb875 100644 --- a/infrahub_sdk/node/relationship.py +++ b/infrahub_sdk/node/relationship.py @@ -56,6 +56,14 @@ def peer_hfids_str(self) -> list[str]: def has_update(self) -> bool: return self._has_update + @property + def is_from_profile(self) -> bool: + """Return whether this relationship was set from a profile. 
All its peers must be from a profile.""" + if not self.peers: + return False + all_profiles = [p.is_from_profile for p in self.peers] + return bool(all_profiles) and all(all_profiles) + def _generate_input_data(self, allocate_from_pool: bool = False) -> list[dict]: return [peer._generate_input_data(allocate_from_pool=allocate_from_pool) for peer in self.peers] diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index c0c9e9bf..f9e6b8bc 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -11,6 +11,7 @@ InfrahubNodeBase, InfrahubNodeSync, RelatedNodeBase, + RelationshipManager, RelationshipManagerBase, parse_human_friendly_id, ) @@ -2623,3 +2624,125 @@ async def test_process_relationships_recursive_deep_nesting( assert "ip-2" in recursive_ids # From interface-1 assert "ip-3" in recursive_ids # From interface-2 assert len(related_nodes_recursive) == 5 # 2 interfaces + 3 IP addresses + + +class TestRelatedNodeIsFromProfile: + def test_is_from_profile_when_source_is_profile(self, location_schema) -> None: + data = { + "node": {"id": "test-id", "display_label": "test-tag", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "ProfileInfraDevice", "display_label": "default-profile", "id": "profile-id"}, + }, + } + related_node = RelatedNodeBase(branch="main", schema=location_schema.relationships[0], data=data) + assert related_node.is_from_profile + + def test_is_from_profile_when_source_is_not_profile(self, location_schema) -> None: + data = { + "node": {"id": "test-id", "display_label": "test-tag", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "CoreAccount", "display_label": "admin", "id": "account-id"}, + }, + } + related_node = RelatedNodeBase(branch="main", schema=location_schema.relationships[0], data=data) + assert not related_node.is_from_profile + + def test_is_from_profile_when_source_not_queried(self, location_schema) -> None: + data = { + "node": {"id": "test-id", "display_label": "test-tag", "__typename": "BuiltinTag"}, + "properties": {"is_protected": False, "owner": None, "source": None}, + } + related_node = RelatedNodeBase(branch="main", schema=location_schema.relationships[0], data=data) + assert not related_node.is_from_profile + + def test_is_from_profile_when_no_properties(self, location_schema) -> None: + data = {"node": {"id": "test-id", "display_label": "test-tag", "__typename": "BuiltinTag"}} + related_node = RelatedNodeBase(branch="main", schema=location_schema.relationships[0], data=data) + assert not related_node.is_from_profile + + +class TestRelationshipManagerIsFromProfile: + def test_is_from_profile_when_no_peers(self, location_schema) -> None: + manager = RelationshipManagerBase(name="tags", branch="main", schema=location_schema.relationships[0]) + assert not manager.is_from_profile + + def test_is_from_profile_when_all_peers_from_profile(self, client, location_schema) -> None: + data = { + "count": 2, + "edges": [ + { + "node": {"id": "tag-1", "display_label": "tag1", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "ProfileInfraDevice", "display_label": "profile1", "id": "profile-1"}, + }, + }, + { + "node": {"id": "tag-2", "display_label": "tag2", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "ProfileInfraDevice", "display_label": "profile1", "id": 
"profile-1"}, + }, + }, + ], + } + manager = RelationshipManager( + name="tags", client=client, node=None, branch="main", schema=location_schema.relationships[0], data=data + ) + assert manager.is_from_profile + + def test_is_from_profile_when_any_peer_not_from_profile(self, client, location_schema) -> None: + data = { + "count": 2, + "edges": [ + { + "node": {"id": "tag-1", "display_label": "tag1", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "ProfileInfraDevice", "display_label": "profile1", "id": "profile-1"}, + }, + }, + { + "node": {"id": "tag-2", "display_label": "tag2", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "CoreAccount", "display_label": "admin", "id": "account-1"}, + }, + }, + ], + } + manager = RelationshipManager( + name="tags", client=client, node=None, branch="main", schema=location_schema.relationships[0], data=data + ) + assert not manager.is_from_profile + + def test_is_from_profile_when_any_peer_has_unknown_source(self, client, location_schema) -> None: + data = { + "count": 2, + "edges": [ + { + "node": {"id": "tag-1", "display_label": "tag1", "__typename": "BuiltinTag"}, + "properties": { + "is_protected": False, + "owner": None, + "source": {"__typename": "ProfileInfraDevice", "display_label": "profile1", "id": "profile-1"}, + }, + }, + { + "node": {"id": "tag-2", "display_label": "tag2", "__typename": "BuiltinTag"}, + "properties": {"is_protected": False, "owner": None, "source": None}, + }, + ], + } + manager = RelationshipManager( + name="tags", client=client, node=None, branch="main", schema=location_schema.relationships[0], data=data + ) + assert not manager.is_from_profile From 20d349c5a64be0e23d0d97d13a43657660c6bb48 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Tue, 16 Dec 2025 16:48:11 +0100 Subject: [PATCH 02/27] Remove deprecated 'background_execution' param from client.branch.create() --- changelog/+e2f96e7b.removed.md | 1 + infrahub_sdk/branch.py | 51 ++++++++-------------------------- 2 files changed, 13 insertions(+), 39 deletions(-) create mode 100644 changelog/+e2f96e7b.removed.md diff --git a/changelog/+e2f96e7b.removed.md b/changelog/+e2f96e7b.removed.md new file mode 100644 index 00000000..52e96350 --- /dev/null +++ b/changelog/+e2f96e7b.removed.md @@ -0,0 +1 @@ +The previously deprecated 'background_execution' parameter under client.branch.create() was removed. diff --git a/infrahub_sdk/branch.py b/infrahub_sdk/branch.py index 53623eae..2c32a481 100644 --- a/infrahub_sdk/branch.py +++ b/infrahub_sdk/branch.py @@ -1,6 +1,5 @@ from __future__ import annotations -import warnings from enum import Enum from typing import TYPE_CHECKING, Any, Literal, overload from urllib.parse import urlencode @@ -93,7 +92,6 @@ async def create( sync_with_git: bool = True, description: str = "", wait_until_completion: Literal[True] = True, - background_execution: bool | None = False, ) -> BranchData: ... @overload @@ -103,7 +101,6 @@ async def create( sync_with_git: bool = True, description: str = "", wait_until_completion: Literal[False] = False, - background_execution: bool | None = False, ) -> str: ... 
async def create( @@ -112,19 +109,9 @@ async def create( sync_with_git: bool = True, description: str = "", wait_until_completion: bool = True, - background_execution: bool | None = False, ) -> BranchData | str: - if background_execution is not None: - warnings.warn( - "`background_execution` is deprecated, please use `wait_until_completion` instead.", - DeprecationWarning, - stacklevel=1, - ) - - background_execution = background_execution or not wait_until_completion input_data = { - # Should be switched to `wait_until_completion` once `background_execution` is removed server side. - "background_execution": background_execution, + "wait_until_completion": wait_until_completion, "data": { "name": branch_name, "description": description, @@ -132,15 +119,14 @@ async def create( }, } - mutation_query = MUTATION_QUERY_TASK if background_execution else MUTATION_QUERY_DATA + mutation_query = MUTATION_QUERY_DATA if wait_until_completion else MUTATION_QUERY_TASK query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query) response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create") - # Make sure server version is recent enough to support background execution, as previously - # using background_execution=True had no effect. - if background_execution and "task" in response["BranchCreate"]: - return response["BranchCreate"]["task"]["id"] - return BranchData(**response["BranchCreate"]["object"]) + if wait_until_completion: + return BranchData(**response["BranchCreate"]["object"]) + + return response["BranchCreate"]["task"]["id"] async def delete(self, branch_name: str) -> bool: input_data = { @@ -261,7 +247,6 @@ def create( sync_with_git: bool = True, description: str = "", wait_until_completion: Literal[True] = True, - background_execution: bool | None = False, ) -> BranchData: ... @overload @@ -271,7 +256,6 @@ def create( sync_with_git: bool = True, description: str = "", wait_until_completion: Literal[False] = False, - background_execution: bool | None = False, ) -> str: ... def create( @@ -280,19 +264,9 @@ def create( sync_with_git: bool = True, description: str = "", wait_until_completion: bool = True, - background_execution: bool | None = False, ) -> BranchData | str: - if background_execution is not None: - warnings.warn( - "`background_execution` is deprecated, please use `wait_until_completion` instead.", - DeprecationWarning, - stacklevel=1, - ) - - background_execution = background_execution or not wait_until_completion input_data = { - # Should be switched to `wait_until_completion` once `background_execution` is removed server side. - "background_execution": background_execution, + "wait_until_completion": wait_until_completion, "data": { "name": branch_name, "description": description, @@ -300,15 +274,14 @@ def create( }, } - mutation_query = MUTATION_QUERY_TASK if background_execution else MUTATION_QUERY_DATA + mutation_query = MUTATION_QUERY_DATA if wait_until_completion else MUTATION_QUERY_TASK query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query) response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create") - # Make sure server version is recent enough to support background execution, as previously - # using background_execution=True had no effect. 
- if background_execution and "task" in response["BranchCreate"]: - return response["BranchCreate"]["task"]["id"] - return BranchData(**response["BranchCreate"]["object"]) + if wait_until_completion: + return BranchData(**response["BranchCreate"]["object"]) + + return response["BranchCreate"]["task"]["id"] def delete(self, branch_name: str) -> bool: input_data = { From 186e199444b7d08cd4501e585948540d7826a790 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Dec 2025 17:13:46 +0000 Subject: [PATCH 03/27] Bump astral-sh/setup-uv from 4 to 7 Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 4 to 7. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v4...v7) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/publish-pypi.yml | 2 +- .github/workflows/release.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 96bb84a7..350afd56 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -42,7 +42,7 @@ jobs: python-version: "3.12" - name: Install UV - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 with: version: ${{ needs.prepare-environment.outputs.UV_VERSION }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1f0a088b..368254c4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -31,7 +31,7 @@ jobs: python-version: "3.12" - name: Install UV - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 with: version: "${{ needs.prepare-environment.outputs.UV_VERSION }}" From b67ec03aafc8291fd8911feb82acccd7c1d606b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Dec 2025 17:13:52 +0000 Subject: [PATCH 04/27] Bump actions/checkout from 5 to 6 Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5ba68c57..484af237 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -76,7 +76,7 @@ jobs: timeout-minutes: 5 steps: - name: "Check out repository code" - uses: "actions/checkout@v5" + uses: "actions/checkout@v6" - name: Set up Python uses: actions/setup-python@v6 with: @@ -138,7 +138,7 @@ jobs: timeout-minutes: 5 steps: - name: "Check out repository code" - uses: "actions/checkout@v5" + uses: "actions/checkout@v6" - name: "Set up Python" uses: "actions/setup-python@v6" with: From 4228100ce31a42b4e3bca45b69ad5cb337308d20 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 09:12:42 +0000 Subject: [PATCH 05/27] Bump DavidAnson/markdownlint-cli2-action from 21 to 22 Bumps [DavidAnson/markdownlint-cli2-action](https://github.com/davidanson/markdownlint-cli2-action) from 21 to 22. - [Release notes](https://github.com/davidanson/markdownlint-cli2-action/releases) - [Commits](https://github.com/davidanson/markdownlint-cli2-action/compare/v21...v22) --- updated-dependencies: - dependency-name: DavidAnson/markdownlint-cli2-action dependency-version: '22' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 484af237..ff38ab5a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -104,7 +104,7 @@ jobs: - name: "Check out repository code" uses: "actions/checkout@v6" - name: "Linting: markdownlint" - uses: DavidAnson/markdownlint-cli2-action@v21 + uses: DavidAnson/markdownlint-cli2-action@v22 with: config: .markdownlint.yaml globs: | From 77c9d1da6ed506a76c399e92bdbd013f7b0d3981 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 09:12:37 +0000 Subject: [PATCH 06/27] Bump actions/cache from 4 to 5 Bumps [actions/cache](https://github.com/actions/cache) from 4 to 5. - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/cache dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish-pypi.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 350afd56..e6bce2d3 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -52,7 +52,7 @@ jobs: submodules: true - name: Cache UV dependencies - uses: "actions/cache@v4" + uses: "actions/cache@v5" id: "cached-uv-dependencies" with: path: ".venv" From 668caaacd9bb45e6d42292e564d46d82479807d8 Mon Sep 17 00:00:00 2001 From: Fatih Acar Date: Thu, 11 Dec 2025 13:46:57 +0100 Subject: [PATCH 07/27] fix: py3.14 warnings due to iscoroutinefunction Signed-off-by: Fatih Acar --- infrahub_sdk/checks.py | 4 ++-- infrahub_sdk/ctl/cli_commands.py | 3 ++- infrahub_sdk/ctl/utils.py | 4 ++-- infrahub_sdk/transforms.py | 4 ++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/infrahub_sdk/checks.py b/infrahub_sdk/checks.py index e0880da9..e1dfc404 100644 --- a/infrahub_sdk/checks.py +++ b/infrahub_sdk/checks.py @@ -1,7 +1,7 @@ from __future__ import annotations -import asyncio import importlib +import inspect import os import warnings from abc import abstractmethod @@ -160,7 +160,7 @@ async def run(self, data: dict | None = None) -> bool: data = await self.collect_data() unpacked = data.get("data") or data - if asyncio.iscoroutinefunction(self.validate): + if inspect.iscoroutinefunction(self.validate): await self.validate(data=unpacked) else: self.validate(data=unpacked) diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index 538bad6b..e76225e4 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -3,6 +3,7 @@ import asyncio import functools import importlib +import inspect import logging import platform import sys @@ -240,7 +241,7 @@ async def _run_transform( console.print("[yellow] you can specify a different branch with --branch") raise typer.Abort() - if asyncio.iscoroutinefunction(transform_func): + if inspect.iscoroutinefunction(transform_func): output = await transform_func(response) else: output = transform_func(response) diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py index 9db07957..f87a81a1 100644 --- a/infrahub_sdk/ctl/utils.py +++ b/infrahub_sdk/ctl/utils.py @@ -1,6 +1,6 @@ from __future__ import annotations -import asyncio +import inspect import logging import traceback from collections.abc import Callable, Coroutine @@ -83,7 +83,7 @@ def catch_exception( console = Console() def decorator(func: Callable[..., T]) -> Callable[..., T | Coroutine[Any, Any, T]]: - if asyncio.iscoroutinefunction(func): + if inspect.iscoroutinefunction(func): @wraps(func) async def async_wrapper(*args: Any, **kwargs: Any) -> T: diff --git a/infrahub_sdk/transforms.py b/infrahub_sdk/transforms.py index 0c07e296..ee17605f 100644 --- a/infrahub_sdk/transforms.py +++ b/infrahub_sdk/transforms.py @@ -1,6 +1,6 @@ from __future__ import annotations -import asyncio +import inspect import os from abc import abstractmethod from typing import TYPE_CHECKING, Any @@ -75,7 +75,7 @@ async def run(self, data: dict | None = None) -> Any: unpacked = data.get("data") or data await self.process_nodes(data=unpacked) - if asyncio.iscoroutinefunction(self.transform): + if inspect.iscoroutinefunction(self.transform): return await self.transform(data=unpacked) return self.transform(data=unpacked) From f9bd5f7220d9348d792c570ec5bc5dfa85c2cfe2 Mon Sep 17 00:00:00 2001 From: Fatih Acar Date: Thu, 11 Dec 
2025 13:47:16 +0100 Subject: [PATCH 08/27] fix: upgrade pytest-asyncio for py3.14 compat Signed-off-by: Fatih Acar --- pyproject.toml | 5 +++-- tests/conftest.py | 14 ++++++-------- uv.lock | 37 ++++++++++++++++++++++++------------- 3 files changed, 33 insertions(+), 23 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 924d0ddd..63be59f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,8 +70,8 @@ all = [ # Core optional dependencies tests = [ "infrahub-testcontainers>=1.5.1", - "pytest", - "pytest-asyncio<0.23", + "pytest>=9.0,<9.1", + "pytest-asyncio>=1.3,<1.4", "pytest-clarity>=1.0.1", "pytest-cov>=4.0.0", "pytest-httpx>=0.30", @@ -108,6 +108,7 @@ exclude_lines = ["if TYPE_CHECKING:", "raise NotImplementedError()"] [tool.pytest.ini_options] asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" testpaths = ["tests"] filterwarnings = [ "ignore:Module already imported so cannot be rewritten", diff --git a/tests/conftest.py b/tests/conftest.py index 953e9c6b..5d19956e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,8 @@ -import asyncio import os from collections.abc import Generator import pytest +import pytest_asyncio from infrahub_sdk.ctl import config @@ -11,13 +11,11 @@ ENV_VARS_TO_CLEAN = ["INFRAHUB_ADDRESS", "INFRAHUB_TOKEN", "INFRAHUB_BRANCH", "INFRAHUB_USERNAME", "INFRAHUB_PASSWORD"] -@pytest.fixture(scope="session") -def event_loop() -> Generator[asyncio.AbstractEventLoop]: - """Overrides pytest default function scoped event loop""" - policy = asyncio.get_event_loop_policy() - loop = policy.new_event_loop() - yield loop - loop.close() +def pytest_collection_modifyitems(items) -> None: + pytest_asyncio_tests = (item for item in items if pytest_asyncio.is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) @pytest.fixture(scope="session", autouse=True) diff --git a/uv.lock b/uv.lock index 3cf507d6..b07d9360 100644 --- a/uv.lock +++ b/uv.lock @@ -124,6 +124,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/ee/3fd29bf416eb4f1c5579cf12bf393ae954099258abd7bde03c4f9716ef6b/autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840", size = 32483, upload-time = "2024-03-13T03:41:26.969Z" }, ] +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + [[package]] name = "black" version = "25.9.0" @@ -848,8 +857,8 @@ dev = [ { name = "ipython" }, { name = "mypy", specifier = "==1.11.2" }, { name = "pre-commit", specifier = ">=2.20.0" }, - { name = "pytest" }, - { name = "pytest-asyncio", specifier = "<0.23" }, + { name = "pytest", specifier = ">=9.0,<9.1" }, + { name = "pytest-asyncio", specifier = ">=1.3,<1.4" }, { name = "pytest-clarity", specifier = 
">=1.0.1" }, { name = "pytest-cov", specifier = ">=4.0.0" }, { name = "pytest-httpx", specifier = ">=0.30" }, @@ -870,8 +879,8 @@ lint = [ ] tests = [ { name = "infrahub-testcontainers", specifier = ">=1.5.1" }, - { name = "pytest" }, - { name = "pytest-asyncio", specifier = "<0.23" }, + { name = "pytest", specifier = ">=9.0,<9.1" }, + { name = "pytest-asyncio", specifier = ">=1.3,<1.4" }, { name = "pytest-clarity", specifier = ">=1.0.1" }, { name = "pytest-cov", specifier = ">=4.0.0" }, { name = "pytest-httpx", specifier = ">=0.30" }, @@ -1972,7 +1981,7 @@ wheels = [ [[package]] name = "pytest" -version = "8.4.2" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1983,21 +1992,23 @@ dependencies = [ { name = "pygments" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] [[package]] name = "pytest-asyncio" -version = "0.21.2" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/53/57663d99acaac2fcdafdc697e52a9b1b7d6fcf36616281ff9768a44e7ff3/pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45", size = 30656, upload-time = "2024-04-29T13:23:24.738Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/ce/1e4b53c213dce25d6e8b163697fbce2d43799d76fa08eea6ad270451c370/pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b", size = 13368, upload-time = "2024-04-29T13:23:23.126Z" }, + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = 
"2025-11-10T16:07:45.537Z" }, ] [[package]] @@ -2027,15 +2038,15 @@ wheels = [ [[package]] name = "pytest-httpx" -version = "0.35.0" +version = "0.36.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/89/5b12b7b29e3d0af3a4b9c071ee92fa25a9017453731a38f08ba01c280f4c/pytest_httpx-0.35.0.tar.gz", hash = "sha256:d619ad5d2e67734abfbb224c3d9025d64795d4b8711116b1a13f72a251ae511f", size = 54146, upload-time = "2024-11-28T19:16:54.237Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/ed/026d467c1853dd83102411a78126b4842618e86c895f93528b0528c7a620/pytest_httpx-0.35.0-py3-none-any.whl", hash = "sha256:ee11a00ffcea94a5cbff47af2114d34c5b231c326902458deed73f9c459fd744", size = 19442, upload-time = "2024-11-28T19:16:52.787Z" }, + { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" }, ] [[package]] From b3fc3ac84ad7110b6df3c035e4ca2e673c104b12 Mon Sep 17 00:00:00 2001 From: Fatih Acar Date: Tue, 16 Dec 2025 12:32:42 +0100 Subject: [PATCH 09/27] chore: add newsfragment Signed-off-by: Fatih Acar --- changelog/+1b40f022.housekeeping.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/+1b40f022.housekeeping.md diff --git a/changelog/+1b40f022.housekeeping.md b/changelog/+1b40f022.housekeeping.md new file mode 100644 index 00000000..40a566c7 --- /dev/null +++ b/changelog/+1b40f022.housekeeping.md @@ -0,0 +1 @@ +Fixed Python 3.14 compatibility warnings. Testing now requires pytest>=9. 
From fb522374ac1b4af44aa19e427a30a81ba3f4aed2 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Wed, 17 Dec 2025 10:23:51 +0100 Subject: [PATCH 10/27] Add ability to order results by metadata created_at or updated_at --- changelog/+86c0992a.added.md | 1 + infrahub_sdk/enums.py | 6 ++++++ infrahub_sdk/graphql/renderers.py | 21 +++++++++++++++++++++ infrahub_sdk/types.py | 20 ++++++++++++++++++-- 4 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 changelog/+86c0992a.added.md create mode 100644 infrahub_sdk/enums.py diff --git a/changelog/+86c0992a.added.md b/changelog/+86c0992a.added.md new file mode 100644 index 00000000..1e53de4b --- /dev/null +++ b/changelog/+86c0992a.added.md @@ -0,0 +1 @@ +Added ability to order nodes by metadata created_at or updated_at fields diff --git a/infrahub_sdk/enums.py b/infrahub_sdk/enums.py new file mode 100644 index 00000000..75219825 --- /dev/null +++ b/infrahub_sdk/enums.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class OrderDirection(str, Enum): + ASC = "ASC" + DESC = "DESC" diff --git a/infrahub_sdk/graphql/renderers.py b/infrahub_sdk/graphql/renderers.py index 3cd3e557..91b77526 100644 --- a/infrahub_sdk/graphql/renderers.py +++ b/infrahub_sdk/graphql/renderers.py @@ -7,6 +7,8 @@ from pydantic import BaseModel +from infrahub_sdk.types import Order + from .constants import VARIABLE_TYPE_MAPPING @@ -53,6 +55,16 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str: if isinstance(value, list): values_as_string = [convert_to_graphql_as_string(value=item, convert_enum=convert_enum) for item in value] return "[" + ", ".join(values_as_string) + "]" + if isinstance(value, Order): + data = value.model_dump(exclude_none=True) + return ( + "{ " + + ", ".join( + f"{key}: {convert_to_graphql_as_string(value=val, convert_enum=convert_enum)}" + for key, val in data.items() + ) + + " }" + ) if isinstance(value, BaseModel): data = value.model_dump() return ( @@ -63,6 +75,15 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str: ) + " }" ) + if isinstance(value, dict): + return ( + "{ " + + ", ".join( + f"{key}: {convert_to_graphql_as_string(value=val, convert_enum=convert_enum)}" + for key, val in value.items() + ) + + " }" + ) return str(value) diff --git a/infrahub_sdk/types.py b/infrahub_sdk/types.py index 666bb71c..59cbedef 100644 --- a/infrahub_sdk/types.py +++ b/infrahub_sdk/types.py @@ -4,7 +4,9 @@ from logging import Logger from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable -from pydantic import BaseModel +from pydantic import BaseModel, Field, model_validator + +from infrahub_sdk.enums import OrderDirection # noqa: TC001 if TYPE_CHECKING: import httpx @@ -68,5 +70,19 @@ def exception(self, event: str | None = None, *args: Any, **kw: Any) -> Any: InfrahubLoggers = InfrahubLogger | Logger +class NodeMetaOrder(BaseModel): + created_at: OrderDirection | None = None + updated_at: OrderDirection | None = None + + @model_validator(mode="after") + def validate_selection(self) -> NodeMetaOrder: + if self.created_at and self.updated_at: + raise ValueError("'created_at' and 'updated_at' are mutually exclusive") + return self + + class Order(BaseModel): - disable: bool | None = None + disable: bool | None = Field( + default=None, description="Disable default ordering, can be used to improve performance" + ) + node_metadata: NodeMetaOrder | None = Field(default=None, description="Order by node meta fields") From 077bcc2b107e854a729e3b1ae6e649a24201b1c7 Mon Sep 17 00:00:00 
2001 From: Patrick Ogenstad Date: Thu, 18 Dec 2025 16:42:19 +0100 Subject: [PATCH 11/27] Add ability to query for node metadata --- changelog/+d3b5369f.added.md | 1 + infrahub_sdk/client.py | 36 +++ infrahub_sdk/node/attribute.py | 14 +- infrahub_sdk/node/constants.py | 11 + infrahub_sdk/node/metadata.py | 69 +++++ infrahub_sdk/node/node.py | 68 ++++- infrahub_sdk/node/property.py | 3 + infrahub_sdk/node/related_node.py | 25 +- infrahub_sdk/node/relationship.py | 11 +- tests/unit/sdk/test_node.py | 434 +++++++++++++++++++++++++++++- 10 files changed, 655 insertions(+), 17 deletions(-) create mode 100644 changelog/+d3b5369f.added.md create mode 100644 infrahub_sdk/node/metadata.py diff --git a/changelog/+d3b5369f.added.md b/changelog/+d3b5369f.added.md new file mode 100644 index 00000000..3942c0cc --- /dev/null +++ b/changelog/+d3b5369f.added.md @@ -0,0 +1 @@ +Add ability to query for metadata on nodes to include information such as creation and update timestamps, creator and last user to update an object. diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 2ef90c0a..33487ebf 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -401,6 +401,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaType | None: ... @@ -420,6 +421,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaType: ... @@ -439,6 +441,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaType: ... @@ -458,6 +461,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNode | None: ... @@ -477,6 +481,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNode: ... @@ -496,6 +501,7 @@ async def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNode: ... @@ -514,6 +520,7 @@ async def get( fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, + include_metadata: bool = False, **kwargs: Any, ) -> InfrahubNode | SchemaType | None: branch = branch or self.default_branch @@ -547,6 +554,7 @@ async def get( fragment=fragment, prefetch_relationships=prefetch_relationships, property=property, + include_metadata=include_metadata, **filters, ) @@ -650,6 +658,7 @@ async def all( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., ) -> list[SchemaType]: ... @overload @@ -669,6 +678,7 @@ async def all( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., ) -> list[InfrahubNode]: ... async def all( @@ -687,6 +697,7 @@ async def all( property: bool = False, parallel: bool = False, order: Order | None = None, + include_metadata: bool = False, ) -> list[InfrahubNode] | list[SchemaType]: """Retrieve all nodes of a given kind @@ -704,6 +715,7 @@ async def all( prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. parallel (bool, optional): Whether to use parallel processing for the query. order (Order, optional): Ordering related options. 
Setting `disable=True` enhances performances. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. Returns: list[InfrahubNode]: List of Nodes @@ -723,6 +735,7 @@ async def all( property=property, parallel=parallel, order=order, + include_metadata=include_metadata, ) @overload @@ -743,6 +756,7 @@ async def filters( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> list[SchemaType]: ... @@ -764,6 +778,7 @@ async def filters( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> list[InfrahubNode]: ... @@ -784,6 +799,7 @@ async def filters( property: bool = False, parallel: bool = False, order: Order | None = None, + include_metadata: bool = False, **kwargs: Any, ) -> list[InfrahubNode] | list[SchemaType]: """Retrieve nodes of a given kind based on provided filters. @@ -803,6 +819,7 @@ async def filters( partial_match (bool, optional): Allow partial match of filter criteria for the query. parallel (bool, optional): Whether to use parallel processing for the query. order (Order, optional): Ordering related options. Setting `disable=True` enhances performances. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. **kwargs (Any): Additional filter criteria for the query. Returns: @@ -829,6 +846,7 @@ async def process_page(page_offset: int, page_number: int) -> tuple[dict, Proces partial_match=partial_match, property=property, order=order, + include_metadata=include_metadata, ) query = Query(query=query_data) response = await self.execute_graphql( @@ -1957,6 +1975,7 @@ def all( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., ) -> list[SchemaTypeSync]: ... @overload @@ -1976,6 +1995,7 @@ def all( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., ) -> list[InfrahubNodeSync]: ... def all( @@ -1994,6 +2014,7 @@ def all( property: bool = False, parallel: bool = False, order: Order | None = None, + include_metadata: bool = False, ) -> list[InfrahubNodeSync] | list[SchemaTypeSync]: """Retrieve all nodes of a given kind @@ -2011,6 +2032,7 @@ def all( prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. parallel (bool, optional): Whether to use parallel processing for the query. order (Order, optional): Ordering related options. Setting `disable=True` enhances performances. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. Returns: list[InfrahubNodeSync]: List of Nodes @@ -2030,6 +2052,7 @@ def all( property=property, parallel=parallel, order=order, + include_metadata=include_metadata, ) def _process_nodes_and_relationships( @@ -2091,6 +2114,7 @@ def filters( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> list[SchemaTypeSync]: ... @@ -2112,6 +2136,7 @@ def filters( property: bool = ..., parallel: bool = ..., order: Order | None = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> list[InfrahubNodeSync]: ... 
@@ -2132,6 +2157,7 @@ def filters( property: bool = False, parallel: bool = False, order: Order | None = None, + include_metadata: bool = False, **kwargs: Any, ) -> list[InfrahubNodeSync] | list[SchemaTypeSync]: """Retrieve nodes of a given kind based on provided filters. @@ -2151,6 +2177,7 @@ def filters( partial_match (bool, optional): Allow partial match of filter criteria for the query. parallel (bool, optional): Whether to use parallel processing for the query. order (Order, optional): Ordering related options. Setting `disable=True` enhances performances. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. **kwargs (Any): Additional filter criteria for the query. Returns: @@ -2177,6 +2204,7 @@ def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelat partial_match=partial_match, property=property, order=order, + include_metadata=include_metadata, ) query = Query(query=query_data) response = self.execute_graphql( @@ -2266,6 +2294,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaTypeSync | None: ... @@ -2285,6 +2314,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaTypeSync: ... @@ -2304,6 +2334,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> SchemaTypeSync: ... @@ -2323,6 +2354,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNodeSync | None: ... @@ -2342,6 +2374,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNodeSync: ... @@ -2361,6 +2394,7 @@ def get( fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., + include_metadata: bool = ..., **kwargs: Any, ) -> InfrahubNodeSync: ... 
@@ -2379,6 +2413,7 @@ def get( fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, + include_metadata: bool = False, **kwargs: Any, ) -> InfrahubNodeSync | SchemaTypeSync | None: branch = branch or self.default_branch @@ -2412,6 +2447,7 @@ def get( fragment=fragment, prefetch_relationships=prefetch_relationships, property=property, + include_metadata=include_metadata, **filters, ) diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py index a0d00a96..8043d567 100644 --- a/infrahub_sdk/node/attribute.py +++ b/infrahub_sdk/node/attribute.py @@ -6,7 +6,7 @@ from ..protocols_base import CoreNodeBase from ..uuidt import UUIDT -from .constants import IP_TYPES, PROPERTIES_FLAG, PROPERTIES_OBJECT, SAFE_VALUE +from .constants import ATTRIBUTE_METADATA_OBJECT, IP_TYPES, PROPERTIES_FLAG, PROPERTIES_OBJECT, SAFE_VALUE from .property import NodeProperty if TYPE_CHECKING: @@ -57,11 +57,16 @@ def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> N self.source: NodeProperty | None = None self.owner: NodeProperty | None = None + self.updated_by: NodeProperty | None = None for prop_name in self._properties_object: if data.get(prop_name): setattr(self, prop_name, NodeProperty(data=data.get(prop_name))) # type: ignore[arg-type] + for prop_name in ATTRIBUTE_METADATA_OBJECT: + if data.get(prop_name): + setattr(self, prop_name, NodeProperty(data=data.get(prop_name))) # type: ignore[arg-type] + @property def value(self) -> Any: return self._value @@ -104,7 +109,7 @@ def _generate_input_data(self) -> dict | None: return {"data": data, "variables": variables} - def _generate_query_data(self, property: bool = False) -> dict | None: + def _generate_query_data(self, property: bool = False, include_metadata: bool = False) -> dict | None: data: dict[str, Any] = {"value": None} if property: @@ -115,6 +120,11 @@ def _generate_query_data(self, property: bool = False) -> dict | None: for prop_name in self._properties_object: data[prop_name] = {"id": None, "display_label": None, "__typename": None} + if include_metadata: + data["updated_at"] = None + for prop_name in ATTRIBUTE_METADATA_OBJECT: + data[prop_name] = {"id": None, "display_label": None, "__typename": None} + return data def _generate_mutation_query(self) -> dict[str, Any]: diff --git a/infrahub_sdk/node/constants.py b/infrahub_sdk/node/constants.py index f96fce6a..8d301115 100644 --- a/infrahub_sdk/node/constants.py +++ b/infrahub_sdk/node/constants.py @@ -3,6 +3,17 @@ PROPERTIES_FLAG = ["is_protected", "updated_at"] PROPERTIES_OBJECT = ["source", "owner"] + +# Attribute-level metadata object fields (in addition to PROPERTIES_OBJECT) +ATTRIBUTE_METADATA_OBJECT = ["updated_by"] + +# Node metadata fields (for node_metadata in GraphQL response) +NODE_METADATA_FIELDS_FLAG = ["created_at", "updated_at"] +NODE_METADATA_FIELDS_OBJECT = ["created_by", "updated_by"] + +# Relationship metadata fields (for relationship_metadata in GraphQL response) +RELATIONSHIP_METADATA_FIELDS_FLAG = ["updated_at"] +RELATIONSHIP_METADATA_FIELDS_OBJECT = ["updated_by"] SAFE_VALUE = re.compile(r"(^[\. 
/:a-zA-Z0-9_-]+$)|(^$)") IP_TYPES = ipaddress.IPv4Interface | ipaddress.IPv6Interface | ipaddress.IPv4Network | ipaddress.IPv6Network diff --git a/infrahub_sdk/node/metadata.py b/infrahub_sdk/node/metadata.py new file mode 100644 index 00000000..1fe236d8 --- /dev/null +++ b/infrahub_sdk/node/metadata.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from .property import NodeProperty + + +class NodeMetadata: + """Represents metadata about a node (created_at, created_by, updated_at, updated_by).""" + + def __init__(self, data: dict | None = None) -> None: + """ + Args: + data: Data containing the metadata fields from the GraphQL response. + """ + self.created_at: str | None = None + self.created_by: NodeProperty | None = None + self.updated_at: str | None = None + self.updated_by: NodeProperty | None = None + + if data: + self.created_at = data.get("created_at") + self.updated_at = data.get("updated_at") + if data.get("created_by"): + self.created_by = NodeProperty(data["created_by"]) + if data.get("updated_by"): + self.updated_by = NodeProperty(data["updated_by"]) + + def __repr__(self) -> str: + return ( + f"NodeMetadata(created_at={self.created_at!r}, created_by={self.created_by!r}, " + f"updated_at={self.updated_at!r}, updated_by={self.updated_by!r})" + ) + + @classmethod + def _generate_query_data(cls) -> dict: + """Generate the query structure for node_metadata fields.""" + return { + "created_at": None, + "created_by": {"id": None, "__typename": None, "display_label": None}, + "updated_at": None, + "updated_by": {"id": None, "__typename": None, "display_label": None}, + } + + +class RelationshipMetadata: + """Represents metadata about a relationship edge (updated_at, updated_by).""" + + def __init__(self, data: dict | None = None) -> None: + """ + Args: + data: Data containing the metadata fields from the GraphQL response. 
+ """ + self.updated_at: str | None = None + self.updated_by: NodeProperty | None = None + + if data: + self.updated_at = data.get("updated_at") + if data.get("updated_by"): + self.updated_by = NodeProperty(data["updated_by"]) + + def __repr__(self) -> str: + return f"RelationshipMetadata(updated_at={self.updated_at!r}, updated_by={self.updated_by!r})" + + @classmethod + def _generate_query_data(cls) -> dict: + """Generate the query structure for relationship_metadata fields.""" + return { + "updated_at": None, + "updated_by": {"id": None, "__typename": None, "display_label": None}, + } diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 8c5e89c9..ecf1b773 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -23,6 +23,7 @@ ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, PROPERTIES_OBJECT, ) +from .metadata import NodeMetadata from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync @@ -50,6 +51,7 @@ def __init__(self, schema: MainSchemaTypesAPI, branch: str, data: dict | None = self._branch = branch self._existing: bool = True self._attribute_data: dict[str, Attribute] = {} + self._metadata: NodeMetadata | None = None # Generate a unique ID only to be used inside the SDK # The format if this ID is purposely different from the ID used by the API @@ -152,6 +154,10 @@ def hfid(self) -> list[str] | None: def hfid_str(self) -> str | None: return self.get_human_friendly_id_as_string(include_kind=True) + def get_node_metadata(self) -> NodeMetadata | None: + """Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched.""" + return self._metadata + def _init_attributes(self, data: dict | None = None) -> None: for attr_schema in self._schema.attributes: attr_data = data.get(attr_schema.name, None) if isinstance(data, dict) else None @@ -419,12 +425,16 @@ def generate_query_data_init( exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, + include_metadata: bool = False, ) -> dict[str, Any | dict]: data: dict[str, Any] = { "count": None, "edges": {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}}, } + if include_metadata: + data["edges"]["node_metadata"] = NodeMetadata._generate_query_data() + data["@filters"] = deepcopy(filters) if filters is not None else {} if order: @@ -496,8 +506,12 @@ def __init__( """ self._client = client - if isinstance(data, dict) and isinstance(data.get("node"), dict): - data = data.get("node") + # Extract node_metadata before extracting node data (node_metadata is sibling to node in edges) + node_metadata_data: dict | None = None + if isinstance(data, dict): + node_metadata_data = data.get("node_metadata") + if isinstance(data.get("node"), dict): + data = data.get("node") self._relationship_cardinality_many_data: dict[str, RelationshipManager] = {} self._relationship_cardinality_one_data: dict[str, RelatedNode] = {} @@ -505,6 +519,10 @@ def __init__( super().__init__(schema=schema, branch=branch or client.default_branch, data=data) + # Initialize metadata after base class init + if node_metadata_data: + self._metadata = NodeMetadata(node_metadata_data) + @classmethod async def from_graphql( cls, @@ -785,6 +803,7 @@ async def generate_query_data( partial_match: bool = False, property: bool = False, order: Order | None = None, + include_metadata: bool = False, ) -> dict[str, Any | dict]: data = self.generate_query_data_init( 
filters=filters, @@ -794,6 +813,7 @@ async def generate_query_data( exclude=exclude, partial_match=partial_match, order=order, + include_metadata=include_metadata, ) data["edges"]["node"].update( await self.generate_query_data_node( @@ -802,6 +822,7 @@ async def generate_query_data( prefetch_relationships=prefetch_relationships, inherited=True, property=property, + include_metadata=include_metadata, ) ) @@ -825,6 +846,7 @@ async def generate_query_data( inherited=False, insert_alias=True, property=property, + include_metadata=include_metadata, ) if child_data: @@ -840,6 +862,7 @@ async def generate_query_data_node( insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, + include_metadata: bool = False, ) -> dict[str, Any | dict]: """Generate the node part of a GraphQL Query with attributes and nodes. @@ -850,6 +873,7 @@ async def generate_query_data_node( Defaults to True. insert_alias (bool, optional): If True, inserts aliases in the query for each attribute or relationship. prefetch_relationships (bool, optional): If True, pre-fetches relationship data as part of the query. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. Returns: dict[str, Union[Any, Dict]]: GraphQL query in dictionary format @@ -866,7 +890,7 @@ async def generate_query_data_node( if not inherited and attr._schema.inherited: continue - attr_data = attr._generate_query_data(property=property) + attr_data = attr._generate_query_data(property=property, include_metadata=include_metadata) if attr_data: data[attr_name] = attr_data if insert_alias: @@ -898,11 +922,14 @@ async def generate_query_data_node( peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch) peer_data = await peer_node.generate_query_data_node( property=property, + include_metadata=include_metadata, ) rel_data: dict[str, Any] if rel_schema and rel_schema.cardinality == "one": - rel_data = RelatedNode._generate_query_data(peer_data=peer_data, property=property) + rel_data = RelatedNode._generate_query_data( + peer_data=peer_data, property=property, include_metadata=include_metadata + ) # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql # tries to resolve attributes in this ancestor instead of actual node. 
To avoid # invalid queries issues when attribute is missing in the common ancestor, we use a fragment @@ -912,7 +939,9 @@ async def generate_query_data_node( rel_data["node"] = {} rel_data["node"][f"...on {rel_schema.peer}"] = data_node elif rel_schema and rel_schema.cardinality == "many": - rel_data = RelationshipManager._generate_query_data(peer_data=peer_data, property=property) + rel_data = RelationshipManager._generate_query_data( + peer_data=peer_data, property=property, include_metadata=include_metadata + ) else: continue @@ -1285,8 +1314,12 @@ def __init__( """ self._client = client - if isinstance(data, dict) and isinstance(data.get("node"), dict): - data = data.get("node") + # Extract node_metadata before extracting node data (node_metadata is sibling to node in edges) + node_metadata_data: dict | None = None + if isinstance(data, dict): + node_metadata_data = data.get("node_metadata") + if isinstance(data.get("node"), dict): + data = data.get("node") self._relationship_cardinality_many_data: dict[str, RelationshipManagerSync] = {} self._relationship_cardinality_one_data: dict[str, RelatedNodeSync] = {} @@ -1294,6 +1327,10 @@ def __init__( super().__init__(schema=schema, branch=branch or client.default_branch, data=data) + # Initialize metadata after base class init + if node_metadata_data: + self._metadata = NodeMetadata(node_metadata_data) + @classmethod def from_graphql( cls, @@ -1571,6 +1608,7 @@ def generate_query_data( partial_match: bool = False, property: bool = False, order: Order | None = None, + include_metadata: bool = False, ) -> dict[str, Any | dict]: data = self.generate_query_data_init( filters=filters, @@ -1580,6 +1618,7 @@ def generate_query_data( exclude=exclude, partial_match=partial_match, order=order, + include_metadata=include_metadata, ) data["edges"]["node"].update( self.generate_query_data_node( @@ -1588,6 +1627,7 @@ def generate_query_data( prefetch_relationships=prefetch_relationships, inherited=True, property=property, + include_metadata=include_metadata, ) ) @@ -1610,6 +1650,7 @@ def generate_query_data( inherited=False, insert_alias=True, property=property, + include_metadata=include_metadata, ) if child_data: @@ -1625,6 +1666,7 @@ def generate_query_data_node( insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, + include_metadata: bool = False, ) -> dict[str, Any | dict]: """Generate the node part of a GraphQL Query with attributes and nodes. @@ -1635,6 +1677,7 @@ def generate_query_data_node( Defaults to True. insert_alias (bool, optional): If True, inserts aliases in the query for each attribute or relationship. prefetch_relationships (bool, optional): If True, pre-fetches relationship data as part of the query. + include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata in the query. 
 
         Returns:
             dict[str, Union[Any, Dict]]: GraphQL query in dictionary format
@@ -1651,7 +1694,7 @@ def generate_query_data_node(
             if not inherited and attr._schema.inherited:
                 continue
 
-            attr_data = attr._generate_query_data(property=property)
+            attr_data = attr._generate_query_data(property=property, include_metadata=include_metadata)
             if attr_data:
                 data[attr_name] = attr_data
                 if insert_alias:
@@ -1683,11 +1726,14 @@ def generate_query_data_node(
                 peer_node = InfrahubNodeSync(client=self._client, schema=peer_schema, branch=self._branch)
                 peer_data = peer_node.generate_query_data_node(
                     property=property,
+                    include_metadata=include_metadata,
                 )
 
             rel_data: dict[str, Any]
             if rel_schema and rel_schema.cardinality == "one":
-                rel_data = RelatedNodeSync._generate_query_data(peer_data=peer_data, property=property)
+                rel_data = RelatedNodeSync._generate_query_data(
+                    peer_data=peer_data, property=property, include_metadata=include_metadata
+                )
             # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql
             # tries to resolve attributes in this ancestor instead of actual node. To avoid
             # invalid queries issues when attribute is missing in the common ancestor, we use a fragment
@@ -1697,7 +1743,9 @@ def generate_query_data_node(
                 rel_data["node"] = {}
                 rel_data["node"][f"...on {rel_schema.peer}"] = data_node
             elif rel_schema and rel_schema.cardinality == "many":
-                rel_data = RelationshipManagerSync._generate_query_data(peer_data=peer_data, property=property)
+                rel_data = RelationshipManagerSync._generate_query_data(
+                    peer_data=peer_data, property=property, include_metadata=include_metadata
+                )
             else:
                 continue

diff --git a/infrahub_sdk/node/property.py b/infrahub_sdk/node/property.py
index 7a8fcd6d..652aa816 100644
--- a/infrahub_sdk/node/property.py
+++ b/infrahub_sdk/node/property.py
@@ -20,5 +20,8 @@ def __init__(self, data: dict | str) -> None:
         self.display_label = data.get("display_label", None)
         self.typename = data.get("__typename", None)
 
+    def __repr__(self) -> str:
+        return f"NodeProperty({{'id': {self.id!r}, 'display_label': {self.display_label!r}, '__typename': {self.typename!r}}})"
+
     def _generate_input_data(self) -> str | None:
         return self.id

diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py
index 1bf1307e..5b46a8f7 100644
--- a/infrahub_sdk/node/related_node.py
+++ b/infrahub_sdk/node/related_node.py
@@ -6,6 +6,7 @@
 from ..exceptions import Error
 from ..protocols_base import CoreNodeBase
 from .constants import PROFILE_KIND_PREFIX, PROPERTIES_FLAG, PROPERTIES_OBJECT
+from .metadata import NodeMetadata, RelationshipMetadata
 
 if TYPE_CHECKING:
     from ..client import InfrahubClient, InfrahubClientSync
@@ -40,11 +41,13 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict,
         self._typename: str | None = None
         self._kind: str | None = None
         self._source_typename: str | None = None
+        self._relationship_metadata: RelationshipMetadata | None = None
 
         if isinstance(data, (CoreNodeBase)):
             self._peer = data
             for prop in self._properties:
                 setattr(self, prop, None)
+            self._relationship_metadata = None
 
         elif isinstance(data, list):
             data = {"hfid": data}
@@ -81,6 +84,10 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict,
             else:
                 setattr(self, prop, None)
 
+        # Parse relationship metadata (at edge level)
+        if data.get("relationship_metadata"):
+            self._relationship_metadata = RelationshipMetadata(data["relationship_metadata"])
+
     @property
     def id(self) -> str | None:
         if self._peer:
@@ -134,6 +141,10 @@ def is_from_profile(self) -> bool:
             return False
         return bool(re.match(rf"^{PROFILE_KIND_PREFIX}[A-Z]", self._source_typename))
 
+    def get_relationship_metadata(self) -> RelationshipMetadata | None:
+        """Returns the relationship metadata (updated_at, updated_by) if fetched."""
+        return self._relationship_metadata
+
     def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]:
         data: dict[str, Any] = {}
 
@@ -160,12 +171,17 @@ def _generate_mutation_query(self) -> dict[str, Any]:
         return {}
 
     @classmethod
-    def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict:
+    def _generate_query_data(
+        cls, peer_data: dict[str, Any] | None = None, property: bool = False, include_metadata: bool = False
+    ) -> dict:
         """Generates the basic structure of a GraphQL query for a single relationship.
 
         Args:
             peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for the node.
                 This is used to add extra fields when prefetching related node data.
+            property (bool, optional): If True, includes property fields (is_protected, source, owner, etc.).
+            include_metadata (bool, optional): If True, includes node_metadata (for the peer node) and
+                relationship_metadata (for the relationship edge) fields.
 
         Returns:
             Dict: A dictionary representing the basic structure of a GraphQL query, including the node's ID, display label,
@@ -181,6 +197,13 @@ def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property:
                 properties[prop_name] = {"id": None, "display_label": None, "__typename": None}
 
             data["properties"] = properties
+
+        if include_metadata:
+            # node_metadata is for the peer InfrahubNode (populated via from_graphql)
+            data["node_metadata"] = NodeMetadata._generate_query_data()
+            # relationship_metadata is for the relationship edge itself
+            data["relationship_metadata"] = RelationshipMetadata._generate_query_data()
+
         if peer_data:
             data["node"].update(peer_data)

diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py
index 757bb875..dcd33c9c 100644
--- a/infrahub_sdk/node/relationship.py
+++ b/infrahub_sdk/node/relationship.py
@@ -10,6 +10,7 @@
 )
 from ..types import Order
 from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT
+from .metadata import NodeMetadata, RelationshipMetadata
 from .related_node import RelatedNode, RelatedNodeSync
 
 if TYPE_CHECKING:
@@ -72,12 +73,16 @@ def _generate_mutation_query(self) -> dict[str, Any]:
         return {}
 
     @classmethod
-    def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict:
+    def _generate_query_data(
+        cls, peer_data: dict[str, Any] | None = None, property: bool = False, include_metadata: bool = False
+    ) -> dict:
         """Generates the basic structure of a GraphQL query for relationships with multiple nodes.
 
         Args:
             peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for each node.
                 This is used to add extra fields when prefetching related node data in many-to-many relationships.
+            property (bool, optional): If True, includes property fields (is_protected, source, owner, etc.).
+            include_metadata (bool, optional): If True, includes node_metadata and relationship_metadata fields.
 
         Returns:
             Dict: A dictionary representing the basic structure of a GraphQL query for multiple related nodes.
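As a rough illustration of what the new flag adds, the unit tests later in this patch call the classmethod above directly; a minimal sketch of the expected shape (assertion values taken from those tests):

    from infrahub_sdk.node import RelationshipManagerBase

    # include_metadata=True adds two metadata blocks next to the usual node fields.
    data = RelationshipManagerBase._generate_query_data(include_metadata=True)
    assert data["edges"]["node_metadata"]["created_by"] == {"id": None, "display_label": None, "__typename": None}
    assert data["edges"]["relationship_metadata"]["updated_by"] == {"id": None, "display_label": None, "__typename": None}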
@@ -97,6 +102,10 @@ def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: properties[prop_name] = {"id": None, "display_label": None, "__typename": None} data["edges"]["properties"] = properties + if include_metadata: + data["edges"]["node_metadata"] = NodeMetadata._generate_query_data() + data["edges"]["relationship_metadata"] = RelationshipMetadata._generate_query_data() + if peer_data: data["edges"]["node"].update(peer_data) diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index f9e6b8bc..87c2dd53 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -1,9 +1,10 @@ +from __future__ import annotations + import inspect import ipaddress from typing import TYPE_CHECKING import pytest -from pytest_httpx import HTTPXMock from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.node import ( @@ -16,12 +17,16 @@ parse_human_friendly_id, ) from infrahub_sdk.node.constants import SAFE_VALUE +from infrahub_sdk.node.metadata import NodeMetadata, RelationshipMetadata +from infrahub_sdk.node.property import NodeProperty from infrahub_sdk.node.related_node import RelatedNode, RelatedNodeSync -from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI -from tests.unit.sdk.conftest import BothClients if TYPE_CHECKING: + from pytest_httpx import HTTPXMock + from infrahub_sdk.client import InfrahubClient, InfrahubClientSync + from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI + from tests.unit.sdk.conftest import BothClients # type: ignore[attr-defined] @@ -2746,3 +2751,426 @@ def test_is_from_profile_when_any_peer_has_unknown_source(self, client, location name="tags", client=client, node=None, branch="main", schema=location_schema.relationships[0], data=data ) assert not manager.is_from_profile + + +def test_node_property_repr_with_dict_data() -> None: + data = {"id": "account-123", "display_label": "Admin User", "__typename": "CoreAccount"} + prop = NodeProperty(data) + result = repr(prop) + assert result == "NodeProperty({'id': 'account-123', 'display_label': 'Admin User', '__typename': 'CoreAccount'})" + + +def test_node_metadata_repr_with_full_data() -> None: + data = { + "created_at": "2024-01-15T10:30:00Z", + "created_by": {"id": "account-1", "display_label": "Admin", "__typename": "CoreAccount"}, + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": {"id": "account-2", "display_label": "Editor", "__typename": "CoreAccount"}, + } + metadata = NodeMetadata(data) + result = repr(metadata) + assert "NodeMetadata(created_at='2024-01-15T10:30:00Z'" in result + assert "created_by=NodeProperty({'id': 'account-1'" in result + assert "updated_at='2024-01-16T14:45:00Z'" in result + assert "updated_by=NodeProperty({'id': 'account-2'" in result + + +def test_relationship_metadata_repr_with_full_data() -> None: + data = { + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": {"id": "account-1", "display_label": "Admin", "__typename": "CoreAccount"}, + } + metadata = RelationshipMetadata(data) + result = repr(metadata) + assert "RelationshipMetadata(updated_at='2024-01-16T14:45:00Z'" in result + assert "updated_by=NodeProperty({'id': 'account-1'" in result + + +@pytest.mark.parametrize("client_type", client_types) +async def test_query_data_with_include_metadata( + clients: BothClients, location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that include_metadata=True adds node_metadata and attribute-level updated_by to the query.""" + if client_type == "standard": + node = 
InfrahubNode(client=clients.standard, schema=location_schema) + data = await node.generate_query_data(include_metadata=True) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema) + data = node.generate_query_data(include_metadata=True) + + edges = data["BuiltinLocation"]["edges"] + + # Verify node_metadata is present at the edge level + assert "node_metadata" in edges + assert edges["node_metadata"] == { + "created_at": None, + "created_by": {"id": None, "__typename": None, "display_label": None}, + "updated_at": None, + "updated_by": {"id": None, "__typename": None, "display_label": None}, + } + + # Verify attribute-level metadata fields + node_data = edges["node"] + assert node_data["name"]["updated_at"] is None + assert node_data["name"]["updated_by"] == {"id": None, "display_label": None, "__typename": None} + assert node_data["description"]["updated_at"] is None + assert node_data["description"]["updated_by"] == {"id": None, "display_label": None, "__typename": None} + + +@pytest.mark.parametrize("client_type", client_types) +async def test_query_data_with_include_metadata_and_property( + clients: BothClients, location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that include_metadata=True combined with property=True produces expected query structure.""" + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema) + data = await node.generate_query_data(property=True, include_metadata=True) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema) + data = node.generate_query_data(property=True, include_metadata=True) + + edges = data["BuiltinLocation"]["edges"] + + # Verify node_metadata is present + assert "node_metadata" in edges + + # Verify attribute has both property fields and metadata fields + node_data = edges["node"] + name_attr = node_data["name"] + + # Property fields + assert "is_protected" in name_attr + assert "source" in name_attr + assert "owner" in name_attr + assert "is_default" in name_attr + assert "is_from_profile" in name_attr + + # Metadata fields + assert "updated_at" in name_attr + assert "updated_by" in name_attr + assert name_attr["updated_by"] == {"id": None, "display_label": None, "__typename": None} + + # Verify relationship also has relationship_metadata + primary_tag = node_data["primary_tag"] + assert "relationship_metadata" in primary_tag + assert primary_tag["relationship_metadata"] == { + "updated_at": None, + "updated_by": {"id": None, "__typename": None, "display_label": None}, + } + + +@pytest.mark.parametrize("client_type", client_types) +async def test_query_data_without_include_metadata( + clients: BothClients, location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that include_metadata=False (default) does not add metadata fields.""" + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema) + data = await node.generate_query_data(include_metadata=False) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema) + data = node.generate_query_data(include_metadata=False) + + edges = data["BuiltinLocation"]["edges"] + + # Verify node_metadata is NOT present + assert "node_metadata" not in edges + + # Verify attribute-level metadata fields are NOT present + node_data = edges["node"] + assert "updated_by" not in node_data["name"] + + +@pytest.mark.parametrize("client_type", client_types) +async def test_node_metadata_from_graphql_response( + clients: BothClients, 
location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that NodeMetadata is correctly parsed from GraphQL response data.""" + location_data = { + "node": { + "__typename": "BuiltinLocation", + "id": "llllllll-llll-llll-llll-llllllllllll", + "display_label": "dfw1", + "name": {"value": "DFW"}, + "description": {"value": None}, + "type": {"value": "SITE"}, + "primary_tag": { + "node": { + "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", + "display_label": "red", + "__typename": "BuiltinTag", + }, + }, + "tags": { + "count": 0, + "edges": [], + }, + }, + "node_metadata": { + "created_at": "2024-01-15T10:30:00Z", + "created_by": {"id": "account-1", "display_label": "Admin", "__typename": "CoreAccount"}, + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": {"id": "account-2", "display_label": "Editor", "__typename": "CoreAccount"}, + }, + } + + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data) + + metadata = node.get_node_metadata() + + assert metadata is not None + assert metadata.created_at == "2024-01-15T10:30:00Z" + assert metadata.created_by.id == "account-1" + assert metadata.created_by.display_label == "Admin" + assert metadata.updated_at == "2024-01-16T14:45:00Z" + assert metadata.updated_by.id == "account-2" + assert metadata.updated_by.display_label == "Editor" + + +@pytest.mark.parametrize("client_type", client_types) +async def test_relationship_metadata_from_graphql_response( + clients: BothClients, location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that RelationshipMetadata is correctly parsed from GraphQL response data.""" + location_data = { + "node": { + "__typename": "BuiltinLocation", + "id": "llllllll-llll-llll-llll-llllllllllll", + "display_label": "dfw1", + "name": {"value": "DFW"}, + "description": {"value": None}, + "type": {"value": "SITE"}, + "primary_tag": { + "node": { + "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", + "display_label": "red", + "__typename": "BuiltinTag", + }, + "relationship_metadata": { + "updated_at": "2024-01-17T08:00:00Z", + "updated_by": {"id": "account-3", "display_label": "Updater", "__typename": "CoreAccount"}, + }, + }, + "tags": { + "count": 0, + "edges": [], + }, + }, + } + + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data) + + rel_metadata = node.primary_tag.get_relationship_metadata() + + assert rel_metadata is not None + assert rel_metadata.updated_at == "2024-01-17T08:00:00Z" + assert rel_metadata.updated_by.id == "account-3" + assert rel_metadata.updated_by.display_label == "Updater" + + +@pytest.mark.parametrize("client_type", client_types) +async def test_attribute_metadata_from_graphql_response( + clients: BothClients, location_schema: NodeSchemaAPI, client_type: str +) -> None: + """Test that attribute-level metadata (updated_at, updated_by) is correctly parsed.""" + location_data = { + "node": { + "__typename": "BuiltinLocation", + "id": "llllllll-llll-llll-llll-llllllllllll", + "display_label": "dfw1", + "name": { + "value": "DFW", + "updated_at": "2024-01-18T09:00:00Z", + "updated_by": {"id": "account-4", "display_label": "NameUpdater", "__typename": "CoreAccount"}, + }, + "description": { + "value": None, + "updated_at": "2024-01-19T10:00:00Z", + 
"updated_by": None, + }, + "type": {"value": "SITE"}, + "primary_tag": { + "node": { + "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", + "display_label": "red", + "__typename": "BuiltinTag", + }, + }, + "tags": { + "count": 0, + "edges": [], + }, + }, + } + + if client_type == "standard": + node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data) + else: + node = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data) + + assert node.name.updated_at == "2024-01-18T09:00:00Z" + assert node.name.updated_by is not None + assert node.name.updated_by.id == "account-4" + assert node.name.updated_by.display_label == "NameUpdater" + + assert node.description.updated_at == "2024-01-19T10:00:00Z" + assert node.description.updated_by is None + + +def test_node_metadata_with_no_data() -> None: + """Test NodeMetadata initialization with no data argument.""" + metadata = NodeMetadata() + + assert metadata.created_at is None + assert metadata.created_by is None + assert metadata.updated_at is None + assert metadata.updated_by is None + + +def test_node_metadata_with_none_data() -> None: + """Test NodeMetadata initialization with explicit None data.""" + metadata = NodeMetadata(data=None) + + assert metadata.created_at is None + assert metadata.created_by is None + assert metadata.updated_at is None + assert metadata.updated_by is None + + +def test_node_metadata_with_partial_data_missing_created_by() -> None: + """Test NodeMetadata with data that has created_by as None.""" + data = { + "created_at": "2024-01-15T10:30:00Z", + "created_by": None, + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": {"id": "account-2", "display_label": "Editor", "__typename": "CoreAccount"}, + } + metadata = NodeMetadata(data=data) + + assert metadata.created_at == "2024-01-15T10:30:00Z" + assert metadata.created_by is None + assert metadata.updated_at == "2024-01-16T14:45:00Z" + assert metadata.updated_by is not None + assert metadata.updated_by.id == "account-2" + + +def test_node_metadata_with_partial_data_missing_updated_by() -> None: + """Test NodeMetadata with data that has updated_by as None.""" + data = { + "created_at": "2024-01-15T10:30:00Z", + "created_by": {"id": "account-1", "display_label": "Admin", "__typename": "CoreAccount"}, + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": None, + } + metadata = NodeMetadata(data=data) + + assert metadata.created_at == "2024-01-15T10:30:00Z" + assert metadata.created_by is not None + assert metadata.created_by.id == "account-1" + assert metadata.updated_at == "2024-01-16T14:45:00Z" + assert metadata.updated_by is None + + +def test_node_metadata_with_partial_data_missing_both() -> None: + """Test NodeMetadata with data that has both created_by and updated_by as None.""" + data = { + "created_at": "2024-01-15T10:30:00Z", + "created_by": None, + "updated_at": "2024-01-16T14:45:00Z", + "updated_by": None, + } + metadata = NodeMetadata(data=data) + + assert metadata.created_at == "2024-01-15T10:30:00Z" + assert metadata.created_by is None + assert metadata.updated_at == "2024-01-16T14:45:00Z" + assert metadata.updated_by is None + + +def test_relationship_metadata_with_no_data() -> None: + """Test RelationshipMetadata initialization with no data argument.""" + metadata = RelationshipMetadata() + + assert metadata.updated_at is None + assert metadata.updated_by is None + + +def test_relationship_metadata_with_none_data() -> None: + """Test RelationshipMetadata initialization with explicit None data.""" + metadata 
= RelationshipMetadata(data=None)
+
+    assert metadata.updated_at is None
+    assert metadata.updated_by is None
+
+
+def test_relationship_metadata_with_partial_data_missing_updated_by() -> None:
+    """Test RelationshipMetadata with data that has updated_by as None."""
+    data = {
+        "updated_at": "2024-01-17T08:00:00Z",
+        "updated_by": None,
+    }
+    metadata = RelationshipMetadata(data=data)
+
+    assert metadata.updated_at == "2024-01-17T08:00:00Z"
+    assert metadata.updated_by is None
+
+
+def test_relationship_manager_generate_query_data_with_include_metadata() -> None:
+    """Test that RelationshipManagerBase._generate_query_data includes metadata when include_metadata=True."""
+    data = RelationshipManagerBase._generate_query_data(include_metadata=True)
+
+    assert "count" in data
+    assert "edges" in data
+    assert "node" in data["edges"]
+    assert data["edges"]["node"]["id"] is None
+    assert data["edges"]["node"]["hfid"] is None
+    assert data["edges"]["node"]["display_label"] is None
+    assert data["edges"]["node"]["__typename"] is None
+
+    assert "node_metadata" in data["edges"]
+    node_metadata = data["edges"]["node_metadata"]
+    assert "created_at" in node_metadata
+    assert "created_by" in node_metadata
+    assert "updated_at" in node_metadata
+    assert "updated_by" in node_metadata
+    assert node_metadata["created_by"] == {"id": None, "__typename": None, "display_label": None}
+    assert node_metadata["updated_by"] == {"id": None, "__typename": None, "display_label": None}
+
+    assert "relationship_metadata" in data["edges"]
+    rel_metadata = data["edges"]["relationship_metadata"]
+    assert "updated_at" in rel_metadata
+    assert "updated_by" in rel_metadata
+    assert rel_metadata["updated_by"] == {"id": None, "__typename": None, "display_label": None}
+
+
+def test_relationship_manager_generate_query_data_with_include_metadata_and_property() -> None:
+    """Test RelationshipManagerBase._generate_query_data with both include_metadata=True and property=True."""
+    data = RelationshipManagerBase._generate_query_data(include_metadata=True, property=True)
+
+    assert "node_metadata" in data["edges"]
+    assert "relationship_metadata" in data["edges"]
+    assert "properties" in data["edges"]
+
+    properties = data["edges"]["properties"]
+    assert "is_protected" in properties
+    assert "updated_at" in properties
+    assert "source" in properties
+    assert "owner" in properties
+
+
+def test_relationship_manager_generate_query_data_without_include_metadata() -> None:
+    """Test that RelationshipManagerBase._generate_query_data excludes metadata when include_metadata=False."""
+    data = RelationshipManagerBase._generate_query_data(include_metadata=False)
+
+    assert "node_metadata" not in data["edges"]
+    assert "relationship_metadata" not in data["edges"]
+
+    assert "count" in data
+    assert "edges" in data
+    assert "node" in data["edges"]

From ea109eefd544dd87b50358d31abeb59a887bc9da Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Fri, 19 Dec 2025 19:52:44 +0100
Subject: [PATCH 12/27] Fix invalid return types

---
 pyproject.toml                       | 1 -
 tests/unit/ctl/test_branch_report.py | 1 +
 tests/unit/sdk/conftest.py           | 1 +
 3 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index a2fc1411..a0ae21f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -159,7 +159,6 @@ include = ["tests/**"]
 invalid-argument-type = "ignore"
 invalid-assignment = "ignore"
 invalid-method-override = "ignore"
-invalid-return-type = "ignore"
 no-matching-overload = "ignore"
 non-subscriptable = "ignore"
 not-iterable = "ignore"
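The fix in the two fixtures below follows the usual pattern for typed pytest fixtures: a fixture annotated as returning HTTPXMock must actually end with a return statement, otherwise it implicitly returns None. A minimal sketch of the pattern (the fixture name and response payload are illustrative, not from this repository):

    import pytest
    from pytest_httpx import HTTPXMock

    @pytest.fixture
    def mock_query(httpx_mock: HTTPXMock) -> HTTPXMock:
        # Register a canned response, then return the mock so the value
        # injected into dependent tests matches the declared return type.
        httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json={"data": {}})
        return httpx_mock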
diff --git a/tests/unit/ctl/test_branch_report.py b/tests/unit/ctl/test_branch_report.py index 27fc3a40..bea2fa2d 100644 --- a/tests/unit/ctl/test_branch_report.py +++ b/tests/unit/ctl/test_branch_report.py @@ -126,6 +126,7 @@ def mock_branch_report_default_branch(httpx_mock: HTTPXMock) -> HTTPXMock: }, match_headers={"X-Infrahub-Tracker": "query-branch"}, ) + return httpx_mock @pytest.fixture diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index 281b2d80..55f2be7f 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -2121,6 +2121,7 @@ async def mock_rest_api_artifact_generate(httpx_mock: HTTPXMock, schema_query_04 method="POST", url="http://mock/graphql/main", json=artifact_definition_graphql_response, is_reusable=True ) httpx_mock.add_response(method="POST", url=re.compile(r"^http://mock/api/artifact/generate/.*"), is_reusable=True) + return httpx_mock @pytest.fixture From 64ca67a205f1f6d4854cab75f3043c6bcd39e1d5 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Mon, 22 Dec 2025 12:13:37 +0100 Subject: [PATCH 13/27] Upgrade ty and cleanup rules --- .github/workflows/ci.yml | 2 +- pyproject.toml | 18 ++++++---------- uv.lock | 46 ++++++++++++++++++++-------------------- 3 files changed, 31 insertions(+), 35 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 062b9bf7..67b452db 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -86,7 +86,7 @@ jobs: with: version: "${{ needs.prepare-environment.outputs.UV_VERSION }}" - name: Install dependencies - run: uv sync --group lint + run: uv sync --all-groups --all-extras - name: "Linting: ruff check" run: "uv run ruff check ." - name: "Linting: ruff format" diff --git a/pyproject.toml b/pyproject.toml index a2fc1411..69340a36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ lint = [ "mypy==1.11.2", "ruff==0.14.5", "astroid>=3.1,<4.0", - "ty==0.0.4", + "ty==0.0.5", ] types = [ "types-ujson", @@ -130,23 +130,20 @@ include = ["infrahub_sdk/**"] # The ignored rules below should be removed once the code has been updated, they are included # # like this so that we can reactivate them one by one. 
# ################################################################################################## -division-by-zero = "ignore" invalid-argument-type = "ignore" invalid-assignment = "ignore" invalid-await = "ignore" -invalid-return-type = "ignore" invalid-type-form = "ignore" -missing-argument = "ignore" no-matching-overload = "ignore" -possibly-unresolved-reference = "ignore" -redundant-cast = "ignore" -too-many-positional-arguments = "ignore" -type-assertion-failure = "ignore" -unknown-argument = "ignore" unresolved-attribute = "ignore" -unresolved-import = "ignore" unsupported-operator = "ignore" +[[tool.ty.overrides]] +include = ["infrahub_sdk/ctl/config.py"] + +[tool.ty.overrides.rules] +unresolved-import = "ignore" # import tomli as tomllib when running on later versions of Python + [[tool.ty.overrides]] include = ["tests/**"] @@ -165,7 +162,6 @@ non-subscriptable = "ignore" not-iterable = "ignore" possibly-missing-attribute = "ignore" unresolved-attribute = "ignore" -unresolved-import = "ignore" [[tool.ty.overrides]] diff --git a/uv.lock b/uv.lock index e2254991..20fe380c 100644 --- a/uv.lock +++ b/uv.lock @@ -868,7 +868,7 @@ dev = [ { name = "requests" }, { name = "ruff", specifier = "==0.14.5" }, { name = "towncrier", specifier = ">=24.8.0" }, - { name = "ty", specifier = "==0.0.4" }, + { name = "ty", specifier = "==0.0.5" }, { name = "types-python-slugify", specifier = ">=8.0.0.3" }, { name = "types-pyyaml" }, { name = "types-ujson" }, @@ -878,7 +878,7 @@ lint = [ { name = "astroid", specifier = ">=3.1,<4.0" }, { name = "mypy", specifier = "==1.11.2" }, { name = "ruff", specifier = "==0.14.5" }, - { name = "ty", specifier = "==0.0.4" }, + { name = "ty", specifier = "==0.0.5" }, { name = "yamllint" }, ] tests = [ @@ -2754,27 +2754,27 @@ wheels = [ [[package]] name = "ty" -version = "0.0.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/d9/97d5808e851f790e58f8a54efb5c7b9f404640baf9e295f424846040b316/ty-0.0.4.tar.gz", hash = "sha256:2ea47a0089d74730658ec4e988c8ef476a1e9bd92df3e56709c4003c2895ff3b", size = 4780289, upload-time = "2025-12-19T00:13:53.12Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/94/b32a962243cc8a16e8dc74cf1fe75e8bb013d0e13e71bb540e2c86214b61/ty-0.0.4-py3-none-linux_armv6l.whl", hash = "sha256:5225da65a8d1defeb21ee9d74298b1b97c6cbab36e235a310c1430d9079e4b6a", size = 9762399, upload-time = "2025-12-19T00:14:11.261Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d2/7c76e0c22ddfc2fcd4a3458a65f87ce074070eb1c68c07ee475cc2b6ea68/ty-0.0.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f87770d7988f470b795a2043185082fa959dbe1979a11b4bfe20f1214d37bd6e", size = 9590410, upload-time = "2025-12-19T00:13:55.759Z" }, - { url = "https://files.pythonhosted.org/packages/a5/84/de4b1fc85669faca3622071d5a3f3ec7bfb239971f368c28fae461d3398a/ty-0.0.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ecf68b8ea48674a289d733b4786aecc259242a2d9a920b3ec8583db18c67496a", size = 9131113, upload-time = "2025-12-19T00:14:08.593Z" }, - { url = "https://files.pythonhosted.org/packages/a7/ff/b5bf385b6983be56a470856bbcbac1b7e816bcd765a7e9d39ab2399e387d/ty-0.0.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc396d76a57e527393cae4ee8faf23b93be3df9e93202f39925721a7a2bb7b8", size = 9599152, upload-time = "2025-12-19T00:13:40.484Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/d6/9880ba106f2f20d13e6a5dca5d5ca44bfb3782936ee67ff635f89a2959c0/ty-0.0.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c893b968d2f9964a4d4db9992c9ba66b01f411b1f48dffcde08622e19cd6ab97", size = 9585368, upload-time = "2025-12-19T00:14:00.994Z" }, - { url = "https://files.pythonhosted.org/packages/3f/53/503cfc18bc4c7c4e02f89dd43debc41a6e343b41eb43df658dfb493a386d/ty-0.0.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:526c925b80d68a53c165044d2370fcfc0def1f119f7b7e483ee61d24da6fb891", size = 9998412, upload-time = "2025-12-19T00:14:18.653Z" }, - { url = "https://files.pythonhosted.org/packages/1d/bd/dd2d3e29834da5add2eda0ab5b433171ce9ce9a248c364d2e237f82073d7/ty-0.0.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:857f605a7fa366b6c6e6f38abc311d0606be513c2bee8977b5c8fd4bde1a82d5", size = 10853890, upload-time = "2025-12-19T00:13:50.891Z" }, - { url = "https://files.pythonhosted.org/packages/07/fe/28ba3be1672e6b8df46e43de66a02dc076ffba7853d391a5466421886225/ty-0.0.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4cc981aa3ebdac2c233421b1e58c80b0df6a8e6e6fa8b9e69fbdfd2f82768af", size = 10587263, upload-time = "2025-12-19T00:14:21.577Z" }, - { url = "https://files.pythonhosted.org/packages/26/9c/bb598772043f686afe5bc26cb386020709c1a0bcc164bc22ad9da2b4f55d/ty-0.0.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b03b2708b0bf67c76424a860f848aebaa4772c05529170c3761bfcaea93ec199", size = 10401204, upload-time = "2025-12-19T00:13:43.453Z" }, - { url = "https://files.pythonhosted.org/packages/ac/18/71765e9d63669bf09461c3fea84a7a63232ccb0e83b84676f07b987fc217/ty-0.0.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:469890e885544beb129c21e2f8f15321f0573d094aec13da68593c5f86389ff9", size = 10129713, upload-time = "2025-12-19T00:14:13.725Z" }, - { url = "https://files.pythonhosted.org/packages/c3/2d/c03eba570aa85e9c361de5ed36d60b9ab139e93ee91057f455ab4af48e54/ty-0.0.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abfd928d09567e12068aeca875e920def3badf1978896f474aa4b85b552703c4", size = 9586203, upload-time = "2025-12-19T00:14:03.423Z" }, - { url = "https://files.pythonhosted.org/packages/61/f1/8c3c82a8df69bd4417c77be4f895d043db26dd47bfcc90b33dc109cd0096/ty-0.0.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:44b8e94f9d64df12eae4cf8031c5ca9a4c610b57092b26ad3d68d91bcc7af122", size = 9608230, upload-time = "2025-12-19T00:13:58.252Z" }, - { url = "https://files.pythonhosted.org/packages/51/0c/d8ba3a85c089c246ef6bd49d0f0b40bc0f9209bb819e8c02ccbea5cb4d57/ty-0.0.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9d6a439813e21a06769daf858105818c385d88018929d4a56970d4ddd5cd3df2", size = 9725125, upload-time = "2025-12-19T00:14:05.996Z" }, - { url = "https://files.pythonhosted.org/packages/4d/38/e30f64ad1e40905c766576ec70cffc69163591a5842ce14652672f6ab394/ty-0.0.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c3cfcf26cfe6c828e91d7a529cc2dda37bc3b51ba06909c9be07002a6584af52", size = 10237174, upload-time = "2025-12-19T00:14:23.858Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d7/8d650aa0be8936dd3ed74e2b0655230e2904caa6077c30c16a089b523cff/ty-0.0.4-py3-none-win32.whl", hash = "sha256:58bbf70dd27af6b00dedbdebeec92d5993aa238664f96fa5c0064930f7a0d30b", size = 9188434, upload-time = "2025-12-19T00:13:45.875Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/d7/9fc0c81cf0b0d281ac9c18bfbdb4d6bae2173503ba79e40b210ab41c2c8b/ty-0.0.4-py3-none-win_amd64.whl", hash = "sha256:7c2db0f96218f08c140bd9d3fcbb1b3c8c5c4f0c9b0a5624487f0a2bf4b76163", size = 10019313, upload-time = "2025-12-19T00:14:15.968Z" }, - { url = "https://files.pythonhosted.org/packages/5f/b8/3e3246738eed1cd695c5964a401f3b9c757d20ac21fdae06281af9f40ef6/ty-0.0.4-py3-none-win_arm64.whl", hash = "sha256:69f14fc98e4a847afa9f8c5d5234d008820dbc09c7dcdb3ac1ba16628f5132df", size = 9561857, upload-time = "2025-12-19T00:13:48.382Z" }, +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/db/6299d478000f4f1c6f9bf2af749359381610ffc4cbe6713b66e436ecf6e7/ty-0.0.5.tar.gz", hash = "sha256:983da6330773ff71e2b249810a19c689f9a0372f6e21bbf7cde37839d05b4346", size = 4806218, upload-time = "2025-12-20T21:19:17.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/98/c1f61ba378b4191e641bb36c07b7fcc70ff844d61be7a4bf2fea7472b4a9/ty-0.0.5-py3-none-linux_armv6l.whl", hash = "sha256:1594cd9bb68015eb2f5a3c68a040860f3c9306dc6667d7a0e5f4df9967b460e2", size = 9785554, upload-time = "2025-12-20T21:19:05.024Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f9/b37b77c03396bd779c1397dae4279b7ad79315e005b3412feed8812a4256/ty-0.0.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7c0140ba980233d28699d9ddfe8f43d0b3535d6a3bbff9935df625a78332a3cf", size = 9603995, upload-time = "2025-12-20T21:19:15.256Z" }, + { url = "https://files.pythonhosted.org/packages/7d/70/4e75c11903b0e986c0203040472627cb61d6a709e1797fb08cdf9d565743/ty-0.0.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:15de414712cde92048ae4b1a77c4dc22920bd23653fe42acaf73028bad88f6b9", size = 9145815, upload-time = "2025-12-20T21:19:36.481Z" }, + { url = "https://files.pythonhosted.org/packages/89/05/93983dfcf871a41dfe58e5511d28e6aa332a1f826cc67333f77ae41a2f8a/ty-0.0.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:438aa51ad6c5fae64191f8d58876266e26f9250cf09f6624b6af47a22fa88618", size = 9619849, upload-time = "2025-12-20T21:19:19.084Z" }, + { url = "https://files.pythonhosted.org/packages/82/b6/896ab3aad59f846823f202e94be6016fb3f72434d999d2ae9bd0f28b3af9/ty-0.0.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b3d373fd96af1564380caf153600481c676f5002ee76ba8a7c3508cdff82ee0", size = 9606611, upload-time = "2025-12-20T21:19:24.583Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ae/098e33fc92330285ed843e2750127e896140c4ebd2d73df7732ea496f588/ty-0.0.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8453692503212ad316cf8b99efbe85a91e5f63769c43be5345e435a1b16cba5a", size = 10029523, upload-time = "2025-12-20T21:19:07.055Z" }, + { url = "https://files.pythonhosted.org/packages/04/5a/f4b4c33758b9295e9aca0de9645deca0f4addd21d38847228723a6e780fc/ty-0.0.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2e4c454139473abbd529767b0df7a795ed828f780aef8d0d4b144558c0dc4446", size = 10870892, upload-time = "2025-12-20T21:19:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c5/4e3e7e88389365aa1e631c99378711cf0c9d35a67478cb4720584314cf44/ty-0.0.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:426d4f3b82475b1ec75f3cc9ee5a667c8a4ae8441a09fcd8e823a53b706d00c7", size = 10599291, upload-time = "2025-12-20T21:19:26.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/5d/138f859ea87bd95e17b9818e386ae25a910e46521c41d516bf230ed83ffc/ty-0.0.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5710817b67c6b2e4c0224e4f319b7decdff550886e9020f6d46aa1ce8f89a609", size = 10413515, upload-time = "2025-12-20T21:19:11.094Z" }, + { url = "https://files.pythonhosted.org/packages/27/21/1cbcd0d3b1182172f099e88218137943e0970603492fb10c7c9342369d9a/ty-0.0.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23c55ef08882c7c5ced1ccb90b4eeefa97f690aea254f58ac0987896c590f76", size = 10144992, upload-time = "2025-12-20T21:19:13.225Z" }, + { url = "https://files.pythonhosted.org/packages/ad/30/fdac06a5470c09ad2659a0806497b71f338b395d59e92611f71b623d05a0/ty-0.0.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b9e4c1a28a23b14cf8f4f793f4da396939f16c30bfa7323477c8cc234e352ac4", size = 9606408, upload-time = "2025-12-20T21:19:09.212Z" }, + { url = "https://files.pythonhosted.org/packages/09/93/e99dcd7f53295192d03efd9cbcec089a916f49cad4935c0160ea9adbd53d/ty-0.0.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4e9ebb61529b9745af662e37c37a01ad743cdd2c95f0d1421705672874d806cd", size = 9630040, upload-time = "2025-12-20T21:19:38.165Z" }, + { url = "https://files.pythonhosted.org/packages/d7/f8/6d1e87186e4c35eb64f28000c1df8fd5f73167ce126c5e3dd21fd1204a23/ty-0.0.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5eb191a8e332f50f56dfe45391bdd7d43dd4ef6e60884710fd7ce84c5d8c1eb5", size = 9754016, upload-time = "2025-12-20T21:19:32.79Z" }, + { url = "https://files.pythonhosted.org/packages/28/e6/20f989342cb3115852dda404f1d89a10a3ce93f14f42b23f095a3d1a00c9/ty-0.0.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:92ed7451a1e82ee134a2c24ca43b74dd31e946dff2b08e5c34473e6b051de542", size = 10252877, upload-time = "2025-12-20T21:19:20.787Z" }, + { url = "https://files.pythonhosted.org/packages/57/9d/fc66fa557443233dfad9ae197ff3deb70ae0efcfb71d11b30ef62f5cdcc3/ty-0.0.5-py3-none-win32.whl", hash = "sha256:71f6707e4c1c010c158029a688a498220f28bb22fdb6707e5c20e09f11a5e4f2", size = 9212640, upload-time = "2025-12-20T21:19:30.817Z" }, + { url = "https://files.pythonhosted.org/packages/68/b6/05c35f6dea29122e54af0e9f8dfedd0a100c721affc8cc801ebe2bc2ed13/ty-0.0.5-py3-none-win_amd64.whl", hash = "sha256:2b8b754a0d7191e94acdf0c322747fec34371a4d0669f5b4e89549aef28814ae", size = 10034701, upload-time = "2025-12-20T21:19:28.311Z" }, + { url = "https://files.pythonhosted.org/packages/df/ca/4201ed5cb2af73912663d0c6ded927c28c28b3c921c9348aa8d2cfef4853/ty-0.0.5-py3-none-win_arm64.whl", hash = "sha256:83bea5a5296caac20d52b790ded2b830a7ff91c4ed9f36730fe1f393ceed6654", size = 9566474, upload-time = "2025-12-20T21:19:22.518Z" }, ] [[package]] From d0dd72138a39aaa9ab9013f5b292b451e5a40d7f Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Mon, 22 Dec 2025 16:51:49 +0100 Subject: [PATCH 14/27] Fixes for test annotations and split up rules --- pyproject.toml | 50 +++++++++++++++++-- tests/conftest.py | 2 +- .../tags_transform/tags_transform.py | 4 +- .../transforms/animal_person.py | 2 +- .../ctl_integration/transforms/converted.py | 2 +- tests/unit/pytest_plugin/test_plugin.py | 19 ++++--- 6 files changed, 62 insertions(+), 17 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0abe70f3..cbbabf57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -330,13 +330,53 @@ max-complexity = 17 "S106", # Possible hardcoded password assigned to argument "ARG001", # Unused function argument "ARG002", # Unused method 
argument - - ################################################################################################## - # Review and change the below later # - ################################################################################################## - "ANN001", # Missing type annotation for function argument ] +################################################################################################## +# ANN001 ignores - broken down for incremental cleanup # +# Remove each section as type annotations are added to that directory # +################################################################################################## + +# tests/unit/sdk/ - 478 errors total +"tests/unit/sdk/test_node.py" = ["ANN001"] # 206 errors +"tests/unit/sdk/test_client.py" = ["ANN001"] # 85 errors +"tests/unit/sdk/test_schema.py" = ["ANN001"] # 36 errors +"tests/unit/sdk/test_artifact.py" = ["ANN001"] # 27 errors +"tests/unit/sdk/test_hierarchical_nodes.py" = ["ANN001"] # 26 errors +"tests/unit/sdk/test_task.py" = ["ANN001"] # 21 errors +"tests/unit/sdk/test_store.py" = ["ANN001"] # 12 errors +"tests/unit/sdk/spec/test_object.py" = ["ANN001"] # 11 errors +"tests/unit/sdk/conftest.py" = ["ANN001"] # 11 errors +"tests/unit/sdk/test_diff_summary.py" = ["ANN001"] # 9 errors +"tests/unit/sdk/test_object_store.py" = ["ANN001"] # 7 errors +"tests/unit/sdk/graphql/test_query.py" = ["ANN001"] # 7 errors +"tests/unit/sdk/test_timestamp.py" = ["ANN001"] # 6 errors +"tests/unit/sdk/test_repository.py" = ["ANN001"] # 6 errors +"tests/unit/sdk/test_utils.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/test_store_branch.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/test_query_analyzer.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/test_group_context.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/test_branch.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/test_batch.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/graphql/test_renderer.py" = ["ANN001"] # 4 errors +"tests/unit/sdk/checks/test_checks.py" = ["ANN001"] # 2 errors +"tests/unit/sdk/test_schema_sorter.py" = ["ANN001"] # 1 error +"tests/unit/sdk/test_protocols_generator.py" = ["ANN001"] # 1 error + +# tests/integration/ - 60 errors total +"tests/integration/test_infrahub_client.py" = ["ANN001"] # 32 errors +"tests/integration/test_node.py" = ["ANN001"] # 15 errors +"tests/integration/test_infrahubctl.py" = ["ANN001"] # 9 errors +"tests/integration/test_convert_object_type.py" = ["ANN001"] # 3 errors +"tests/integration/test_repository.py" = ["ANN001"] # 1 error + +# tests/unit/ctl/ - 25 errors total +"tests/unit/ctl/test_repository_app.py" = ["ANN001"] # 11 errors +"tests/unit/ctl/test_render_app.py" = ["ANN001"] # 5 errors +"tests/unit/ctl/test_cli.py" = ["ANN001"] # 5 errors +"tests/unit/ctl/test_branch_app.py" = ["ANN001"] # 3 errors +"tests/unit/ctl/test_branch_report.py" = ["ANN001"] # 1 error + "tasks.py" = [ "PLC0415", # `import` should be at the top-level of a file ] diff --git a/tests/conftest.py b/tests/conftest.py index 5d19956e..9098d373 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,7 @@ ENV_VARS_TO_CLEAN = ["INFRAHUB_ADDRESS", "INFRAHUB_TOKEN", "INFRAHUB_BRANCH", "INFRAHUB_USERNAME", "INFRAHUB_PASSWORD"] -def pytest_collection_modifyitems(items) -> None: +def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: pytest_asyncio_tests = (item for item in items if pytest_asyncio.is_async_test(item)) session_scope_marker = pytest.mark.asyncio(loop_scope="session") for async_test in pytest_asyncio_tests: diff --git 
a/tests/fixtures/integration/test_infrahubctl/tags_transform/tags_transform.py b/tests/fixtures/integration/test_infrahubctl/tags_transform/tags_transform.py index 99786479..2a01dc7d 100644 --- a/tests/fixtures/integration/test_infrahubctl/tags_transform/tags_transform.py +++ b/tests/fixtures/integration/test_infrahubctl/tags_transform/tags_transform.py @@ -1,3 +1,5 @@ +from typing import Any + from infrahub_sdk.transforms import InfrahubTransform @@ -5,7 +7,7 @@ class TagsTransform(InfrahubTransform): query = "tags_query" url = "my-tags" - async def transform(self, data) -> dict[str, str]: + async def transform(self, data: dict[str, Any]) -> dict[str, str]: tag = data["BuiltinTag"]["edges"][0]["node"] tag_name = tag["name"]["value"] tag_description = tag["description"]["value"] diff --git a/tests/fixtures/repos/ctl_integration/transforms/animal_person.py b/tests/fixtures/repos/ctl_integration/transforms/animal_person.py index 667dbaaa..842958e0 100644 --- a/tests/fixtures/repos/ctl_integration/transforms/animal_person.py +++ b/tests/fixtures/repos/ctl_integration/transforms/animal_person.py @@ -6,7 +6,7 @@ class AnimalPerson(InfrahubTransform): query = "animal_person" - async def transform(self, data) -> dict[str, Any]: + async def transform(self, data: dict[str, Any]) -> dict[str, Any]: response_person = data["TestingPerson"]["edges"][0]["node"] name: str = response_person["name"]["value"] animal_names = sorted( diff --git a/tests/fixtures/repos/ctl_integration/transforms/converted.py b/tests/fixtures/repos/ctl_integration/transforms/converted.py index fd4eadb9..d35f7402 100644 --- a/tests/fixtures/repos/ctl_integration/transforms/converted.py +++ b/tests/fixtures/repos/ctl_integration/transforms/converted.py @@ -7,7 +7,7 @@ class ConvertedAnimalPerson(InfrahubTransform): query = "animal_person" - async def transform(self, data) -> dict[str, Any]: + async def transform(self, data: dict[str, Any]) -> dict[str, Any]: response_person = data["TestingPerson"]["edges"][0]["node"] name: str = response_person["name"]["value"] person = self.store.get(key=name, kind="TestingPerson") diff --git a/tests/unit/pytest_plugin/test_plugin.py b/tests/unit/pytest_plugin/test_plugin.py index cbb0d9c2..01fa38ac 100644 --- a/tests/unit/pytest_plugin/test_plugin.py +++ b/tests/unit/pytest_plugin/test_plugin.py @@ -1,16 +1,19 @@ -def test_help_message(pytester) -> None: +import pytest + + +def test_help_message(pytester: pytest.Pytester) -> None: """Make sure that the plugin is loaded by capturing an option it adds in the help message.""" result = pytester.runpytest("--help") result.stdout.fnmatch_lines(["*Infrahub configuration file for the repository*"]) -def test_without_config(pytester) -> None: +def test_without_config(pytester: pytest.Pytester) -> None: """Make sure 0 tests run when test file is not found.""" result = pytester.runpytest() result.assert_outcomes() -def test_emptyconfig(pytester) -> None: +def test_emptyconfig(pytester: pytest.Pytester) -> None: """Make sure that the plugin load the test file properly.""" pytester.makefile( ".yml", @@ -25,7 +28,7 @@ def test_emptyconfig(pytester) -> None: result.assert_outcomes() -def test_jinja2_transform_config_missing_directory(pytester) -> None: +def test_jinja2_transform_config_missing_directory(pytester: pytest.Pytester) -> None: """Make sure tests raise errors if directories are not found.""" pytester.makefile( ".yml", @@ -63,7 +66,7 @@ def test_jinja2_transform_config_missing_directory(pytester) -> None: result.assert_outcomes(errors=1) -def 
test_jinja2_transform_config_missing_input(pytester) -> None:
+def test_jinja2_transform_config_missing_input(pytester: pytest.Pytester) -> None:
     """Make sure tests raise errors if no inputs are provided."""
     pytester.makefile(
         ".yml",
@@ -104,7 +107,7 @@ def test_jinja2_transform_config_missing_input(pytester) -> None:
     result.assert_outcomes(errors=1)
 
 
-def test_jinja2_transform_no_expected_output(pytester) -> None:
+def test_jinja2_transform_no_expected_output(pytester: pytest.Pytester) -> None:
     """Make sure tests succeed if no expect outputs are provided."""
     pytester.makefile(
         ".yml",
@@ -161,7 +164,7 @@ def test_jinja2_transform_no_expected_output(pytester) -> None:
     result.assert_outcomes(passed=1)
 
 
-def test_jinja2_transform_unexpected_output(pytester) -> None:
+def test_jinja2_transform_unexpected_output(pytester: pytest.Pytester) -> None:
     """Make sure tests fail if the expected and computed outputs don't match."""
     pytester.makefile(
         ".yml",
@@ -233,7 +236,7 @@ def test_jinja2_transform_unexpected_output(pytester) -> None:
     result.assert_outcomes(failed=1)
 
 
-def test_python_transform(pytester) -> None:
+def test_python_transform(pytester: pytest.Pytester) -> None:
     pytester.makefile(
         ".yml",
         test_python_transform="""

From f3edb52fd67015736b20f3006a63c4aae3c99d96 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Mon, 22 Dec 2025 17:22:35 +0100
Subject: [PATCH 15/27] Add annotations for clients and client_type

Also fixes the getattr() approach that was used to select the previously
untyped clients variable depending on which client the test needs.
---
 tests/unit/sdk/test_node.py | 234 ++++++++++++++++----------
 1 file changed, 115 insertions(+), 119 deletions(-)

diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py
index 87c2dd53..fe4fdd54 100644
--- a/tests/unit/sdk/test_node.py
+++ b/tests/unit/sdk/test_node.py
@@ -24,11 +24,9 @@
 if TYPE_CHECKING:
     from pytest_httpx import HTTPXMock
 
-    from infrahub_sdk.client import InfrahubClient, InfrahubClientSync
     from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI
     from tests.unit.sdk.conftest import BothClients
 
-# type: ignore[attr-defined]
 
 async_node_methods = [
     method for method in dir(InfrahubNode) if not method.startswith("_") and method not in {"hfid", "hfid_str"}
@@ -134,7 +132,7 @@ def test_parse_human_friendly_id(hfid: str, expected_kind: str, expected_hfid: l
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_init_node_no_data(client, location_schema, client_type) -> None:
+async def test_init_node_no_data(client, location_schema, client_type: str) -> None:
     if client_type == "standard":
         node = InfrahubNode(client=client, schema=location_schema)
     else:
@@ -147,7 +145,7 @@ async def test_init_node_no_data(client, location_schema, client_type: str) -> None:
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_node_hfid(client, schema_with_hfid, client_type) -> None:
+async def test_node_hfid(client, schema_with_hfid, client_type: str) -> None:
     location_data = {"name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, "type": {"value": "SITE"}}
     if client_type == "standard":
         location = InfrahubNode(client=client, schema=schema_with_hfid["location"], data=location_data)
@@ -170,7 +168,7 @@ async def test_node_hfid(client, schema_with_hfid, client_type: str) -> None:
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_init_node_data_user(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_init_node_data_user(client, location_schema: NodeSchemaAPI, 
client_type: str) -> None:
     data = {
         "name": {"value": "JFK1"},
         "description": {"value": "JFK Airport"},
@@ -188,7 +186,7 @@ async def test_init_node_data_user(client, location_schema: NodeSchemaAPI, clien
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_init_node_data_user_with_relationships(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_init_node_data_user_with_relationships(client, location_schema: NodeSchemaAPI, client_type: str) -> None:
     data = {
         "name": {"value": "JFK1"},
         "description": {"value": "JFK Airport"},
@@ -228,7 +226,7 @@ async def test_init_node_data_user_with_relationships(client, location_schema: N
     ],
 )
 async def test_init_node_data_user_with_relationships_using_related_node(
-    client, location_schema: NodeSchemaAPI, client_type, rel_data
+    client, location_schema: NodeSchemaAPI, client_type: str, rel_data
 ) -> None:
     rel_schema = location_schema.get_relationship(name="primary_tag")
     if client_type == "standard":
@@ -275,7 +273,7 @@ async def test_init_node_data_user_with_relationships_using_related_node(
 @pytest.mark.parametrize("property_test", property_tests)
 @pytest.mark.parametrize("client_type", client_types)
 async def test_init_node_data_graphql(
-    client, location_schema: NodeSchemaAPI, location_data01, location_data01_property, client_type, property_test
+    client, location_schema: NodeSchemaAPI, location_data01, location_data01_property, client_type: str, property_test
 ) -> None:
     location_data = location_data01 if property_test == WITHOUT_PROPERTY else location_data01_property
 
@@ -298,14 +296,14 @@ async def test_init_node_data_graphql(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_no_filters_property(clients, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_no_filters_property(
+    clients: BothClients, location_schema: NodeSchemaAPI, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data(property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data(property=True)
 
     assert data == {
@@ -398,14 +396,12 @@ async def test_query_data_no_filters_property(clients, location_schema: NodeSche
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_no_filters(clients, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_no_filters(clients: BothClients, location_schema: NodeSchemaAPI, client_type: str) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data()
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data()
 
     assert data == {
@@ -442,14 +438,12 @@ async def test_query_data_no_filters(clients, location_schema: NodeSchemaAPI, cl
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_node_property(clients, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_node_property(clients: BothClients, location_schema: NodeSchemaAPI, client_type: str) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data_node(property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data_node(property=True)
 
     assert data == {
@@ -506,14 +500,12 @@ async def test_query_data_node_property(clients, location_schema: NodeSchemaAPI,
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_node(clients, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_node(clients: BothClients, location_schema: NodeSchemaAPI, client_type: str) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data_node()
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data_node()
 
     assert data == {
@@ -538,16 +530,16 @@ async def test_query_data_node(clients, location_schema: NodeSchemaAPI, client_t
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_with_prefetch_relationships_property(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_with_prefetch_relationships_property(
+    clients: BothClients, mock_schema_query_02, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        location_schema: GenericSchema = await clients.standard.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data(prefetch_relationships=True, property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        location_schema: GenericSchema = clients.sync.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data(prefetch_relationships=True, property=True)
 
     assert data == {
@@ -674,16 +666,16 @@ async def test_query_data_with_prefetch_relationships_property(clients, mock_sch
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_with_prefetch_relationships(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_with_prefetch_relationships(
+    clients: BothClients, mock_schema_query_02, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        location_schema: GenericSchema = await clients.standard.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data(prefetch_relationships=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        location_schema: GenericSchema = clients.sync.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data(prefetch_relationships=True)
 
     assert data == {
@@ -726,16 +718,16 @@ async def test_query_data_with_prefetch_relationships(clients, mock_schema_query
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_node_with_prefetch_relationships_property(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_node_with_prefetch_relationships_property(
+    clients: BothClients, mock_schema_query_02, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        location_schema: GenericSchema = await clients.standard.schema.get(kind="BuiltinLocation")  # type: ignore[assignment]
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data_node(prefetch_relationships=True, property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        location_schema: GenericSchema = clients.sync.schema.get(kind="BuiltinLocation")  # type: ignore[assignment]
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data_node(prefetch_relationships=True, property=True)
 
     assert data == {
@@ -802,16 +794,16 @@ async def test_query_data_node_with_prefetch_relationships_property(clients, moc
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_node_with_prefetch_relationships(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_node_with_prefetch_relationships(
+    clients: BothClients, mock_schema_query_02, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=location_schema)
+        location_schema: GenericSchema = await clients.standard.schema.get(kind="BuiltinLocation")  # type: ignore[assignment]
+        node = InfrahubNode(client=clients.standard, schema=location_schema)
         data = await node.generate_query_data_node(prefetch_relationships=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=location_schema)
+        location_schema: GenericSchema = clients.sync.schema.get(kind="BuiltinLocation")  # type: ignore[assignment]
+        node = InfrahubNodeSync(client=clients.sync, schema=location_schema)
         data = node.generate_query_data_node(prefetch_relationships=True)
 
     assert data == {
@@ -842,16 +834,14 @@ async def test_query_data_node_with_prefetch_relationships(clients, mock_schema_
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_generic_property(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_generic_property(clients: BothClients, mock_schema_query_02, client_type: str) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = await client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = await clients.standard.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNode(client=clients.standard, schema=corenode_schema)
         data = await node.generate_query_data(fragment=False, property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = clients.sync.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNodeSync(client=clients.sync, schema=corenode_schema)
         data = node.generate_query_data(fragment=False, property=True)
 
     assert data == {
@@ -871,16 +861,16 @@ async def test_query_data_generic_property(clients, mock_schema_query_02, client
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_generic_fragment_property(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_generic_fragment_property(
+    clients: BothClients, mock_schema_query_02, client_type: str
+) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = await client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = await clients.standard.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNode(client=clients.standard, schema=corenode_schema)
         data = await node.generate_query_data(fragment=True, property=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = clients.sync.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNodeSync(client=clients.sync, schema=corenode_schema)
         data = node.generate_query_data(fragment=True, property=True)
 
     assert data == {
@@ -1017,16 +1007,14 @@ async def test_query_data_generic_fragment_property(clients, mock_schema_query_0
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_generic_fragment(clients, mock_schema_query_02, client_type) -> None:
+async def test_query_data_generic_fragment(clients: BothClients, mock_schema_query_02, client_type: str) -> None:
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = await client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNode(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = await clients.standard.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNode(client=clients.standard, schema=corenode_schema)
         data = await node.generate_query_data(fragment=True)
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        corenode_schema: GenericSchema = client.schema.get(kind="CoreNode")  # type: ignore[annotation-unchecked]
-        node = InfrahubNodeSync(client=client, schema=corenode_schema)
+        corenode_schema: GenericSchema = clients.sync.schema.get(kind="CoreNode")  # type: ignore[assignment]
+        node = InfrahubNodeSync(client=clients.sync, schema=corenode_schema)
        data = node.generate_query_data(fragment=True)
 
     assert data == {
@@ -1083,7 +1071,7 @@ async def test_query_data_include_property(
     client,
     client_sync,
     location_schema: NodeSchemaAPI,
-    client_type,
+    client_type: str,
 ) -> None:
     if client_type == "standard":
         await set_builtin_tag_schema_cache(client)
@@ -1213,7 +1201,7 @@ async def test_query_data_include(
     client,
     client_sync,
     location_schema: NodeSchemaAPI,
-    client_type,
+    client_type: str,
 ) -> None:
     if client_type == "standard":
         await set_builtin_tag_schema_cache(client)
@@ -1269,7 +1257,7 @@ async def test_query_data_include(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_exclude_property(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_exclude_property(client, location_schema: NodeSchemaAPI, client_type: str) -> None:
     if client_type == "standard":
         node = InfrahubNode(client=client, schema=location_schema)
         data = await node.generate_query_data(exclude=["description", "primary_tag"], property=True)
@@ -1328,7 +1316,7 @@ async def test_query_data_exclude_property(client, location_schema: NodeSchemaAP
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_query_data_exclude(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_query_data_exclude(client, location_schema: NodeSchemaAPI, client_type: str) -> None:
     if client_type == "standard":
         node = InfrahubNode(client=client, schema=location_schema)
         data = await node.generate_query_data(exclude=["description", "primary_tag"])
@@ -1359,7 +1347,7 @@ async def test_query_data_exclude(client, location_schema: NodeSchemaAPI, client
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data(client, location_schema: NodeSchemaAPI, client_type) -> None:
+async def test_create_input_data(client, location_schema: NodeSchemaAPI, client_type: str) -> None:
     data = {"name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, "type": {"value": "SITE"}}
 
     if client_type == "standard":
@@ -1378,7 +1366,7 @@ async def test_create_input_data(client, location_schema: NodeSchemaAPI, client_
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_dropdown(client, location_schema_with_dropdown, client_type) -> None:
+async def test_create_input_data_with_dropdown(client, location_schema_with_dropdown, client_type: str) -> None:
     """Validate input data including dropdown field"""
     data = {
         "name": {"value": "JFK1"},
@@ -1406,7 +1394,7 @@ async def test_create_input_data_with_dropdown(client, location_schema_with_drop
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data__with_relationships_02(client, location_schema, client_type) -> None:
+async def test_create_input_data__with_relationships_02(client, location_schema, client_type: str) -> None:
     """Validate input data with variables that needs replacements"""
     data = {
         "name": {"value": "JFK1"},
@@ -1440,7 +1428,7 @@ async def test_create_input_data__with_relationships_02(client, location_schema,
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data__with_relationships_01(client, location_schema, client_type) -> None:
+async def test_create_input_data__with_relationships_01(client, location_schema, client_type: str) -> None:
     data = {
         "name": {"value": "JFK1"},
         "description": {"value": "JFK Airport"},
@@ -1466,7 +1454,7 @@ async def test_create_input_data__with_relationships_01(client, location_schema,
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_relationships_02(clients, rfile_schema, client_type) -> None:
+async def test_create_input_data_with_relationships_02(clients: BothClients, rfile_schema, client_type: str) -> None:
     data = {
         "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff", "owner": "ffffffff"},
         "template_path": {"value": "mytemplate.j2"},
@@ -1505,7 +1493,7 @@ async def test_create_input_data_with_relationships_02(clients, rfile_schema, cl
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_relationships_03(clients, rfile_schema, client_type) -> None:
+async def test_create_input_data_with_relationships_03(clients: BothClients, rfile_schema, client_type: str) -> None:
     data = {
         "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff"},
         "template_path": {"value": "mytemplate.j2"},
@@ -1538,11 +1526,11 @@ async def test_create_input_data_with_relationships_03(clients, rfile_schema, cl
 @pytest.mark.parametrize("property_test", property_tests)
 @pytest.mark.parametrize("client_type", client_types)
 async def test_create_input_data_with_relationships_03_for_update_include_unmodified(
-    clients,
+    clients: BothClients,
     rfile_schema,
     rfile_userdata01,
     rfile_userdata01_property,
-    client_type,
+    client_type: str,
     property_test,
 ) -> None:
     rfile_userdata = rfile_userdata01 if property_test == WITHOUT_PROPERTY else rfile_userdata01_property
@@ -1596,11 +1584,11 @@ async def test_create_input_data_with_relationships_03_for_update_include_unmodi
 @pytest.mark.parametrize("property_test", property_tests)
 @pytest.mark.parametrize("client_type", client_types)
 async def test_create_input_data_with_relationships_03_for_update_exclude_unmodified(
-    clients,
+    clients: BothClients,
     rfile_schema,
     rfile_userdata01,
     rfile_userdata01_property,
-    client_type,
+    client_type: str,
     property_test,
 ) -> None:
     """NOTE: Need to fix this test, the issue is tracked in https://github.com/opsmill/infrahub-sdk-python/issues/214."""
@@ -1639,7 +1627,7 @@ async def test_create_input_data_with_relationships_03_for_update_exclude_unmodi
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_IPHost_attribute(client, ipaddress_schema, client_type) -> None:
+async def test_create_input_data_with_IPHost_attribute(client, ipaddress_schema, client_type: str) -> None:
     data = {"address": {"value": ipaddress.ip_interface("1.1.1.1/24"), "is_protected": True}}
 
     if client_type == "standard":
@@ -1653,7 +1641,7 @@ async def test_create_input_data_with_IPHost_attribute(client, ipaddress_schema,
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_IPNetwork_attribute(client, ipnetwork_schema, client_type) -> None:
+async def test_create_input_data_with_IPNetwork_attribute(client, ipnetwork_schema, client_type: str) -> None:
     data = {"network": {"value": ipaddress.ip_network("1.1.1.0/24"), "is_protected": True}}
 
     if client_type == "standard":
@@ -1677,7 +1665,7 @@ async def test_update_input_data__with_relationships_01(
     tag_blue_data,
     tag_green_data,
     tag_red_data,
-    client_type,
+    client_type: str,
     property_test,
 ) -> None:
     location_data = location_data01 if property_test == WITHOUT_PROPERTY else location_data01_property
@@ -1733,7 +1721,7 @@ async def test_update_input_data__with_relationships_01(
 @pytest.mark.parametrize("property_test", property_tests)
 @pytest.mark.parametrize("client_type", client_types)
 async def test_update_input_data_with_relationships_02(
-    client, location_schema, location_data02, location_data02_property, client_type, property_test
+    client, location_schema, location_data02, location_data02_property, client_type: str, property_test
 ) -> None:
     location_data = location_data02 if property_test == WITHOUT_PROPERTY else location_data02_property
 
@@ -1804,7 +1792,7 @@ async def test_update_input_data_with_relationships_02(
 @pytest.mark.parametrize("property_test", property_tests)
 @pytest.mark.parametrize("client_type", client_types)
 async def test_update_input_data_with_relationships_02_exclude_unmodified(
-    client, location_schema, location_data02, location_data02_property, client_type, property_test
+    client, location_schema, location_data02, location_data02_property, client_type: str, property_test
 ) -> None:
     """NOTE Need to fix this test, issue is tracked in https://github.com/opsmill/infrahub-sdk-python/issues/214."""
     location_data = location_data02 if property_test == WITHOUT_PROPERTY else location_data02_property
@@ -1848,7 +1836,7 @@ async def test_update_input_data_empty_relationship(
     location_data01_property,
     tag_schema,
     tag_blue_data,
-    client_type,
+    client_type: str,
     property_test,
 ) -> None:
     """TODO: investigate why name and type are being returned since they haven't been modified."""
@@ -1905,7 +1893,7 @@ async def test_node_get_relationship_from_store(
     tag_schema,
     tag_red_data,
     tag_blue_data,
-    client_type,
+    client_type: str,
 ) -> None:
     if client_type == "standard":
         node = InfrahubNode(client=client, schema=location_schema, data=location_data01)
@@ -1927,7 +1915,7 @@ async def test_node_get_relationship_from_store(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_node_get_relationship_not_in_store(client, location_schema, location_data01, client_type) -> None:
+async def test_node_get_relationship_not_in_store(client, location_schema, location_data01, client_type: str) -> None:
     if client_type == "standard":
         node = InfrahubNode(client=client, schema=location_schema, data=location_data01)
     else:
@@ -1944,13 +1932,13 @@ async def test_node_get_relationship_not_in_store(client, location_schema, locat
 async def test_node_fetch_relationship(
     httpx_mock: HTTPXMock,
     mock_schema_query_01,
-    clients,
+    clients: BothClients,
     location_schema,
     location_data01,
     tag_schema,
     tag_red_data,
     tag_blue_data,
-    client_type,
+    client_type: str,
 ) -> None:
     response1 = {
         "data": {
@@ -2013,7 +2001,7 @@ async def test_node_fetch_relationship(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_node_IPHost_deserialization(client, ipaddress_schema, client_type) -> None:
+async def test_node_IPHost_deserialization(client, ipaddress_schema, client_type: str) -> None:
     data = {
         "id": "aaaaaaaaaaaaaa",
         "address": {
@@ -2030,7 +2018,7 @@ async def test_node_IPHost_deserialization(client, ipaddress_schema, client_type
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_node_IPNetwork_deserialization(client, ipnetwork_schema, client_type) -> None:
+async def test_node_IPNetwork_deserialization(client, ipnetwork_schema, client_type: str) -> None:
     data = {
         "id": "aaaaaaaaaaaaaa",
         "network": {
@@ -2048,7 +2036,12 @@ async def test_node_IPNetwork_deserialization(client, ipnetwork_schema, client_t
 
 @pytest.mark.parametrize("client_type", client_types)
 async def test_get_flat_value(
-    httpx_mock: HTTPXMock, mock_schema_query_01, clients, location_schema, location_data01, client_type
+    httpx_mock: HTTPXMock,
+    mock_schema_query_01,
+    clients: BothClients,
+    location_schema,
+    location_data01,
+    client_type: str,
 ) -> None:
     httpx_mock.add_response(
         method="POST",
@@ -2076,7 +2069,7 @@ async def test_get_flat_value(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_node_extract(clients, location_schema, location_data01, client_type) -> None:
+async def test_node_extract(clients: BothClients, location_schema, location_data01, client_type: str) -> None:
     params = {"identifier": "id", "name": "name__value", "description": "description__value"}
     if client_type == "standard":
         node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data01)
@@ -2100,7 +2093,7 @@ async def test_read_only_attr(
     client,
     address_schema,
     address_data,
-    client_type,
+    client_type: str,
 ) -> None:
     if client_type == "standard":
         address = InfrahubNode(client=client, schema=address_schema, data=address_data)
@@ -2119,7 +2112,7 @@ async def test_read_only_attr(
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_relationships_excluded_input_data(client, location_schema, client_type) -> None:
+async def test_relationships_excluded_input_data(client, location_schema, client_type: str) -> None:
     data = {
         "name": {"value": "JFK1"},
         "description": {"value": "JFK Airport"},
@@ -2137,7 +2130,7 @@ async def test_relationships_excluded_input_data(client, location_schema, client
 
 @pytest.mark.parametrize("client_type", client_types)
 async def test_create_input_data_with_resource_pool_relationship(
-    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type
+    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type: str
 ) -> None:
     if client_type == "standard":
         ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
@@ -2189,7 +2182,7 @@ async def test_create_input_data_with_resource_pool_relationship(
 
 @pytest.mark.parametrize("client_type", client_types)
 async def test_create_mutation_query_with_resource_pool_relationship(
-    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type
+    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type: str
 ) -> None:
     if client_type == "standard":
         ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
@@ -2244,11 +2237,11 @@ async def test_create_mutation_query_with_resource_pool_relationship(
 async def test_get_pool_allocated_resources(
     httpx_mock: HTTPXMock,
     mock_schema_query_ipam: HTTPXMock,
-    clients,
+    clients: BothClients,
     ipaddress_pool_schema,
     ipam_ipprefix_schema,
     ipam_ipprefix_data,
-    client_type,
+    client_type: str,
 ) -> None:
     httpx_mock.add_response(
         method="POST",
@@ -2296,10 +2289,9 @@ async def test_get_pool_allocated_resources(
     )
 
     if client_type == "standard":
-        client: InfrahubClient = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+        ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
         ip_pool = InfrahubNode(
-            client=client,
+            client=clients.standard,
             schema=ipaddress_pool_schema,
             data={
                 "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
@@ -2318,10 +2310,9 @@ async def test_get_pool_allocated_resources(
             "17d9bd8e-31ee-acf0-2786-179fb76f2f67",
         ]
     else:
-        client: InfrahubClientSync = getattr(clients, client_type)  # type: ignore[annotation-unchecked]
-        ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
+        ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
         ip_pool = InfrahubNodeSync(
-            client=client,
+            client=clients.sync,
            schema=ipaddress_pool_schema,
             data={
                 "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
@@ -2343,7 +2334,12 @@ async def test_get_pool_resources_utilization(
-    httpx_mock: HTTPXMock, clients, ipaddress_pool_schema, ipam_ipprefix_schema, ipam_ipprefix_data, client_type
+    httpx_mock: HTTPXMock,
+    clients: BothClients,
+    ipaddress_pool_schema,
+    ipam_ipprefix_schema,
+    ipam_ipprefix_data,
+    client_type: str,
 ) -> None:
     httpx_mock.add_response(
         method="POST",
@@ -2406,7 +2402,7 @@
-async def test_from_graphql(clients, mock_schema_query_01, location_data01, client_type) -> None:
+async def test_from_graphql(clients: BothClients, mock_schema_query_01, location_data01, client_type: str) -> None:
     if client_type == "standard":
         schema = await clients.standard.schema.get(kind="BuiltinLocation", branch="main")
         node = await InfrahubNode.from_graphql(

From 1ba4af3ecd5ad8bc7e25a813c39d2e0eba958179 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Tue, 30 Dec 2025 08:51:16 +0100
Subject: [PATCH 16/27] Fix annotations on CTL tests

---
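Reviewer note (not part of the commit, placed in the git-am-ignored area): the pattern
applied throughout this patch is to annotate fixture parameters with the concrete types
pytest injects at runtime, so the per-file ANN001 exemptions in pyproject.toml can be
dropped. A minimal sketch of the before/after shape, taken from the test_branch_app.py
hunk below:

    # before: ruff ANN001 flags the untyped fixture argument
    def test_branch_list(mock_branches_list_query) -> None: ...

    # after: the mocking fixture yields an HTTPXMock, so annotate it as such
    from pytest_httpx import HTTPXMock

    def test_branch_list(mock_branches_list_query: HTTPXMock) -> None: ...
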
 pyproject.toml                        |  7 -------
 tests/unit/ctl/test_branch_app.py     |  8 +++++---
 tests/unit/ctl/test_branch_report.py  |  2 +-
 tests/unit/ctl/test_cli.py            |  9 ++++++---
 tests/unit/ctl/test_render_app.py     |  9 +++++++--
 tests/unit/ctl/test_repository_app.py | 13 +++++++------
 6 files changed, 26 insertions(+), 22 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index cbbabf57..b210e390 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -370,13 +370,6 @@ max-complexity = 17
 "tests/integration/test_convert_object_type.py" = ["ANN001"] # 3 errors
 "tests/integration/test_repository.py" = ["ANN001"] # 1 error
 
-# tests/unit/ctl/ - 25 errors total
-"tests/unit/ctl/test_repository_app.py" = ["ANN001"] # 11 errors
-"tests/unit/ctl/test_render_app.py" = ["ANN001"] # 5 errors
-"tests/unit/ctl/test_cli.py" = ["ANN001"] # 5 errors
-"tests/unit/ctl/test_branch_app.py" = ["ANN001"] # 3 errors
-"tests/unit/ctl/test_branch_report.py" = ["ANN001"] # 1 error
-
 "tasks.py" = [
     "PLC0415", # `import` should be at the top-level of a file
 ]
diff --git a/tests/unit/ctl/test_branch_app.py b/tests/unit/ctl/test_branch_app.py
index 685a7c6f..12b16f7e 100644
--- a/tests/unit/ctl/test_branch_app.py
+++ b/tests/unit/ctl/test_branch_app.py
@@ -1,3 +1,5 @@
+from typing import Any
+
 from pytest_httpx import HTTPXMock
 from typer.testing import CliRunner
 
@@ -6,13 +8,13 @@
 runner = CliRunner()
 
 
-def test_branch_list(mock_branches_list_query) -> None:
+def test_branch_list(mock_branches_list_query: HTTPXMock) -> None:
     result = runner.invoke(app=app, args=["list"])
     assert result.exit_code == 0
     assert "cr1234" in result.stdout
 
 
-def test_branch_create_no_auth(httpx_mock: HTTPXMock, authentication_error_payload) -> None:
+def test_branch_create_no_auth(httpx_mock: HTTPXMock, authentication_error_payload: dict[str, Any]) -> None:
     httpx_mock.add_response(
         status_code=401,
         method="POST",
@@ -24,7 +26,7 @@ def test_branch_create_no_auth(httpx_mock: HTTPXMock, authentication_error_paylo
     assert "Authentication is required" in result.stdout
 
 
-def test_branch_create_wrong_name(mock_branch_create_error) -> None:
+def test_branch_create_wrong_name(mock_branch_create_error: HTTPXMock) -> None:
     result = runner.invoke(app=app, args=["create", "branch2"])
 
     assert result.exit_code == 1
diff --git a/tests/unit/ctl/test_branch_report.py b/tests/unit/ctl/test_branch_report.py
index bea2fa2d..c9af76c5 100644
--- a/tests/unit/ctl/test_branch_report.py
+++ b/tests/unit/ctl/test_branch_report.py
@@ -203,7 +203,7 @@ def test_branch_report_command_without_proposed_change(
     assert "No proposed changes for this branch" in result.stdout
 
 
-def test_branch_report_command_main_branch(mock_branch_report_default_branch) -> None:
+def test_branch_report_command_main_branch(mock_branch_report_default_branch: HTTPXMock) -> None:
     """Test branch report CLI command on main branch."""
     runner = CliRunner()
     result = runner.invoke(app, ["report", "main"])
diff --git a/tests/unit/ctl/test_cli.py b/tests/unit/ctl/test_cli.py
index c2b856d3..41064645 100644
--- a/tests/unit/ctl/test_cli.py
+++ b/tests/unit/ctl/test_cli.py
@@ -1,4 +1,5 @@
 import pytest
+from pytest_httpx import HTTPXMock
 from typer.testing import CliRunner
 
 from infrahub_sdk.ctl.cli import app
@@ -32,7 +33,7 @@ def test_version_command() -> None:
     assert "Python SDK: v" in result.stdout
 
 
-def test_info_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
+def test_info_command_success(mock_query_infrahub_version: HTTPXMock, mock_query_infrahub_user: HTTPXMock) -> None:
     result = runner.invoke(app, ["info"], env={"INFRAHUB_API_TOKEN": "foo"})
     assert result.exit_code == 0
     for expected in ["Connection Status", "Python Version", "SDK Version", "Infrahub Version"]:
@@ -45,14 +46,16 @@ def test_info_command_failure() -> None:
     assert "Connection Error" in result.stdout
 
 
-def test_info_detail_command_success(mock_query_infrahub_version, mock_query_infrahub_user) -> None:
+def test_info_detail_command_success(
+    mock_query_infrahub_version: HTTPXMock, mock_query_infrahub_user: HTTPXMock
+) -> None:
     result = runner.invoke(app, ["info", "--detail"], env={"INFRAHUB_API_TOKEN": "foo"})
     assert result.exit_code == 0
     for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "Groups:"]:
         assert expected in result.stdout, f"'{expected}' not found in detailed info command output"
 
 
-def test_anonymous_info_detail_command_success(mock_query_infrahub_version) -> None:
+def test_anonymous_info_detail_command_success(mock_query_infrahub_version: HTTPXMock) -> None:
     result = runner.invoke(app, ["info", "--detail"])
     assert result.exit_code == 0
     for expected in ["Connection Status", "Version Information", "Client Info", "Infrahub Info", "anonymous"]:
diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py
index 88e71f86..18bf3c19 100644
--- a/tests/unit/ctl/test_render_app.py
+++ b/tests/unit/ctl/test_render_app.py
@@ -4,7 +4,7 @@
 from pathlib import Path
 
 import pytest
-from pytest_httpx._httpx_mock import HTTPXMock
+from pytest_httpx import HTTPXMock
 from typer.testing import CliRunner
 
 from infrahub_sdk.ctl.cli_commands import app
@@ -80,7 +80,12 @@ def test_validate_template_not_found(test_case: RenderAppFailure, httpx_mock: HT
     ],
 )
 def test_render_branch_selection(
-    monkeypatch, httpx_mock: HTTPXMock, cli_branch, env_branch, from_git, expected_branch
+    monkeypatch: pytest.MonkeyPatch,
+    httpx_mock: HTTPXMock,
+    cli_branch: str | None,
+    env_branch: str | None,
+    from_git: bool,
+    expected_branch: str,
 ) -> None:
     """Test that the render command uses the correct branch source."""
 
diff --git a/tests/unit/ctl/test_repository_app.py b/tests/unit/ctl/test_repository_app.py
index 45f567dd..d097e186 100644
--- a/tests/unit/ctl/test_repository_app.py
+++ b/tests/unit/ctl/test_repository_app.py
@@ -3,6 +3,7 @@
 from unittest import mock
 
 import pytest
+from pytest_httpx import HTTPXMock
 from typer.testing import CliRunner
 
 from infrahub_sdk.client import InfrahubClient
@@ -26,7 +27,7 @@ class TestInfrahubctlRepository:
     """Groups the 'infrahubctl repository' test cases."""
 
     @mock.patch("infrahub_sdk.ctl.repository.initialize_client")
-    def test_repo_no_username_or_password(self, mock_init_client, mock_client) -> None:
+    def test_repo_no_username_or_password(self, mock_init_client: mock.Mock, mock_client: mock.Mock) -> None:
         """Case allow no username to be passed in and set it as None rather than blank string that fails."""
         mock_cred = mock.AsyncMock()
         mock_cred.id = "1234"
@@ -73,7 +74,7 @@ def test_repo_no_username_or_password(self, mock_init_client, mock_client) -> No
         )
 
     @mock.patch("infrahub_sdk.ctl.repository.initialize_client")
-    def test_repo_no_username(self, mock_init_client, mock_client) -> None:
+    def test_repo_no_username(self, mock_init_client: mock.Mock, mock_client: mock.Mock) -> None:
         """Case allow no username to be passed in and set it as None rather than blank string that fails."""
         mock_cred = mock.AsyncMock()
         mock_cred.id = "1234"
@@ -132,7 +133,7 @@ def test_repo_no_username(self, mock_init_client, mock_client) -> None:
         )
 
     @mock.patch("infrahub_sdk.ctl.repository.initialize_client")
-    def test_repo_username(self, mock_init_client, mock_client) -> None:
+    def test_repo_username(self, mock_init_client: mock.Mock, mock_client: mock.Mock) -> None:
        """Case allow no username to be passed in and set it as None rather than blank string that fails."""
         mock_cred = mock.AsyncMock()
         mock_cred.id = "1234"
@@ -193,7 +194,7 @@ def test_repo_username(self, mock_init_client, mock_client) -> None:
         )
 
     @mock.patch("infrahub_sdk.ctl.repository.initialize_client")
-    def test_repo_readonly_true(self, mock_init_client, mock_client) -> None:
+    def test_repo_readonly_true(self, mock_init_client: mock.Mock, mock_client: mock.Mock) -> None:
         """Case allow no username to be passed in and set it as None rather than blank string that fails."""
         mock_cred = mock.AsyncMock()
         mock_cred.id = "1234"
@@ -253,7 +254,7 @@ def test_repo_readonly_true(self, mock_init_client, mock_client) -> None:
         )
 
     @mock.patch("infrahub_sdk.ctl.repository.initialize_client")
-    def test_repo_description_commit_branch(self, mock_init_client, mock_client) -> None:
+    def test_repo_description_commit_branch(self, mock_init_client: mock.Mock, mock_client: mock.Mock) -> None:
         """Case allow no username to be passed in and set it as None rather than blank string that fails."""
         mock_cred = mock.AsyncMock()
         mock_cred.id = "1234"
@@ -317,7 +318,7 @@ def test_repo_description_commit_branch(self, mock_init_client, mock_client) ->
             tracker="mutation-repository-create",
         )
 
-    def test_repo_list(self, mock_repositories_list) -> None:
+    def test_repo_list(self, mock_repositories_list: HTTPXMock) -> None:
         result = runner.invoke(app, ["repository", "list"])
         assert result.exit_code == 0
         assert strip_color(result.stdout) == read_fixture("output.txt", "integration/test_infrahubctl/repository_list")

From 9e2c5606f3b2e6655573892fdf392ab566016400 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Tue, 30 Dec 2025 09:50:45 +0100
Subject: [PATCH 17/27] Fix unsupported operator on MainSchemaTypes

---
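Reviewer note (not part of the commit): `hasattr()` does not narrow the schema union for
the type checker, so the `+` concatenation on `self._schema.inherit_from` was flagged.
Fetching the attribute once with `getattr(..., None)` and binding it via the walrus
operator gives the checker a concrete local to reason about. A standalone sketch of the
same idiom — the `Schema` class here is illustrative, not the SDK's:

    class Schema:
        kind: str = "Widget"
        inherit_from = ["CoreNode"]  # hypothetical; may be absent on some schema kinds

    def get_all_kinds(schema: Schema) -> list[str]:
        # getattr with a default yields a checkable local; hasattr() would not narrow
        if inherit_from := getattr(schema, "inherit_from", None):
            return [schema.kind] + inherit_from
        return [schema.kind]
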
 infrahub_sdk/node/node.py | 4 ++--
 pyproject.toml            | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py
index ecf1b773..ff24821c 100644
--- a/infrahub_sdk/node/node.py
+++ b/infrahub_sdk/node/node.py
@@ -198,8 +198,8 @@ def get_kind(self) -> str:
         return self._schema.kind
 
     def get_all_kinds(self) -> list[str]:
-        if hasattr(self._schema, "inherit_from"):
-            return [self._schema.kind] + self._schema.inherit_from
+        if inherit_from := getattr(self._schema, "inherit_from", None):
+            return [self._schema.kind] + inherit_from
         return [self._schema.kind]
 
     def is_ip_prefix(self) -> bool:
diff --git a/pyproject.toml b/pyproject.toml
index cbbabf57..362f38e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -136,7 +136,6 @@ invalid-await = "ignore"
 invalid-type-form = "ignore"
 no-matching-overload = "ignore"
 unresolved-attribute = "ignore"
-unsupported-operator = "ignore"
 
 [[tool.ty.overrides]]
 include = ["infrahub_sdk/ctl/config.py"]

From 36e2de827b2dfcf4d3ff31eb09931b85e9ce4fbd Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Tue, 30 Dec 2025 10:19:39 +0100
Subject: [PATCH 18/27] Upgrade ty=0.0.8

---
 pyproject.toml |  4 ++--
 uv.lock        | 46 +++++++++++++++++++++++-----------------------
 2 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index cbbabf57..1d85244a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,7 +82,7 @@ lint = [
     "mypy==1.11.2",
     "ruff==0.14.5",
     "astroid>=3.1,<4.0",
-    "ty==0.0.5",
+    "ty==0.0.8",
 ]
 types = [
     "types-ujson",
@@ -157,7 +157,7 @@ invalid-argument-type = "ignore"
 invalid-assignment = "ignore"
 invalid-method-override = "ignore"
 no-matching-overload = "ignore"
-non-subscriptable = "ignore"
+not-subscriptable = "ignore"
 not-iterable = "ignore"
 possibly-missing-attribute = "ignore"
 unresolved-attribute = "ignore"
diff --git a/uv.lock b/uv.lock
index 20fe380c..c4145021 100644
--- a/uv.lock
+++ b/uv.lock
@@ -868,7 +868,7 @@ dev = [
     { name = "requests" },
     { name = "ruff", specifier = "==0.14.5" },
     { name = "towncrier", specifier = ">=24.8.0" },
-    { name = "ty", specifier = "==0.0.5" },
+    { name = "ty", specifier = "==0.0.8" },
     { name = "types-python-slugify", specifier = ">=8.0.0.3" },
     { name = "types-pyyaml" },
     { name = "types-ujson" },
@@ -878,7 +878,7 @@ lint = [
     { name = "astroid", specifier = ">=3.1,<4.0" },
     { name = "mypy", specifier = "==1.11.2" },
     { name = "ruff", specifier = "==0.14.5" },
-    { name = "ty", specifier = "==0.0.5" },
+    { name = "ty", specifier = "==0.0.8" },
     { name = "yamllint" },
 ]
 tests = [
@@ -2754,27 +2754,27 @@ wheels = [
 
 [[package]]
 name = "ty"
-version = "0.0.5"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/9e/db/6299d478000f4f1c6f9bf2af749359381610ffc4cbe6713b66e436ecf6e7/ty-0.0.5.tar.gz", hash = "sha256:983da6330773ff71e2b249810a19c689f9a0372f6e21bbf7cde37839d05b4346", size = 4806218, upload-time = "2025-12-20T21:19:17.24Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/7c/98/c1f61ba378b4191e641bb36c07b7fcc70ff844d61be7a4bf2fea7472b4a9/ty-0.0.5-py3-none-linux_armv6l.whl", hash = "sha256:1594cd9bb68015eb2f5a3c68a040860f3c9306dc6667d7a0e5f4df9967b460e2", size = 9785554, upload-time = "2025-12-20T21:19:05.024Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/f9/b37b77c03396bd779c1397dae4279b7ad79315e005b3412feed8812a4256/ty-0.0.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7c0140ba980233d28699d9ddfe8f43d0b3535d6a3bbff9935df625a78332a3cf", size = 9603995, upload-time = "2025-12-20T21:19:15.256Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/70/4e75c11903b0e986c0203040472627cb61d6a709e1797fb08cdf9d565743/ty-0.0.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:15de414712cde92048ae4b1a77c4dc22920bd23653fe42acaf73028bad88f6b9", size = 9145815, upload-time = "2025-12-20T21:19:36.481Z" },
-    { url = "https://files.pythonhosted.org/packages/89/05/93983dfcf871a41dfe58e5511d28e6aa332a1f826cc67333f77ae41a2f8a/ty-0.0.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:438aa51ad6c5fae64191f8d58876266e26f9250cf09f6624b6af47a22fa88618", size = 9619849, upload-time = "2025-12-20T21:19:19.084Z" },
-    { url = "https://files.pythonhosted.org/packages/82/b6/896ab3aad59f846823f202e94be6016fb3f72434d999d2ae9bd0f28b3af9/ty-0.0.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b3d373fd96af1564380caf153600481c676f5002ee76ba8a7c3508cdff82ee0", size = 9606611, upload-time = "2025-12-20T21:19:24.583Z" },
-    { url = "https://files.pythonhosted.org/packages/ca/ae/098e33fc92330285ed843e2750127e896140c4ebd2d73df7732ea496f588/ty-0.0.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8453692503212ad316cf8b99efbe85a91e5f63769c43be5345e435a1b16cba5a", size = 10029523, upload-time = "2025-12-20T21:19:07.055Z" },
-    { url = "https://files.pythonhosted.org/packages/04/5a/f4b4c33758b9295e9aca0de9645deca0f4addd21d38847228723a6e780fc/ty-0.0.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2e4c454139473abbd529767b0df7a795ed828f780aef8d0d4b144558c0dc4446", size = 10870892, upload-time = "2025-12-20T21:19:34.495Z" },
-    { url = "https://files.pythonhosted.org/packages/c3/c5/4e3e7e88389365aa1e631c99378711cf0c9d35a67478cb4720584314cf44/ty-0.0.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:426d4f3b82475b1ec75f3cc9ee5a667c8a4ae8441a09fcd8e823a53b706d00c7", size = 10599291, upload-time = "2025-12-20T21:19:26.557Z" },
-    { url = "https://files.pythonhosted.org/packages/c1/5d/138f859ea87bd95e17b9818e386ae25a910e46521c41d516bf230ed83ffc/ty-0.0.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5710817b67c6b2e4c0224e4f319b7decdff550886e9020f6d46aa1ce8f89a609", size = 10413515, upload-time = "2025-12-20T21:19:11.094Z" },
-    { url = "https://files.pythonhosted.org/packages/27/21/1cbcd0d3b1182172f099e88218137943e0970603492fb10c7c9342369d9a/ty-0.0.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23c55ef08882c7c5ced1ccb90b4eeefa97f690aea254f58ac0987896c590f76", size = 10144992, upload-time = "2025-12-20T21:19:13.225Z" },
-    { url = "https://files.pythonhosted.org/packages/ad/30/fdac06a5470c09ad2659a0806497b71f338b395d59e92611f71b623d05a0/ty-0.0.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b9e4c1a28a23b14cf8f4f793f4da396939f16c30bfa7323477c8cc234e352ac4", size = 9606408, upload-time = "2025-12-20T21:19:09.212Z" },
-    { url = "https://files.pythonhosted.org/packages/09/93/e99dcd7f53295192d03efd9cbcec089a916f49cad4935c0160ea9adbd53d/ty-0.0.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4e9ebb61529b9745af662e37c37a01ad743cdd2c95f0d1421705672874d806cd", size = 9630040, upload-time = "2025-12-20T21:19:38.165Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/f8/6d1e87186e4c35eb64f28000c1df8fd5f73167ce126c5e3dd21fd1204a23/ty-0.0.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5eb191a8e332f50f56dfe45391bdd7d43dd4ef6e60884710fd7ce84c5d8c1eb5", size = 9754016, upload-time = "2025-12-20T21:19:32.79Z" },
-    { url = "https://files.pythonhosted.org/packages/28/e6/20f989342cb3115852dda404f1d89a10a3ce93f14f42b23f095a3d1a00c9/ty-0.0.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:92ed7451a1e82ee134a2c24ca43b74dd31e946dff2b08e5c34473e6b051de542", size = 10252877, upload-time = "2025-12-20T21:19:20.787Z" },
-    { url = "https://files.pythonhosted.org/packages/57/9d/fc66fa557443233dfad9ae197ff3deb70ae0efcfb71d11b30ef62f5cdcc3/ty-0.0.5-py3-none-win32.whl", hash = "sha256:71f6707e4c1c010c158029a688a498220f28bb22fdb6707e5c20e09f11a5e4f2", size = 9212640, upload-time = "2025-12-20T21:19:30.817Z" },
-    { url = "https://files.pythonhosted.org/packages/68/b6/05c35f6dea29122e54af0e9f8dfedd0a100c721affc8cc801ebe2bc2ed13/ty-0.0.5-py3-none-win_amd64.whl", hash = "sha256:2b8b754a0d7191e94acdf0c322747fec34371a4d0669f5b4e89549aef28814ae", size = 10034701, upload-time = "2025-12-20T21:19:28.311Z" },
-    { url = "https://files.pythonhosted.org/packages/df/ca/4201ed5cb2af73912663d0c6ded927c28c28b3c921c9348aa8d2cfef4853/ty-0.0.5-py3-none-win_arm64.whl", hash = "sha256:83bea5a5296caac20d52b790ded2b830a7ff91c4ed9f36730fe1f393ceed6654", size = 9566474, upload-time = "2025-12-20T21:19:22.518Z" },
+version = "0.0.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/9d/59e955cc39206a0d58df5374808785c45ec2a8a2a230eb1638fbb4fe5c5d/ty-0.0.8.tar.gz", hash = "sha256:352ac93d6e0050763be57ad1e02087f454a842887e618ec14ac2103feac48676", size = 4828477, upload-time = "2025-12-29T13:50:07.193Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/69/2b/dd61f7e50a69c72f72c625d026e9ab64a0db62b2dd32e7426b520e2429c6/ty-0.0.8-py3-none-linux_armv6l.whl", hash = "sha256:a289d033c5576fa3b4a582b37d63395edf971cdbf70d2d2e6b8c95638d1a4fcd", size = 9853417, upload-time = "2025-12-29T13:50:08.979Z" },
+    { url = "https://files.pythonhosted.org/packages/90/72/3f1d3c64a049a388e199de4493689a51fc6aa5ff9884c03dea52b4966657/ty-0.0.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:788ea97dc8153a94e476c4d57b2551a9458f79c187c4aba48fcb81f05372924a", size = 9657890, upload-time = "2025-12-29T13:50:27.867Z" },
+    { url = "https://files.pythonhosted.org/packages/71/d1/08ac676bd536de3c2baba0deb60e67b3196683a2fabebfd35659d794b5e9/ty-0.0.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1b5f1f3d3e230f35a29e520be7c3d90194a5229f755b721e9092879c00842d31", size = 9180129, upload-time = "2025-12-29T13:50:22.842Z" },
+    { url = "https://files.pythonhosted.org/packages/af/93/610000e2cfeea1875900f73a375ba917624b0a008d4b8a6c18c894c8dbbc/ty-0.0.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6da9ed377fbbcec0a3b60b2ca5fd30496e15068f47cef2344ba87923e78ba996", size = 9683517, upload-time = "2025-12-29T13:50:18.658Z" },
+    { url = "https://files.pythonhosted.org/packages/05/04/bef50ba7d8580b0140be597de5cc0ba9a63abe50d3f65560235f23658762/ty-0.0.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7d0a2bdce5e701d19eb8d46d9da0fe31340f079cecb7c438f5ac6897c73fc5ba", size = 9676279, upload-time = "2025-12-29T13:50:25.207Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/b9/2aff1ef1f41b25898bc963173ae67fc8f04ca666ac9439a9c4e78d5cc0ff/ty-0.0.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef9078799d26d3cc65366e02392e2b78f64f72911b599e80a8497d2ec3117ddb", size = 10073015, upload-time = "2025-12-29T13:50:35.422Z" },
+    { url = "https://files.pythonhosted.org/packages/df/0e/9feb6794b6ff0a157c3e6a8eb6365cbfa3adb9c0f7976e2abdc48615dd72/ty-0.0.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:54814ac39b4ab67cf111fc0a236818155cf49828976152378347a7678d30ee89", size = 10961649, upload-time = "2025-12-29T13:49:58.717Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/3b/faf7328b14f00408f4f65c9d01efe52e11b9bcc4a79e06187b370457b004/ty-0.0.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4baf0a80398e8b6c68fa36ff85045a50ede1906cd4edb41fb4fab46d471f1d4", size = 10676190, upload-time = "2025-12-29T13:50:01.11Z" },
+    { url = "https://files.pythonhosted.org/packages/64/a5/cfeca780de7eeab7852c911c06a84615a174d23e9ae08aae42a645771094/ty-0.0.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac8e23c3faefc579686799ef1649af8d158653169ad5c3a7df56b152781eeb67", size = 10438641, upload-time = "2025-12-29T13:50:29.664Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/8d/8667c7e0ac9f13c461ded487c8d7350f440cd39ba866d0160a8e1b1efd6c/ty-0.0.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b558a647a073d0c25540aaa10f8947de826cb8757d034dd61ecf50ab8dbd77bf", size = 10214082, upload-time = "2025-12-29T13:50:31.531Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/11/e563229870e2c1d089e7e715c6c3b7605a34436dddf6f58e9205823020c2/ty-0.0.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8c0104327bf480508bd81f320e22074477df159d9eff85207df39e9c62ad5e96", size = 9664364, upload-time = "2025-12-29T13:50:05.443Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/ad/05b79b778bf5237bcd7ee08763b226130aa8da872cbb151c8cfa2e886203/ty-0.0.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:496f1cb87261dd1a036a5609da80ee13de2e6ee4718a661bfa2afb91352fe528", size = 9679440, upload-time = "2025-12-29T13:50:11.289Z" },
+    { url = "https://files.pythonhosted.org/packages/12/b5/23ba887769c4a7b8abfd1b6395947dc3dcc87533fbf86379d3a57f87ae8f/ty-0.0.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2c488031f92a075ae39d13ac6295fdce2141164ec38c5d47aa8dc24ee3afa37e", size = 9808201, upload-time = "2025-12-29T13:50:21.003Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/90/5a82ac0a0707db55376922aed80cd5fca6b2e6d6e9bcd8c286e6b43b4084/ty-0.0.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90d6f08c5982fa3e802b8918a32e326153519077b827f91c66eea4913a86756a", size = 10313262, upload-time = "2025-12-29T13:50:03.306Z" },
+    { url = "https://files.pythonhosted.org/packages/14/f7/ff97f37f0a75db9495ddbc47738ec4339837867c4bfa145bdcfbd0d1eb2f/ty-0.0.8-py3-none-win32.whl", hash = "sha256:d7f460ad6fc9325e9cc8ea898949bbd88141b4609d1088d7ede02ce2ef06e776", size = 9254675, upload-time = "2025-12-29T13:50:33.35Z" },
+    { url = "https://files.pythonhosted.org/packages/af/51/eba5d83015e04630002209e3590c310a0ff1d26e1815af204a322617a42e/ty-0.0.8-py3-none-win_amd64.whl", hash = "sha256:1641fb8dedc3d2da43279d21c3c7c1f80d84eae5c264a1e8daa544458e433c19", size = 10131382, upload-time = "2025-12-29T13:50:13.719Z" },
+    { url = "https://files.pythonhosted.org/packages/38/1c/0d8454ff0f0f258737ecfe84f6e508729191d29663b404832f98fa5626b7/ty-0.0.8-py3-none-win_arm64.whl", hash = "sha256:ec74f022f315bede478ecae1277a01ab618e6500c1d68450d7883f5cd6ed554a", size = 9636374, upload-time = "2025-12-29T13:50:16.344Z" },
 ]
 
 [[package]]

From af827c138f80babc13564fbedf3c77f91aaab1f7 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Tue, 30 Dec 2025 10:16:16 +0100
Subject: [PATCH 19/27] Use pathlib instead of os for Path operations

---
 infrahub_sdk/checks.py                |  4 ++--
 infrahub_sdk/operation.py             |  4 ++--
 pyproject.toml                        |  2 --
 tests/helpers/utils.py                |  4 ++--
 tests/integration/test_infrahubctl.py |  2 +-
 tests/unit/ctl/test_render_app.py     |  3 +--
 tests/unit/ctl/test_transform_app.py  |  3 +--
 7 files changed, 9 insertions(+), 13 deletions(-)

diff --git a/infrahub_sdk/checks.py b/infrahub_sdk/checks.py
index e1dfc404..a68ce0d4 100644
--- a/infrahub_sdk/checks.py
+++ b/infrahub_sdk/checks.py
@@ -2,7 +2,7 @@
 
 import importlib
 import inspect
-import os
+import pathlib
 import warnings
 from abc import abstractmethod
 from typing import TYPE_CHECKING, Any
@@ -55,7 +55,7 @@ def __init__(
         self.branch = branch
         self.params = params or {}
-        self.root_directory = root_directory or os.getcwd()
+        self.root_directory = root_directory or str(pathlib.Path.cwd())
 
         self._client = client
diff --git a/infrahub_sdk/operation.py b/infrahub_sdk/operation.py
index 3ae3d9c9..ed0bf19a 100644
--- a/infrahub_sdk/operation.py
+++ b/infrahub_sdk/operation.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import os
+import pathlib
 from typing import TYPE_CHECKING
 
 from .repository import GitRepoManager
@@ -22,7 +22,7 @@ def __init__(
     ) -> None:
         self.branch = branch
         self.convert_query_response = convert_query_response
-        self.root_directory = root_directory or os.getcwd()
+        self.root_directory = root_directory or str(pathlib.Path.cwd())
         self.infrahub_node = infrahub_node
         self._nodes: list[InfrahubNode] = []
         self._related_nodes: list[InfrahubNode] = []
diff --git a/pyproject.toml b/pyproject.toml
index cbbabf57..99466282 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -255,8 +255,6 @@ ignore = [
     "PLR6301", # Method could be a function, class method, or static method
     "PLW0603", # Using the global statement to update `SETTINGS` is discouraged
     "PLW1641", # Object does not implement `__hash__` method
-    "PTH100", # `os.path.abspath()` should be replaced by `Path.resolve()`
-    "PTH109", # `os.getcwd()` should be replaced by `Path.cwd()`
     "RUF005", # Consider `[*path, str(key)]` instead of concatenation
     "RUF029", # Function is declared `async`, but doesn't `await` or use `async` features.
     "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py
index 9de4ee71..515d9609 100644
--- a/tests/helpers/utils.py
+++ b/tests/helpers/utils.py
@@ -15,7 +15,7 @@ def change_directory(new_directory: str) -> Generator[None, None, None]:
     """Helper function used to change directories in a with block."""
     # Save the current working directory
-    original_directory = os.getcwd()
+    original_directory = Path.cwd()
 
     # Change to the new directory
     try:
@@ -30,7 +30,7 @@ def change_directory(new_directory: str) -> Generator[None, None, None]:
 @contextmanager
 def temp_repo_and_cd(source_dir: Path) -> Generator[Path, None, None]:
     temp_dir = tempfile.mkdtemp()
-    original_directory = os.getcwd()
+    original_directory = Path.cwd()
 
     try:
         shutil.copytree(source_dir, temp_dir, dirs_exist_ok=True)
diff --git a/tests/integration/test_infrahubctl.py b/tests/integration/test_infrahubctl.py
index 20106050..a4fa197b 100644
--- a/tests/integration/test_infrahubctl.py
+++ b/tests/integration/test_infrahubctl.py
@@ -22,7 +22,7 @@
 if TYPE_CHECKING:
     from infrahub_sdk import InfrahubClient
 
-FIXTURE_BASE_DIR = Path(Path(os.path.abspath(__file__)).parent / ".." / "fixtures")
+FIXTURE_BASE_DIR = Path(Path(Path(__file__).resolve()).parent / ".." / "fixtures")
 
 runner = CliRunner()
diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py
index 88e71f86..4ccde6f9 100644
--- a/tests/unit/ctl/test_render_app.py
+++ b/tests/unit/ctl/test_render_app.py
@@ -1,5 +1,4 @@
 import json
-import os
 from dataclasses import dataclass
 from pathlib import Path
 
@@ -14,7 +13,7 @@
 
 runner = CliRunner()
 
-FIXTURE_BASE_DIR = Path(Path(os.path.abspath(__file__)).parent / ".." / ".." / "fixtures" / "repos")
+FIXTURE_BASE_DIR = Path(Path(Path(__file__).resolve()).parent / ".." / ".." / "fixtures" / "repos")
diff --git a/tests/unit/ctl/test_transform_app.py b/tests/unit/ctl/test_transform_app.py
index 9ae4585d..5b7cb4f1 100644
--- a/tests/unit/ctl/test_transform_app.py
+++ b/tests/unit/ctl/test_transform_app.py
@@ -1,7 +1,6 @@
 """Integration tests for infrahubctl commands."""
 
 import json
-import os
 import shutil
 import tempfile
 from collections.abc import Generator
@@ -20,7 +19,7 @@
 
 
 FIXTURE_BASE_DIR = Path(
-    Path(os.path.abspath(__file__)).parent / ".." / ".." / "fixtures" / "integration" / "test_infrahubctl"
+    Path(Path(__file__).resolve()).parent / ".." / ".." / "fixtures" / "integration" / "test_infrahubctl"
 )

From 1506d951d083b57b12a4151b50b0147161cbf6d0 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Tue, 30 Dec 2025 09:22:25 +0100
Subject: [PATCH 20/27] Fix test annotations on unit tests

---
 pyproject.toml                             | 13 -------
 tests/unit/sdk/checks/test_checks.py       | 10 ++++-
 tests/unit/sdk/graphql/test_renderer.py    | 10 +++--
 tests/unit/sdk/test_batch.py               |  8 ++--
 tests/unit/sdk/test_branch.py              | 12 +++++-
 tests/unit/sdk/test_group_context.py       |  8 +++-
 tests/unit/sdk/test_protocols_generator.py |  6 ++-
 tests/unit/sdk/test_query_analyzer.py      |  4 +-
 tests/unit/sdk/test_repository.py          | 10 ++---
 tests/unit/sdk/test_schema_sorter.py       |  7 +++-
 tests/unit/sdk/test_store_branch.py        |  9 +++--
 tests/unit/sdk/test_timestamp.py           |  6 +--
 tests/unit/sdk/test_utils.py               | 43 ++++++++++++++--------
 13 files changed, 87 insertions(+), 59 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 0bcaf242..874f0033 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -282,7 +282,6 @@ ignorelist = [
     # Review and update builtin shadowing below this line
     "filter",
     "format",
-    "input",
     "list",
     "property",
 ]
@@ -347,18 +346,6 @@ max-complexity = 17
 "tests/unit/sdk/test_diff_summary.py" = ["ANN001"] # 9 errors
 "tests/unit/sdk/test_object_store.py" = ["ANN001"] # 7 errors
 "tests/unit/sdk/graphql/test_query.py" = ["ANN001"] # 7 errors
-"tests/unit/sdk/test_timestamp.py" = ["ANN001"] # 6 errors
-"tests/unit/sdk/test_repository.py" = ["ANN001"] # 6 errors
-"tests/unit/sdk/test_utils.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/test_store_branch.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/test_query_analyzer.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/test_group_context.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/test_branch.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/test_batch.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/graphql/test_renderer.py" = ["ANN001"] # 4 errors
-"tests/unit/sdk/checks/test_checks.py" = ["ANN001"] # 2 errors
-"tests/unit/sdk/test_schema_sorter.py" = ["ANN001"] # 1 error
-"tests/unit/sdk/test_protocols_generator.py" = ["ANN001"] # 1 error
 
 # tests/integration/ - 60 errors total
 "tests/integration/test_infrahub_client.py" = ["ANN001"] # 32 errors
diff --git a/tests/unit/sdk/checks/test_checks.py b/tests/unit/sdk/checks/test_checks.py
index 7fe0c691..6433b8b4 100644
--- a/tests/unit/sdk/checks/test_checks.py
+++ b/tests/unit/sdk/checks/test_checks.py
@@ -1,10 +1,16 @@
+from __future__ import annotations
+
 from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 
 from infrahub_sdk import InfrahubClient
 from infrahub_sdk.checks import InfrahubCheck
 
+if TYPE_CHECKING:
+    from pytest_httpx import HTTPXMock
+
 pytestmark = pytest.mark.httpx_mock(can_send_already_matched_responses=True)
 
 
@@ -36,7 +42,7 @@ class IFCheckNoName(InfrahubCheck):
     assert check.root_directory == str(tmp_path)
 
 
-async def test_async_init(client) -> None:
+async def test_async_init(client: InfrahubClient) -> None:
     class IFCheck(InfrahubCheck):
         query = "my_query"
 
@@ -44,7 +50,7 @@ class IFCheck(InfrahubCheck):
     assert isinstance(check.client, InfrahubClient)
 
 
-async def test_validate_sync_async(mock_gql_query_my_query) -> None:
+async def test_validate_sync_async(mock_gql_query_my_query: HTTPXMock) -> None:
     class IFCheckAsync(InfrahubCheck):
         query = "my_query"
 
diff --git a/tests/unit/sdk/graphql/test_renderer.py b/tests/unit/sdk/graphql/test_renderer.py
index 7803a890..642d688f 100644
--- a/tests/unit/sdk/graphql/test_renderer.py
+++ b/tests/unit/sdk/graphql/test_renderer.py
@@ -1,7 +1,9 @@
+from typing import Any
+
 from infrahub_sdk.graphql.renderers import render_input_block, render_query_block
 
 
-def test_render_query_block(query_data_no_filter) -> None:
+def test_render_query_block(query_data_no_filter: dict[str, Any]) -> None:
     lines = render_query_block(data=query_data_no_filter)
 
     expected_lines = [
@@ -44,7 +46,7 @@ def test_render_query_block(query_data_no_filter) -> None:
     assert lines == expected_lines
 
 
-def test_render_query_block_alias(query_data_alias) -> None:
+def test_render_query_block_alias(query_data_alias: dict[str, Any]) -> None:
     lines = render_query_block(data=query_data_alias)
 
     expected_lines = [
@@ -66,7 +68,7 @@ def test_render_query_block_alias(query_data_alias) -> None:
     assert lines == expected_lines
 
 
-def test_render_query_block_fragment(query_data_fragment) -> None:
+def test_render_query_block_fragment(query_data_fragment: dict[str, Any]) -> None:
     lines = render_query_block(data=query_data_fragment)
 
     expected_lines = [
@@ -90,7 +92,7 @@ def test_render_query_block_fragment(query_data_fragment) -> None:
     assert lines == expected_lines
 
 
-def test_render_input_block(input_data_01) -> None:
+def test_render_input_block(input_data_01: dict[str, Any]) -> None:
     lines = render_input_block(data=input_data_01)
 
     expected_lines = [
diff --git a/tests/unit/sdk/test_batch.py b/tests/unit/sdk/test_batch.py
index 2998f767..7bdf00ad 100644
--- a/tests/unit/sdk/test_batch.py
+++ b/tests/unit/sdk/test_batch.py
@@ -50,8 +50,8 @@ def test_func() -> int:
 @pytest.mark.parametrize("client_type", client_types)
 async def test_batch_return_exception(
     httpx_mock: HTTPXMock,
-    mock_query_mutation_location_create_failed,
-    mock_schema_query_01,
+    mock_query_mutation_location_create_failed: HTTPXMock,
+    mock_schema_query_01: HTTPXMock,
     clients: BothClients,
     client_type: str,
 ) -> None:
@@ -96,8 +96,8 @@ async def test_batch_return_exception(
 @pytest.mark.parametrize("client_type", client_types)
 async def test_batch_exception(
     httpx_mock: HTTPXMock,
-    mock_query_mutation_location_create_failed,
-    mock_schema_query_01,
+    mock_query_mutation_location_create_failed: HTTPXMock,
+    mock_schema_query_01: HTTPXMock,
     clients: BothClients,
     client_type: str,
 ) -> None:
diff --git a/tests/unit/sdk/test_branch.py b/tests/unit/sdk/test_branch.py
index 4e3d93f2..88f4a530 100644
--- a/tests/unit/sdk/test_branch.py
+++ b/tests/unit/sdk/test_branch.py
@@ -1,4 +1,7 @@
+from __future__ import annotations
+
 import inspect
+from typing import TYPE_CHECKING
 
 import pytest
 
@@ -8,6 +11,11 @@
     InfrahubBranchManagerSync,
 )
 
+if TYPE_CHECKING:
+    from pytest_httpx import HTTPXMock
+
+    from tests.unit.sdk.conftest import BothClients
+
 async_branch_methods = [method for method in dir(InfrahubBranchManager) if not method.startswith("_")]
 sync_branch_methods = [method for method in dir(InfrahubBranchManagerSync) if not method.startswith("_")]
 
@@ -21,7 +29,7 @@ def test_method_sanity() -> None:
 
 
 @pytest.mark.parametrize("method", async_branch_methods)
-def test_validate_method_signature(method) -> None:
+def test_validate_method_signature(method: str) -> None:
     async_method = getattr(InfrahubBranchManager, method)
     sync_method = getattr(InfrahubBranchManagerSync, method)
     async_sig = inspect.signature(async_method)
@@ -31,7 +39,7 @@ def test_validate_method_signature(method) -> None:
 
 
 @pytest.mark.parametrize("client_type", client_types)
-async def test_get_branches(clients, mock_branches_list_query, client_type) -> None:
+async def test_get_branches(clients: BothClients, mock_branches_list_query: HTTPXMock, client_type: str) -> None:
     if client_type == "standard":
         branches = await clients.standard.branch.all()
     else:
diff --git a/tests/unit/sdk/test_group_context.py b/tests/unit/sdk/test_group_context.py
index be00cd51..7b4de550 100644
--- a/tests/unit/sdk/test_group_context.py
+++ b/tests/unit/sdk/test_group_context.py
@@ -1,8 +1,10 @@
 import inspect
+from collections.abc import Callable
 
 import pytest
 
 from infrahub_sdk.query_groups import InfrahubGroupContext, InfrahubGroupContextBase, InfrahubGroupContextSync
+from infrahub_sdk.schema import NodeSchemaAPI
 
 async_methods = [method for method in dir(InfrahubGroupContext) if not method.startswith("_")]
 sync_methods = [method for method in dir(InfrahubGroupContextSync) if not method.startswith("_")]
@@ -18,7 +20,9 @@ async def test_method_sanity() -> None:
 
 @pytest.mark.parametrize("method", async_methods)
 async def test_validate_method_signature(
-    method, replace_sync_return_annotation, replace_async_return_annotation
+    method: str,
+    replace_sync_return_annotation: Callable[[str], str],
+    replace_async_return_annotation: Callable[[str], str],
 ) -> None:
     async_method = getattr(InfrahubGroupContext, method)
     sync_method = getattr(InfrahubGroupContextSync, method)
@@ -65,7 +69,7 @@ def test_generate_group_name() -> None:
     assert context._generate_group_name(suffix="xxx") == "MYID-xxx-11aaec5206c3dca37cbbcaaabf121550"
 
 
-def test_generate_group_description(std_group_schema) -> None:
+def test_generate_group_description(std_group_schema: NodeSchemaAPI) -> None:
     context = InfrahubGroupContextBase()
     context.set_properties(identifier="MYID")
     assert not context._generate_group_description(schema=std_group_schema)
diff --git a/tests/unit/sdk/test_protocols_generator.py b/tests/unit/sdk/test_protocols_generator.py
index 22822717..55796db6 100644
--- a/tests/unit/sdk/test_protocols_generator.py
+++ b/tests/unit/sdk/test_protocols_generator.py
@@ -1,10 +1,14 @@
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
 
 import pytest
 
 from infrahub_sdk import InfrahubClient
 from infrahub_sdk.protocols_generator.generator import CodeGenerator
 
+if TYPE_CHECKING:
+    from pytest_httpx import HTTPXMock
+
 
 @dataclass
 class SyncifyTestCase:
@@ -41,7 +45,7 @@ async def test_filter_syncify(test_case: SyncifyTestCase) -> None:
     assert CodeGenerator._jinja2_filter_syncify(value=test_case.input, sync=test_case.sync) == test_case.output
 
 
-async def test_generator(client: InfrahubClient, mock_schema_query_05) -> None:
+async def test_generator(client: InfrahubClient, mock_schema_query_05: "HTTPXMock") -> None:
     schemas = await client.schema.fetch(branch="main")
 
     code_generator = CodeGenerator(schema=schemas)
diff --git a/tests/unit/sdk/test_query_analyzer.py b/tests/unit/sdk/test_query_analyzer.py
index 26acdd16..a4deefb2 100644
--- a/tests/unit/sdk/test_query_analyzer.py
+++ b/tests/unit/sdk/test_query_analyzer.py
@@ -5,7 +5,7 @@
 from infrahub_sdk.analyzer import GraphQLOperation, GraphQLQueryAnalyzer
 
 
-async def test_analyzer_init_query_only(query_01, bad_query_01) -> None:
+async def test_analyzer_init_query_only(query_01: str, bad_query_01: str) -> None:
     gqa = GraphQLQueryAnalyzer(query=query_01)
 
     assert isinstance(gqa.document, DocumentNode)
@@ -151,7 +151,7 @@ async def test_get_variables(query_01: str, query_04: str, query_05: str, query_
     "var_type,var_required",
     [("[ID]", False), ("[ID]!", True), ("[ID!]", False), ("[ID!]!", True)],
 )
-async def test_get_nested_variables(var_type, var_required) -> None:
+async def test_get_nested_variables(var_type: str, var_required:
bool) -> None: query = ( """ query ($ids: %s){ diff --git a/tests/unit/sdk/test_repository.py b/tests/unit/sdk/test_repository.py index 4e4bf177..a3c7f6eb 100644 --- a/tests/unit/sdk/test_repository.py +++ b/tests/unit/sdk/test_repository.py @@ -17,7 +17,7 @@ def temp_dir() -> Generator[str]: yield tmp_dir -def test_initialize_repo_creates_new_repo(temp_dir) -> None: +def test_initialize_repo_creates_new_repo(temp_dir: str) -> None: """Test that a new Git repository is created if none exists.""" manager = GitRepoManager(root_directory=temp_dir, branch="main") @@ -29,7 +29,7 @@ def test_initialize_repo_creates_new_repo(temp_dir) -> None: assert isinstance(manager.git, Repo) -def test_initialize_repo_uses_existing_repo(temp_dir) -> None: +def test_initialize_repo_uses_existing_repo(temp_dir: str) -> None: """Test that the GitRepoManager uses an existing repository without an active branch.""" # Manually initialize a repo Repo.init(temp_dir, default_branch=b"main") @@ -40,7 +40,7 @@ def test_initialize_repo_uses_existing_repo(temp_dir) -> None: assert (Path(temp_dir) / ".git").is_dir() -def test_active_branch_returns_correct_branch(temp_dir) -> None: +def test_active_branch_returns_correct_branch(temp_dir: str) -> None: """Test that the active branch is correctly returned.""" manager = GitRepoManager(temp_dir, branch="develop") @@ -48,7 +48,7 @@ def test_active_branch_returns_correct_branch(temp_dir) -> None: assert manager.active_branch == "develop" -def test_initialize_repo_raises_error_on_failure(monkeypatch, temp_dir) -> None: +def test_initialize_repo_raises_error_on_failure(monkeypatch: pytest.MonkeyPatch, temp_dir: str) -> None: """Test that an error is raised if the repository cannot be initialized.""" def mock_init(*args, **kwargs) -> None: # noqa: ANN002, ANN003 @@ -60,7 +60,7 @@ def mock_init(*args, **kwargs) -> None: # noqa: ANN002, ANN003 GitRepoManager(temp_dir) -def test_gitrepo_init(temp_dir) -> None: +def test_gitrepo_init(temp_dir: str) -> None: src_directory = get_fixtures_dir() / "integration/mock_repo" repo = GitRepo(name="mock_repo", src_directory=src_directory, dst_directory=Path(temp_dir)) assert len(list(repo._repo.git.get_walker())) == 1 diff --git a/tests/unit/sdk/test_schema_sorter.py b/tests/unit/sdk/test_schema_sorter.py index 5db5bb68..20d0cc5f 100644 --- a/tests/unit/sdk/test_schema_sorter.py +++ b/tests/unit/sdk/test_schema_sorter.py @@ -1,8 +1,13 @@ +from typing import TYPE_CHECKING + from infrahub_sdk import InfrahubClient from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter +if TYPE_CHECKING: + from pytest_httpx import HTTPXMock + -async def test_schema_sorter(client: InfrahubClient, mock_schema_query_01) -> None: +async def test_schema_sorter(client: InfrahubClient, mock_schema_query_01: "HTTPXMock") -> None: schemas = await client.schema.all() topological_sorter = InfrahubSchemaTopologicalSorter() diff --git a/tests/unit/sdk/test_store_branch.py b/tests/unit/sdk/test_store_branch.py index 3262f406..81c89705 100644 --- a/tests/unit/sdk/test_store_branch.py +++ b/tests/unit/sdk/test_store_branch.py @@ -3,10 +3,11 @@ from infrahub_sdk.client import InfrahubClient from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.node import InfrahubNode +from infrahub_sdk.schema import NodeSchemaAPI from infrahub_sdk.store import NodeStoreBranch -def test_node_store_set(client: InfrahubClient, schema_with_hfid) -> None: +def test_node_store_set(client: InfrahubClient, schema_with_hfid: dict[str, NodeSchemaAPI]) -> None: data = 
{ "name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, @@ -22,7 +23,7 @@ def test_node_store_set(client: InfrahubClient, schema_with_hfid) -> None: assert store._keys["mykey"] == node._internal_id -def test_node_store_set_no_hfid(client: InfrahubClient, location_schema) -> None: +def test_node_store_set_no_hfid(client: InfrahubClient, location_schema: NodeSchemaAPI) -> None: data = { "name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, @@ -39,7 +40,7 @@ def test_node_store_set_no_hfid(client: InfrahubClient, location_schema) -> None assert store._keys["mykey"] == node._internal_id -def test_node_store_get(client: InfrahubClient, location_schema) -> None: +def test_node_store_get(client: InfrahubClient, location_schema: NodeSchemaAPI) -> None: data = { "id": "54f3108c-1f21-44c4-93cf-ec5737587b48", "name": {"value": "JFK1"}, @@ -65,7 +66,7 @@ def test_node_store_get(client: InfrahubClient, location_schema) -> None: store.get(key="anotherkey") -def test_node_store_get_with_hfid(client: InfrahubClient, schema_with_hfid) -> None: +def test_node_store_get_with_hfid(client: InfrahubClient, schema_with_hfid: dict[str, NodeSchemaAPI]) -> None: data = { "id": "54f3108c-1f21-44c4-93cf-ec5737587b48", "name": {"value": "JFK1"}, diff --git a/tests/unit/sdk/test_timestamp.py b/tests/unit/sdk/test_timestamp.py index ec219a45..a4e9bc79 100644 --- a/tests/unit/sdk/test_timestamp.py +++ b/tests/unit/sdk/test_timestamp.py @@ -63,7 +63,7 @@ def test_parse_string() -> None: ), ], ) -def test_to_datetime(input_str, expected_datetime) -> None: +def test_to_datetime(input_str: str, expected_datetime: datetime) -> None: assert isinstance(Timestamp(input_str).to_datetime(), datetime) assert Timestamp(input_str).to_datetime() == expected_datetime @@ -85,7 +85,7 @@ def test_to_datetime(input_str, expected_datetime) -> None: ), ], ) -def test_to_string_default(input_str, expected_str, expected_str_no_z) -> None: +def test_to_string_default(input_str: str, expected_str: str, expected_str_no_z: str) -> None: assert isinstance(Timestamp(input_str).to_string(), str) assert Timestamp(input_str).to_string() == expected_str assert Timestamp(input_str).to_string(with_z=False) == expected_str_no_z @@ -129,6 +129,6 @@ def test_serialize() -> None: @pytest.mark.parametrize("invalid_str", ["blurple", "1122334455667788", "2023-45-99"]) -def test_invalid_raises_correct_error(invalid_str) -> None: +def test_invalid_raises_correct_error(invalid_str: str) -> None: with pytest.raises(TimestampFormatError): Timestamp(invalid_str) diff --git a/tests/unit/sdk/test_utils.py b/tests/unit/sdk/test_utils.py index fb7a3557..eae23150 100644 --- a/tests/unit/sdk/test_utils.py +++ b/tests/unit/sdk/test_utils.py @@ -1,7 +1,9 @@ import json import tempfile import uuid +from dataclasses import dataclass from pathlib import Path +from typing import Any from unittest.mock import Mock import pytest @@ -47,22 +49,31 @@ def test_is_valid_uuid() -> None: assert is_valid_uuid(uuid.UUID) is False +@dataclass +class ValidURLTestCase: + input: Any + result: bool + + +VALID_URL_TEST_CASES = [ + ValidURLTestCase(input=55, result=False), + ValidURLTestCase(input="https://", result=False), + ValidURLTestCase(input="my-server", result=False), + ValidURLTestCase(input="http://my-server", result=True), + ValidURLTestCase(input="http://my-server:8080", result=True), + ValidURLTestCase(input="http://192.168.1.10", result=True), + ValidURLTestCase(input="/test", result=True), + ValidURLTestCase(input="/", result=True), + 
ValidURLTestCase(input="http:/192.168.1.10", result=False), +] + + @pytest.mark.parametrize( - "input,result", - [ - (55, False), - ("https://", False), - ("my-server", False), - ("http://my-server", True), - ("http://my-server:8080", True), - ("http://192.168.1.10", True), - ("/test", True), - ("/", True), - ("http:/192.168.1.10", False), - ], + "test_case", + [pytest.param(tc, id=str(tc.input)) for tc in VALID_URL_TEST_CASES], ) -def test_is_valid_url(input, result) -> None: - assert is_valid_url(input) is result +def test_is_valid_url(test_case: ValidURLTestCase) -> None: + assert is_valid_url(test_case.input) is test_case.result def test_duplicates() -> None: @@ -156,7 +167,7 @@ def test_dict_hash() -> None: assert dict_hash({}) == "99914b932bd37a50b983c5e7c90ae93b" -async def test_extract_fields(query_01) -> None: +async def test_extract_fields(query_01: str) -> None: document = parse(query_01) expected_response = { "TestPerson": { @@ -171,7 +182,7 @@ async def test_extract_fields(query_01) -> None: assert await extract_fields(document.definitions[0].selection_set) == expected_response -async def test_extract_fields_fragment(query_02) -> None: +async def test_extract_fields_fragment(query_02: str) -> None: document = parse(query_02) expected_response = { From 51d4feb102b8b87e200316da4674d82babfd9d9d Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Tue, 30 Dec 2025 13:44:44 +0100 Subject: [PATCH 21/27] Use pathlib --- tests/unit/ctl/test_graphql_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/ctl/test_graphql_app.py b/tests/unit/ctl/test_graphql_app.py index 07af1d20..285507ae 100644 --- a/tests/unit/ctl/test_graphql_app.py +++ b/tests/unit/ctl/test_graphql_app.py @@ -198,7 +198,7 @@ def test_generate_return_types_default_cwd(self, tmp_path: Path) -> None: shutil.copy(FIXTURES_DIR / "valid_query.gql", query_file) # Change to temp directory and run without specifying query path - original_dir = os.getcwd() + original_dir = Path.cwd() try: os.chdir(tmp_path) result = runner.invoke(app, ["generate-return-types", "--schema", str(schema_file)], catch_exceptions=False) From 4b70399e7a5941f510193345b85f61020b438315 Mon Sep 17 00:00:00 2001 From: Guillaume Mazoyer Date: Fri, 2 Jan 2026 16:09:13 +0100 Subject: [PATCH 22/27] Convert and enable integration tests (#730) Fixes #187 This PR rewrites integration tests that were previously disabled. These tests are now based on the `TestInfrahubDockerClient`. Some tests are marked as `XFAIL` for now as they either highlight issues or require a different version of Infrahub server. 
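For reference, a converted test now follows roughly this shape. This is an
illustrative sketch rather than code from this PR: `TestingWidget` and its
schema are invented for the example, while the `client` fixture and the
`schema.load(..., wait_until_converged=True)` pattern are the ones the
rewritten tests rely on.

    from __future__ import annotations

    from typing import Any

    import pytest

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.testing.docker import TestInfrahubDockerClient

    # Hypothetical schema, defined only for this sketch.
    SCHEMA: dict[str, Any] = {
        "version": "1.0",
        "nodes": [
            {
                "name": "Widget",
                "namespace": "Testing",
                "attributes": [{"name": "name", "kind": "Text", "unique": True}],
            }
        ],
    }


    class TestWidget(TestInfrahubDockerClient):
        @pytest.fixture(scope="class")
        async def load_schema(self, client: InfrahubClient) -> None:
            # Load the schema once per test class and wait for convergence.
            resp = await client.schema.load(schemas=[SCHEMA], wait_until_converged=True)
            assert not resp.errors

        async def test_create_and_query(self, client: InfrahubClient, load_schema: None) -> None:
            # Create a node against the containerized Infrahub and read it back.
            obj = await client.create(kind="TestingWidget", name="widget-01")
            await obj.save()
            assert await client.all(kind="TestingWidget")

The base class takes care of starting the Infrahub container and exposing the
`client` fixture, which is what allows the old `TestClient` plumbing that was
commented out in `tests/integration/conftest.py` to be removed entirely.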
--- changelog/187.fixed.md | 1 + pyproject.toml | 9 +- tests/integration/conftest.py | 747 ++++----------- tests/integration/test_convert_object_type.py | 9 +- tests/integration/test_export_import.py | 872 +++++++----------- tests/integration/test_infrahub_client.py | 388 ++++---- .../integration/test_infrahub_client_sync.py | 700 +++++++------- tests/integration/test_infrahubctl.py | 26 +- tests/integration/test_node.py | 513 ++++++----- tests/integration/test_object_store.py | 44 +- tests/integration/test_repository.py | 4 +- tests/integration/test_schema.py | 84 +- 12 files changed, 1465 insertions(+), 1932 deletions(-) create mode 100644 changelog/187.fixed.md diff --git a/changelog/187.fixed.md b/changelog/187.fixed.md new file mode 100644 index 00000000..1911c8dc --- /dev/null +++ b/changelog/187.fixed.md @@ -0,0 +1 @@ +Rewrite and re-enable integration tests \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 91261e2a..491e5ae0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -347,12 +347,9 @@ max-complexity = 17 "tests/unit/sdk/test_object_store.py" = ["ANN001"] # 7 errors "tests/unit/sdk/graphql/test_query.py" = ["ANN001"] # 7 errors -# tests/integration/ - 60 errors total -"tests/integration/test_infrahub_client.py" = ["ANN001"] # 32 errors -"tests/integration/test_node.py" = ["ANN001"] # 15 errors -"tests/integration/test_infrahubctl.py" = ["ANN001"] # 9 errors -"tests/integration/test_convert_object_type.py" = ["ANN001"] # 3 errors -"tests/integration/test_repository.py" = ["ANN001"] # 1 error +# tests/integration/ +"tests/integration/test_infrahub_client.py" = ["PLR0904"] +"tests/integration/test_infrahub_client_sync.py" = ["PLR0904"] "tasks.py" = [ "PLC0415", # `import` should be at the top-level of a file diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index a999d84e..25347480 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,562 +1,201 @@ -# import asyncio +from __future__ import annotations + import os +from typing import Any + +import pytest -# import httpx -# import pytest -# import ujson -# from fastapi.testclient import TestClient -# from infrahub import config -# from infrahub.components import ComponentType -# from infrahub.core.initialization import first_time_initialization, initialization -# from infrahub.core.node import Node -# from infrahub.core.utils import delete_all_nodes -# from infrahub.database import InfrahubDatabase, get_db -# from infrahub.lock import initialize_lock -# from infrahub.message_bus import InfrahubMessage -# from infrahub.message_bus.types import MessageTTL -# from infrahub.services.adapters.message_bus import InfrahubMessageBus -# from infrahub_sdk.schema import NodeSchema, SchemaRoot -# from infrahub_sdk.types import HTTPMethod from infrahub_sdk.utils import str_to_bool BUILD_NAME = os.environ.get("INFRAHUB_BUILD_NAME", "infrahub") TEST_IN_DOCKER = str_to_bool(os.environ.get("INFRAHUB_TEST_IN_DOCKER", "false")) -# @pytest.fixture(scope="session", autouse=True) -# def add_tracker(): -# os.environ["PYTEST_RUNNING"] = "true" - - -# class InfrahubTestClient(TestClient): -# def _request( -# self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None -# ) -> httpx.Response: -# content = None -# if payload: -# content = str(ujson.dumps(payload)).encode("UTF-8") -# with self as client: -# return client.request( -# method=method.value, -# url=url, -# headers=headers, -# timeout=timeout, -# content=content, -# ) 
- -# async def async_request( -# self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None -# ) -> httpx.Response: -# return self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload) - -# def sync_request( -# self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None -# ) -> httpx.Response: -# return self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload) - - -# @pytest.fixture(scope="session") -# def event_loop(): -# """Overrides pytest default function scoped event loop""" -# policy = asyncio.get_event_loop_policy() -# loop = policy.new_event_loop() -# yield loop -# loop.close() - - -# @pytest.fixture(scope="module", autouse=True) -# def execute_before_any_test(worker_id, tmpdir_factory): -# config.load_and_exit() - -# config.SETTINGS.storage.driver = config.StorageDriver.FileSystemStorage - -# if TEST_IN_DOCKER: -# try: -# db_id = int(worker_id[2]) + 1 -# except (ValueError, IndexError): -# db_id = 1 -# config.SETTINGS.cache.address = f"{BUILD_NAME}-cache-1" -# config.SETTINGS.database.address = f"{BUILD_NAME}-database-{db_id}" -# config.SETTINGS.storage.local = config.FileSystemStorageSettings(path="/opt/infrahub/storage") -# else: -# storage_dir = tmpdir_factory.mktemp("storage") -# config.SETTINGS.storage.local.path_ = str(storage_dir) - -# config.SETTINGS.broker.enable = False -# config.SETTINGS.cache.enable = True -# config.SETTINGS.miscellaneous.start_background_runner = False -# config.SETTINGS.security.secret_key = "4e26b3d9-b84f-42c9-a03f-fee3ada3b2fa" -# config.SETTINGS.main.internal_address = "http://mock" -# config.OVERRIDE.message_bus = BusRecorder() - -# initialize_lock() - - -# @pytest.fixture(scope="module") -# async def db() -> InfrahubDatabase: -# driver = InfrahubDatabase(driver=await get_db(retry=1)) - -# yield driver - -# await driver.close() - - -# @pytest.fixture(scope="module") -# async def init_db_base(db: InfrahubDatabase): -# await delete_all_nodes(db=db) -# await first_time_initialization(db=db) -# await initialization(db=db) - - -# @pytest.fixture(scope="module") -# async def builtin_org_schema() -> SchemaRoot: -# SCHEMA = { -# "version": "1.0", -# "nodes": [ -# { -# "name": "Organization", -# "namespace": "Test", -# "description": "An organization represent a legal entity, a company.", -# "include_in_menu": True, -# "label": "Organization", -# "icon": "mdi:domain", -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["label__value"], -# "branch": "aware", -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True}, -# {"name": "label", "kind": "Text", "optional": True}, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# "relationships": [ -# { -# "name": "tags", -# "peer": "BuiltinTag", -# "kind": "Attribute", -# "optional": True, -# "cardinality": "many", -# }, -# ], -# }, -# { -# "name": "Status", -# "namespace": "Builtin", -# "description": "Represent the status of an object: active, maintenance", -# "include_in_menu": True, -# "icon": "mdi:list-status", -# "label": "Status", -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["label__value"], -# "branch": "aware", -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True}, -# {"name": "label", "kind": "Text", "optional": True}, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# }, -# { -# "name": "Role", -# 
"namespace": "Builtin", -# "description": "Represent the role of an object", -# "include_in_menu": True, -# "icon": "mdi:ballot", -# "label": "Role", -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["label__value"], -# "branch": "aware", -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True}, -# {"name": "label", "kind": "Text", "optional": True}, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# }, -# { -# "name": "Location", -# "namespace": "Builtin", -# "description": "A location represent a physical element: a building, a site, a city", -# "include_in_menu": True, -# "icon": "mdi:map-marker-radius-outline", -# "label": "Location", -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["name__value"], -# "branch": "aware", -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True}, -# {"name": "description", "kind": "Text", "optional": True}, -# {"name": "type", "kind": "Text"}, -# ], -# "relationships": [ -# { -# "name": "tags", -# "peer": "BuiltinTag", -# "kind": "Attribute", -# "optional": True, -# "cardinality": "many", -# }, -# ], -# }, -# { -# "name": "Criticality", -# "namespace": "Builtin", -# "description": "Level of criticality expressed from 1 to 10.", -# "include_in_menu": True, -# "icon": "mdi:alert-octagon-outline", -# "label": "Criticality", -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["name__value"], -# "branch": "aware", -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True}, -# {"name": "level", "kind": "Number", "enum": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# }, -# ], -# } - -# return SCHEMA - - -# @pytest.fixture -# async def location_schema() -> NodeSchema: -# data = { -# "name": "Location", -# "namespace": "Builtin", -# "default_filter": "name__value", -# "attributes": [ -# {"name": "name", "kind": "String", "unique": True}, -# {"name": "description", "kind": "String", "optional": True}, -# {"name": "type", "kind": "String"}, -# ], -# "relationships": [ -# { -# "name": "tags", -# "peer": "BuiltinTag", -# "optional": True, -# "cardinality": "many", -# }, -# { -# "name": "primary_tag", -# "peer": "BultinTag", -# "optional": True, -# "cardinality": "one", -# }, -# ], -# } -# return NodeSchema(**data) - - -# @pytest.fixture -# async def location_cdg(db: InfrahubDatabase, tag_blue: Node, tag_red: Node) -> Node: -# obj = await Node.init(schema="BuiltinLocation", db=db) -# await obj.new(db=db, name="cdg01", type="SITE", tags=[tag_blue, tag_red]) -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def tag_blue(db: InfrahubDatabase) -> Node: -# obj = await Node.init(schema="BuiltinTag", db=db) -# await obj.new(db=db, name="Blue") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def tag_red(db: InfrahubDatabase) -> Node: -# obj = await Node.init(schema="BuiltinTag", db=db) -# await obj.new(db=db, name="Red") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def tag_green(db: InfrahubDatabase) -> Node: -# obj = await Node.init(schema="BuiltinTag", db=db) -# await obj.new(db=db, name="Green") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def first_account(db: InfrahubDatabase) -> Node: -# obj = await Node.init(db=db, schema="CoreAccount") -# await obj.new(db=db, name="First Account", account_type="Git", password="TestPassword123") -# await 
obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def second_account(db: InfrahubDatabase) -> Node: -# obj = await Node.init(db=db, schema="CoreAccount") -# await obj.new(db=db, name="Second Account", account_type="Git", password="TestPassword123") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def repo01(db: InfrahubDatabase) -> Node: -# obj = await Node.init(db=db, schema="CoreRepository") -# await obj.new(db=db, name="repo01", location="https://github.com/my/repo.git") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def repo99(db: InfrahubDatabase) -> Node: -# obj = await Node.init(db=db, schema="CoreRepository") -# await obj.new(db=db, name="repo99", location="https://github.com/my/repo99.git") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def gqlquery01(db: InfrahubDatabase) -> Node: -# obj = await Node.init(db=db, schema="CoreGraphQLQuery") -# await obj.new(db=db, name="query01", query="query { device { name { value }}}") -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def gqlquery02(db: InfrahubDatabase, repo01: Node, tag_blue: Node, tag_red: Node) -> Node: -# obj = await Node.init(db=db, schema="CoreGraphQLQuery") -# await obj.new( -# db=db, -# name="query02", -# query="query { CoreRepository { edges { node { name { value }}}}}", -# repository=repo01, -# tags=[tag_blue, tag_red], -# ) -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def gqlquery03(db: InfrahubDatabase, repo01: Node, tag_blue: Node, tag_red: Node) -> Node: -# obj = await Node.init(db=db, schema="CoreGraphQLQuery") -# await obj.new( -# db=db, -# name="query03", -# query="query { CoreRepository { edges { node { name { value }}}}}", -# repository=repo01, -# tags=[tag_blue, tag_red], -# ) -# await obj.save(db=db) -# return obj - - -# @pytest.fixture -# async def schema_extension_01() -> dict[str, Any]: -# return { -# "version": "1.0", -# "nodes": [ -# { -# "name": "Rack", -# "namespace": "Infra", -# "description": "A Rack represents a physical two- or four-post equipment rack in which devices can be installed.", -# "label": "Rack", -# "default_filter": "name__value", -# "display_labels": ["name__value"], -# "attributes": [ -# {"name": "name", "kind": "Text"}, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# "relationships": [ -# { -# "name": "tags", -# "peer": "BuiltinTag", -# "optional": True, -# "cardinality": "many", -# "kind": "Attribute", -# }, -# ], -# } -# ], -# "extensions": { -# "nodes": [ -# { -# "kind": "BuiltinTag", -# "relationships": [ -# { -# "name": "racks", -# "peer": "InfraRack", -# "optional": True, -# "cardinality": "many", -# "kind": "Generic", -# } -# ], -# } -# ] -# }, -# } - - -# @pytest.fixture -# async def schema_extension_02() -> dict[str, Any]: -# return { -# "version": "1.0", -# "nodes": [ -# { -# "name": "Contract", -# "namespace": "Procurement", -# "description": "Generic Contract", -# "label": "Contract", -# "display_labels": ["contract_ref__value"], -# "order_by": ["contract_ref__value"], -# "attributes": [ -# { -# "name": "contract_ref", -# "label": "Contract Reference", -# "kind": "Text", -# "unique": True, -# }, -# {"name": "description", "kind": "Text", "optional": True}, -# ], -# "relationships": [ -# { -# "name": "tags", -# "peer": "BuiltinTag", -# "optional": True, -# "cardinality": "many", -# "kind": "Attribute", -# }, -# ], -# } -# ], -# "extensions": { -# "nodes": [ -# { -# "kind": "BuiltinTag", -# "relationships": [ -# { -# 
"name": "contracts", -# "peer": "ProcurementContract", -# "optional": True, -# "cardinality": "many", -# "kind": "Generic", -# } -# ], -# } -# ] -# }, -# } - - -# @pytest.fixture(scope="module") -# async def ipam_schema() -> SchemaRoot: -# SCHEMA = { -# "version": "1.0", -# "nodes": [ -# { -# "name": "IPPrefix", -# "namespace": "Ipam", -# "include_in_menu": False, -# "inherit_from": ["BuiltinIPPrefix"], -# "description": "IPv4 or IPv6 network", -# "icon": "mdi:ip-network", -# "label": "IP Prefix", -# }, -# { -# "name": "IPAddress", -# "namespace": "Ipam", -# "include_in_menu": False, -# "inherit_from": ["BuiltinIPAddress"], -# "description": "IP Address", -# "icon": "mdi:ip-outline", -# "label": "IP Address", -# }, -# { -# "name": "Device", -# "namespace": "Infra", -# "label": "Device", -# "human_friendly_id": ["name__value"], -# "order_by": ["name__value"], -# "display_labels": ["name__value"], -# "attributes": [{"name": "name", "kind": "Text", "unique": True}], -# "relationships": [ -# { -# "name": "primary_address", -# "peer": "IpamIPAddress", -# "label": "Primary IP Address", -# "optional": True, -# "cardinality": "one", -# "kind": "Attribute", -# } -# ], -# }, -# ], -# } - -# return SCHEMA - - -# @pytest.fixture(scope="module") -# async def hierarchical_schema() -> dict: -# schema = { -# "version": "1.0", -# "generics": [ -# { -# "name": "Generic", -# "namespace": "Location", -# "description": "Generic hierarchical location", -# "label": "Location", -# "hierarchical": True, -# "human_friendly_id": ["name__value"], -# "include_in_menu": True, -# "attributes": [ -# {"name": "name", "kind": "Text", "unique": True, "order_weight": 900}, -# ], -# } -# ], -# "nodes": [ -# { -# "name": "Country", -# "namespace": "Location", -# "description": "A country within a continent.", -# "inherit_from": ["LocationGeneric"], -# "generate_profile": False, -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["name__value"], -# "children": "LocationSite", -# "attributes": [{"name": "shortname", "kind": "Text"}], -# }, -# { -# "name": "Site", -# "namespace": "Location", -# "description": "A site within a country.", -# "inherit_from": ["LocationGeneric"], -# "default_filter": "name__value", -# "order_by": ["name__value"], -# "display_labels": ["name__value"], -# "children": "", -# "parent": "LocationCountry", -# "attributes": [{"name": "shortname", "kind": "Text"}], -# }, -# ], -# } -# return schema - - -# class BusRecorder(InfrahubMessageBus): -# def __init__(self, component_type: Optional[ComponentType] = None): -# self.messages: list[InfrahubMessage] = [] -# self.messages_per_routing_key: dict[str, list[InfrahubMessage]] = {} - -# async def publish( -# self, message: InfrahubMessage, routing_key: str, delay: Optional[MessageTTL] = None, is_retry: bool = False -# ) -> None: -# self.messages.append(message) -# if routing_key not in self.messages_per_routing_key: -# self.messages_per_routing_key[routing_key] = [] -# self.messages_per_routing_key[routing_key].append(message) - -# @property -# def seen_routing_keys(self) -> list[str]: -# return list(self.messages_per_routing_key.keys()) +@pytest.fixture(scope="module") +def schema_extension_01() -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [ + { + "name": "Rack", + "namespace": "Infra", + "description": "A Rack represents a physical two- or four-post equipment rack.", + "label": "Rack", + "default_filter": "name__value", + "display_labels": ["name__value"], + "attributes": [ + {"name": "name", "kind": 
"Text"}, + {"name": "description", "kind": "Text", "optional": True}, + ], + "relationships": [ + { + "name": "tags", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "many", + "kind": "Attribute", + }, + ], + } + ], + "extensions": { + "nodes": [ + { + "kind": "BuiltinTag", + "relationships": [ + { + "name": "racks", + "peer": "InfraRack", + "optional": True, + "cardinality": "many", + "kind": "Generic", + } + ], + } + ] + }, + } + + +@pytest.fixture(scope="module") +def schema_extension_02() -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [ + { + "name": "Contract", + "namespace": "Procurement", + "description": "Generic Contract", + "label": "Contract", + "display_labels": ["contract_ref__value"], + "order_by": ["contract_ref__value"], + "attributes": [ + { + "name": "contract_ref", + "label": "Contract Reference", + "kind": "Text", + "unique": True, + }, + {"name": "description", "kind": "Text", "optional": True}, + ], + "relationships": [ + { + "name": "tags", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "many", + "kind": "Attribute", + }, + ], + } + ], + "extensions": { + "nodes": [ + { + "kind": "BuiltinTag", + "relationships": [ + { + "name": "contracts", + "peer": "ProcurementContract", + "optional": True, + "cardinality": "many", + "kind": "Generic", + } + ], + } + ] + }, + } + + +@pytest.fixture(scope="module") +def hierarchical_schema() -> dict[str, Any]: + return { + "version": "1.0", + "generics": [ + { + "name": "Generic", + "namespace": "Location", + "description": "Generic hierarchical location", + "label": "Location", + "hierarchical": True, + "human_friendly_id": ["name__value"], + "include_in_menu": True, + "attributes": [ + {"name": "name", "kind": "Text", "unique": True, "order_weight": 900}, + ], + } + ], + "nodes": [ + { + "name": "Country", + "namespace": "Location", + "description": "A country within a continent.", + "inherit_from": ["LocationGeneric"], + "generate_profile": False, + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "children": "LocationSite", + "attributes": [{"name": "shortname", "kind": "Text"}], + }, + { + "name": "Site", + "namespace": "Location", + "description": "A site within a country.", + "inherit_from": ["LocationGeneric"], + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "children": "", + "parent": "LocationCountry", + "attributes": [{"name": "shortname", "kind": "Text"}], + }, + ], + } + + +@pytest.fixture(scope="module") +def ipam_schema() -> dict[str, Any]: + return { + "version": "1.0", + "nodes": [ + { + "name": "IPPrefix", + "namespace": "Ipam", + "include_in_menu": False, + "inherit_from": ["BuiltinIPPrefix"], + "description": "IPv4 or IPv6 network", + "icon": "mdi:ip-network", + "label": "IP Prefix", + }, + { + "name": "IPAddress", + "namespace": "Ipam", + "include_in_menu": False, + "inherit_from": ["BuiltinIPAddress"], + "description": "IP Address", + "icon": "mdi:ip-outline", + "label": "IP Address", + }, + { + "name": "Device", + "namespace": "Infra", + "label": "Device", + "human_friendly_id": ["name__value"], + "order_by": ["name__value"], + "display_labels": ["name__value"], + "attributes": [{"name": "name", "kind": "Text", "unique": True}], + "relationships": [ + { + "name": "primary_address", + "peer": "IpamIPAddress", + "label": "Primary IP Address", + "optional": True, + "cardinality": "one", + "kind": "Attribute", + } + ], + }, + ], + } diff --git 
a/tests/integration/test_convert_object_type.py b/tests/integration/test_convert_object_type.py index 7aee141a..f5581e2f 100644 --- a/tests/integration/test_convert_object_type.py +++ b/tests/integration/test_convert_object_type.py @@ -1,7 +1,7 @@ from __future__ import annotations import uuid -from typing import Any +from typing import TYPE_CHECKING, Any import pytest @@ -9,6 +9,9 @@ from infrahub_sdk.testing.docker import TestInfrahubDockerClient from tests.constants import CLIENT_TYPE_ASYNC, CLIENT_TYPES +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient, InfrahubClientSync + SCHEMA: dict[str, Any] = { "version": "1.0", "generics": [ @@ -63,7 +66,9 @@ class TestConvertObjectType(TestInfrahubDockerClient): @pytest.mark.parametrize("client_type", CLIENT_TYPES) - async def test_convert_object_type(self, client, client_sync, client_type) -> None: + async def test_convert_object_type( + self, client: InfrahubClient, client_sync: InfrahubClientSync, client_type: str + ) -> None: resp = await client.schema.load(schemas=[SCHEMA], wait_until_converged=True) assert not resp.errors diff --git a/tests/integration/test_export_import.py b/tests/integration/test_export_import.py index 846a4803..a138728f 100644 --- a/tests/integration/test_export_import.py +++ b/tests/integration/test_export_import.py @@ -1,545 +1,327 @@ -# from pathlib import Path -# from typing import Any, Dict -# -# import pytest -# import ujson -# -# from infrahub_sdk import InfrahubClient -# from infrahub_sdk.ctl.exporter import LineDelimitedJSONExporter -# from infrahub_sdk.ctl.importer import LineDelimitedJSONImporter -# from infrahub_sdk.exceptions import SchemaNotFoundError -# from infrahub_sdk.transfer.exceptions import TransferFileNotFoundError -# from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter -# from tests.helpers.test_app import TestInfrahubApp -# -# PERSON_KIND = "TestingPerson" -# POOL_KIND = "TestingPool" -# CAR_KIND = "TestingCar" -# MANUFACTURER_KIND = "TestingManufacturer" -# TAG_KIND = "TestingTag" -# -# -# -# -# class TestSchemaExportImportBase(TestInfrahubApp): -# @pytest.fixture(scope="class") -# def temporary_directory(self, tmp_path_factory) -> Path: -# return tmp_path_factory.mktemp("infrahub-integration-tests") -# -# @pytest.fixture(scope="class") -# def schema_person_base(self) -> Dict[str, Any]: -# return { -# "name": "Person", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Person", -# "attributes": [ -# {"name": "name", "kind": "Text"}, -# {"name": "description", "kind": "Text", "optional": True}, -# {"name": "height", "kind": "Number", "optional": True}, -# ], -# "relationships": [ -# {"name": "cars", "kind": "Generic", "optional": True, "peer": "TestingCar", "cardinality": "many"} -# ], -# } -# -# @pytest.fixture(scope="class") -# def schema_car_base(self) -> Dict[str, Any]: -# return { -# "name": "Car", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Car", -# "attributes": [ -# {"name": "name", "kind": "Text"}, -# {"name": "description", "kind": "Text", "optional": True}, -# {"name": "color", "kind": "Text"}, -# ], -# "relationships": [ -# { -# "name": "owner", -# "kind": "Attribute", -# "optional": False, -# "peer": "TestingPerson", -# "cardinality": "one", -# }, -# { -# "name": "manufacturer", -# "kind": "Attribute", -# "optional": False, -# "peer": "TestingManufacturer", -# "cardinality": "one", -# "identifier": "car__manufacturer", -# }, -# ], -# } -# -# @pytest.fixture(scope="class") -# def 
schema_manufacturer_base(self) -> Dict[str, Any]: -# return { -# "name": "Manufacturer", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Manufacturer", -# "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], -# "relationships": [ -# { -# "name": "cars", -# "kind": "Generic", -# "optional": True, -# "peer": "TestingCar", -# "cardinality": "many", -# "identifier": "car__manufacturer", -# }, -# { -# "name": "customers", -# "kind": "Generic", -# "optional": True, -# "peer": "TestingPerson", -# "cardinality": "many", -# "identifier": "person__manufacturer", -# }, -# ], -# } -# -# @pytest.fixture(scope="class") -# def schema_tag_base(self) -> Dict[str, Any]: -# return { -# "name": "Tag", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Testing Tag", -# "attributes": [{"name": "name", "kind": "Text"}], -# "relationships": [ -# {"name": "cars", "kind": "Generic", "optional": True, "peer": "TestingCar", "cardinality": "many"}, -# { -# "name": "persons", -# "kind": "Generic", -# "optional": True, -# "peer": "TestingPerson", -# "cardinality": "many", -# }, -# ], -# } -# -# @pytest.fixture(scope="class") -# def schema(self, schema_car_base, schema_person_base, schema_manufacturer_base, schema_tag_base) -> Dict[str, Any]: -# return { -# "version": "1.0", -# "nodes": [schema_person_base, schema_car_base, schema_manufacturer_base, schema_tag_base], -# } -# -# @pytest.fixture(scope="class") -# async def initial_dataset(self, client: InfrahubClient, schema): -# await client.schema.load(schemas=[schema]) -# -# john = await client.create( -# kind=PERSON_KIND, data=dict(name="John", height=175, description="The famous Joe Doe") -# ) -# await john.save() -# -# jane = await client.create( -# kind=PERSON_KIND, data=dict(name="Jane", height=165, description="The famous Jane Doe") -# ) -# await jane.save() -# -# honda = await client.create(kind=MANUFACTURER_KIND, data=dict(name="honda", description="Honda Motor Co., Ltd")) -# await honda.save() -# -# renault = await client.create( -# kind=MANUFACTURER_KIND, -# data=dict(name="renault", description="Groupe Renault is a French multinational automobile manufacturer"), -# ) -# await renault.save() -# -# accord = await client.create( -# kind=CAR_KIND, -# data=dict(name="accord", description="Honda Accord", color="#3443eb", manufacturer=honda, owner=jane), -# ) -# await accord.save() -# -# civic = await client.create( -# kind=CAR_KIND, -# data=dict(name="civic", description="Honda Civic", color="#c9eb34", manufacturer=honda, owner=jane), -# ) -# await civic.save() -# -# megane = await client.create( -# kind=CAR_KIND, -# data=dict(name="Megane", description="Renault Megane", color="#c93420", manufacturer=renault, owner=john), -# ) -# await megane.save() -# -# blue = await client.create(kind=TAG_KIND, data=dict(name="blue", cars=[accord, civic], persons=[jane])) -# await blue.save() -# -# red = await client.create(kind=TAG_KIND, data=dict(name="red", persons=[john])) -# await red.save() -# -# objs = { -# "john": john.id, -# "jane": jane.id, -# "honda": honda.id, -# "renault": renault.id, -# "accord": accord.id, -# "civic": civic.id, -# "megane": megane.id, -# "blue": blue.id, -# "red": red.id, -# } -# -# return objs -# -# def reset_export_directory(self, temporary_directory: Path): -# for file in temporary_directory.iterdir(): -# if file.is_file(): -# file.unlink() -# -# async def test_step01_export_no_schema(self, client: InfrahubClient, temporary_directory: Path): -# exporter 
= LineDelimitedJSONExporter(client=client) -# await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) -# -# nodes_file = temporary_directory / "nodes.json" -# relationships_file = temporary_directory / "relationships.json" -# -# # Export should create files even if they do not really hold any data -# assert nodes_file.exists() -# assert relationships_file.exists() -# -# # Verify that only the admin account has been exported -# with nodes_file.open() as f: -# admin_account_node_dump = ujson.loads(f.readline()) -# assert admin_account_node_dump -# assert admin_account_node_dump["kind"] == "CoreAccount" -# assert ujson.loads(admin_account_node_dump["graphql_json"])["name"]["value"] == "admin" -# -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert relationships_dump -# -# async def test_step02_import_no_schema(self, client: InfrahubClient, temporary_directory: Path): -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # Schema should not be present -# for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): -# with pytest.raises(SchemaNotFoundError): -# await client.all(kind=kind) -# -# # Cleanup for next tests -# self.reset_export_directory(temporary_directory) -# -# async def test_step03_export_empty_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): -# await client.schema.load(schemas=[schema]) -# -# exporter = LineDelimitedJSONExporter(client=client) -# await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) -# -# nodes_file = temporary_directory / "nodes.json" -# relationships_file = temporary_directory / "relationships.json" -# -# # Export should create files even if they do not really hold any data -# assert nodes_file.exists() -# assert relationships_file.exists() -# -# # Verify that only the admin account has been exported -# with nodes_file.open() as f: -# admin_account_node_dump = ujson.loads(f.readline()) -# assert admin_account_node_dump -# assert admin_account_node_dump["kind"] == "CoreAccount" -# assert ujson.loads(admin_account_node_dump["graphql_json"])["name"]["value"] == "admin" -# -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert relationships_dump -# -# async def test_step04_import_empty_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): -# await client.schema.load(schemas=[schema]) -# -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # No data for any kind should be retrieved -# for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): -# assert not await client.all(kind=kind) -# -# # Cleanup for next tests -# self.reset_export_directory(temporary_directory) -# -# async def test_step05_export_initial_dataset( -# self, client: InfrahubClient, temporary_directory: Path, initial_dataset -# ): -# exporter = LineDelimitedJSONExporter(client=client) -# await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) -# -# nodes_file = temporary_directory / "nodes.json" -# relationships_file = temporary_directory / "relationships.json" -# -# # Export should create files -# assert nodes_file.exists() -# assert relationships_file.exists() -# -# # Verify that nodes have been exported -# 
nodes_dump = [] -# with nodes_file.open() as reader: -# while line := reader.readline(): -# nodes_dump.append(ujson.loads(line)) -# assert len(nodes_dump) == len(initial_dataset) + 5 # add number to account for default data -# -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert relationships_dump -# -# async def test_step06_import_initial_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): -# await client.schema.load(schemas=[schema]) -# -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # Each kind must have nodes -# for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): -# assert await client.all(kind=kind) -# -# async def test_step07_import_initial_dataset_with_existing_data( -# self, client: InfrahubClient, temporary_directory: Path, initial_dataset -# ): -# # Count existing nodes -# counters: Dict[str, int] = {} -# for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): -# nodes = await client.all(kind=kind) -# counters[kind] = len(nodes) -# -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # Nodes must not be duplicated -# for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): -# nodes = await client.all(kind=kind) -# assert len(nodes) == counters[kind] -# -# # Cleanup for next tests -# self.reset_export_directory(temporary_directory) -# -# async def test_step99_import_wrong_drectory(self, client: InfrahubClient): -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# # Using a directory that does not exist, should lead to exception -# with pytest.raises(TransferFileNotFoundError): -# await importer.import_data(import_directory=Path("this_directory_does_not_exist"), branch="main") -# -# -# class TestSchemaExportImportManyRelationships(TestInfrahubApp): -# @pytest.fixture(scope="class") -# def temporary_directory(self, tmp_path_factory) -> Path: -# return tmp_path_factory.mktemp("infrahub-integration-tests") -# -# @pytest.fixture(scope="class") -# def schema_pool_base(self) -> Dict[str, Any]: -# return { -# "name": "Pool", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Pool", -# "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], -# "relationships": [ -# { -# "name": "cars", -# "kind": "Attribute", -# "optional": True, -# "peer": "TestingCar", -# "cardinality": "many", -# "identifier": "car__pool", -# } -# ], -# } -# -# @pytest.fixture(scope="class") -# def schema_car_base(self) -> Dict[str, Any]: -# return { -# "name": "Car", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Car", -# "attributes": [ -# {"name": "name", "kind": "Text"}, -# {"name": "description", "kind": "Text", "optional": True}, -# {"name": "color", "kind": "Text"}, -# ], -# "relationships": [ -# { -# "name": "pools", -# "kind": "Attribute", -# "optional": True, -# "peer": "TestingPool", -# "cardinality": "many", -# "identifier": "car__pool", -# }, -# { -# "name": "manufacturer", -# "kind": "Attribute", -# "optional": False, -# "peer": "TestingManufacturer", -# "cardinality": "one", -# "identifier": "car__manufacturer", -# }, -# ], -# } -# -# @pytest.fixture(scope="class") -# def 
schema_manufacturer_base(self) -> Dict[str, Any]: -# return { -# "name": "Manufacturer", -# "namespace": "Testing", -# "include_in_menu": True, -# "label": "Manufacturer", -# "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], -# "relationships": [ -# { -# "name": "cars", -# "kind": "Generic", -# "optional": True, -# "peer": "TestingCar", -# "cardinality": "many", -# "identifier": "car__manufacturer", -# } -# ], -# } -# -# @pytest.fixture(scope="class") -# def schema(self, schema_car_base, schema_pool_base, schema_manufacturer_base) -> Dict[str, Any]: -# return { -# "version": "1.0", -# "nodes": [schema_pool_base, schema_car_base, schema_manufacturer_base], -# } -# -# @pytest.fixture(scope="class") -# async def initial_dataset(self, client: InfrahubClient, schema): -# await client.schema.load(schemas=[schema]) -# -# bmw = await client.create( -# kind=MANUFACTURER_KIND, -# data=dict( -# name="BMW", -# description="Bayerische Motoren Werke AG is a German multinational manufacturer of luxury vehicles and motorcycles", -# ), -# ) -# await bmw.save() -# -# fiat = await client.create( -# kind=MANUFACTURER_KIND, -# data=dict(name="Fiat", description="Fiat Automobiles S.p.A. is an Italian automobile manufacturer"), -# ) -# await fiat.save() -# -# five_series = await client.create( -# kind=CAR_KIND, data=dict(name="5 series", description="BMW 5 series", color="#000000", manufacturer=bmw) -# ) -# await five_series.save() -# -# five_hundred = await client.create( -# kind=CAR_KIND, data=dict(name="500", description="Fiat 500", color="#540302", manufacturer=fiat) -# ) -# await five_hundred.save() -# -# premium = await client.create( -# kind=POOL_KIND, data=dict(name="Premium", description="Premium cars", cars=[five_series]) -# ) -# await premium.save() -# -# compact = await client.create( -# kind=POOL_KIND, data=dict(name="Compact", description="Compact cars", cars=[five_hundred]) -# ) -# await compact.save() -# -# sedan = await client.create( -# kind=POOL_KIND, data=dict(name="Sedan", description="Sedan cars", cars=[five_series]) -# ) -# await sedan.save() -# -# city_cars = await client.create( -# kind=POOL_KIND, data=dict(name="City", description="City cars", cars=[five_hundred]) -# ) -# await city_cars.save() -# -# objs = { -# "bmw": bmw.id, -# "fiat": fiat.id, -# "5series": five_series.id, -# "500": five_hundred.id, -# "premium": premium.id, -# "compact": compact.id, -# "sedan": sedan.id, -# "city_cars": city_cars.id, -# } -# -# return objs -# -# def reset_export_directory(self, temporary_directory: Path): -# for file in temporary_directory.iterdir(): -# if file.is_file(): -# file.unlink() -# -# async def test_step01_export_initial_dataset( -# self, client: InfrahubClient, temporary_directory: Path, initial_dataset -# ): -# exporter = LineDelimitedJSONExporter(client=client) -# await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) -# -# nodes_file = temporary_directory / "nodes.json" -# relationships_file = temporary_directory / "relationships.json" -# -# # Export should create files -# assert nodes_file.exists() -# assert relationships_file.exists() -# -# # Verify that nodes have been exported -# nodes_dump = [] -# with nodes_file.open() as reader: -# while line := reader.readline(): -# nodes_dump.append(ujson.loads(line)) -# assert len(nodes_dump) == len(initial_dataset) + 5 # add number to account for default data -# -# # Make sure there are as many relationships as there are in the database -# 
relationship_count = 0 -# for node in await client.all(kind=POOL_KIND): -# await node.cars.fetch() -# relationship_count += len(node.cars.peers) -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert len(relationships_dump) == relationship_count + 1 # add number to account for default data -# -# async def test_step02_import_initial_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): -# await client.schema.load(schemas=[schema]) -# -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # Each kind must have nodes -# for kind in (POOL_KIND, CAR_KIND, MANUFACTURER_KIND): -# assert await client.all(kind=kind) -# -# # Make sure relationships were properly imported -# relationship_count = 0 -# for node in await client.all(kind=POOL_KIND): -# await node.cars.fetch() -# relationship_count += len(node.cars.peers) -# relationships_file = temporary_directory / "relationships.json" -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert len(relationships_dump) == relationship_count + 1 # add number to account for default data -# -# async def test_step03_import_initial_dataset_with_existing_data( -# self, client: InfrahubClient, temporary_directory: Path, initial_dataset -# ): -# importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) -# await importer.import_data(import_directory=temporary_directory, branch="main") -# -# # Each kind must have nodes -# for kind in (POOL_KIND, CAR_KIND, MANUFACTURER_KIND): -# assert await client.all(kind=kind) -# -# # Make sure relationships were properly imported -# relationship_count = 0 -# for node in await client.all(kind=POOL_KIND): -# await node.cars.fetch() -# relationship_count += len(node.cars.peers) -# relationships_file = temporary_directory / "relationships.json" -# relationships_dump = ujson.loads(relationships_file.read_text()) -# assert len(relationships_dump) == relationship_count + 1 # add number to account for default data -# -# # Cleanup for next tests -# self.reset_export_directory(temporary_directory) +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import pytest +import ujson + +from infrahub_sdk.exceptions import SchemaNotFoundError +from infrahub_sdk.testing.docker import TestInfrahubDockerClient +from infrahub_sdk.testing.schemas.car_person import TESTING_CAR, TESTING_MANUFACTURER, TESTING_PERSON, SchemaCarPerson +from infrahub_sdk.transfer.exceptions import TransferFileNotFoundError +from infrahub_sdk.transfer.exporter.json import LineDelimitedJSONExporter +from infrahub_sdk.transfer.importer.json import LineDelimitedJSONImporter +from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter + +if TYPE_CHECKING: + from pytest import TempPathFactory + + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode + from infrahub_sdk.schema import SchemaRoot + + +class TestSchemaExportImportBase(TestInfrahubDockerClient, SchemaCarPerson): + @pytest.fixture(scope="class") + def temporary_directory(self, tmp_path_factory: TempPathFactory) -> Path: + return tmp_path_factory.mktemp("infrahub-integration-tests") + + @pytest.fixture(scope="class") + async def load_schema(self, client: InfrahubClient, schema_base: SchemaRoot) -> None: + resp = await 
client.schema.load(schemas=[schema_base.to_schema_dict()], wait_until_converged=True) + assert resp.errors == {} + + @pytest.fixture(scope="class") + async def initial_dataset( + self, + client: InfrahubClient, + load_schema: None, + person_joe: InfrahubNode, + person_jane: InfrahubNode, + tag_blue: InfrahubNode, + tag_red: InfrahubNode, + ) -> dict[str, Any]: + honda = await client.create(kind=TESTING_MANUFACTURER, name="Honda", description="Honda Motor Co., Ltd") + await honda.save() + + renault = await client.create(kind=TESTING_MANUFACTURER, name="Renault", description="Groupe Renault") + await renault.save() + + accord = await client.create( + kind=TESTING_CAR, + name="Accord", + description="Honda Accord", + color="#3443eb", + manufacturer=honda, + owner=person_jane, + ) + await accord.save() + + civic = await client.create( + kind=TESTING_CAR, + name="Civic", + description="Honda Civic", + color="#c9eb34", + manufacturer=honda, + owner=person_jane, + ) + await civic.save() + + megane = await client.create( + kind=TESTING_CAR, + name="Megane", + description="Renault Megane", + color="#c93420", + manufacturer=renault, + owner=person_joe, + ) + await megane.save() + + await accord.tags.fetch() + accord.tags.add(tag_blue) + await accord.save() + + await civic.tags.fetch() + civic.tags.add(tag_blue) + await civic.save() + + return { + "joe": person_joe.id, + "jane": person_jane.id, + "honda": honda.id, + "renault": renault.id, + "accord": accord.id, + "civic": civic.id, + "megane": megane.id, + "blue": tag_blue.id, + "red": tag_red.id, + } + + def reset_export_directory(self, temporary_directory: Path) -> None: + for file in temporary_directory.iterdir(): + if file.is_file(): + file.unlink() + + async def test_step01_export_no_schema(self, client: InfrahubClient, temporary_directory: Path) -> None: + exporter = LineDelimitedJSONExporter(client=client) + await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) + + nodes_file = temporary_directory / "nodes.json" + relationships_file = temporary_directory / "relationships.json" + + assert nodes_file.exists() + assert relationships_file.exists() + + admin_found = False + with nodes_file.open() as f: + for line in f: + node_dump = ujson.loads(line) + if node_dump.get("kind") == "CoreAccount": + graphql_data = ujson.loads(node_dump["graphql_json"]) + if graphql_data.get("name", {}).get("value") == "admin": + admin_found = True + break + assert admin_found, "Admin account not found in exported nodes" + + async def test_step02_import_no_schema(self, client: InfrahubClient, temporary_directory: Path) -> None: + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + await importer.import_data(import_directory=temporary_directory, branch="main") + + for kind in (TESTING_PERSON, TESTING_CAR, TESTING_MANUFACTURER): + with pytest.raises(SchemaNotFoundError): + await client.all(kind=kind) + + self.reset_export_directory(temporary_directory) + + async def test_step03_export_initial_dataset( + self, client: InfrahubClient, temporary_directory: Path, initial_dataset: dict[str, Any] + ) -> None: + exporter = LineDelimitedJSONExporter(client=client) + await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) + + nodes_file = temporary_directory / "nodes.json" + relationships_file = temporary_directory / "relationships.json" + + assert nodes_file.exists() + assert relationships_file.exists() + + nodes_dump = [] + with nodes_file.open() as 
reader: + while line := reader.readline(): + nodes_dump.append(ujson.loads(line)) + assert len(nodes_dump) >= len(initial_dataset) + + relationships_dump = ujson.loads(relationships_file.read_text()) + assert relationships_dump + + async def test_step04_import_initial_dataset( + self, client: InfrahubClient, temporary_directory: Path, load_schema: None + ) -> None: + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + await importer.import_data(import_directory=temporary_directory, branch="main") + + for kind in (TESTING_PERSON, TESTING_CAR, TESTING_MANUFACTURER): + assert await client.all(kind=kind) + + async def test_step05_import_initial_dataset_with_existing_data( + self, client: InfrahubClient, temporary_directory: Path, initial_dataset: dict[str, Any] + ) -> None: + counters: dict[str, int] = {} + for kind in (TESTING_PERSON, TESTING_CAR, TESTING_MANUFACTURER): + nodes = await client.all(kind=kind) + counters[kind] = len(nodes) + + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + await importer.import_data(import_directory=temporary_directory, branch="main") + + for kind in (TESTING_PERSON, TESTING_CAR, TESTING_MANUFACTURER): + nodes = await client.all(kind=kind) + assert len(nodes) == counters[kind] + + self.reset_export_directory(temporary_directory) + + async def test_step99_import_wrong_directory(self, client: InfrahubClient) -> None: + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + with pytest.raises(TransferFileNotFoundError): + await importer.import_data(import_directory=Path("this_directory_does_not_exist"), branch="main") + + +class TestSchemaExportImportManyRelationships(TestInfrahubDockerClient, SchemaCarPerson): + @pytest.fixture(scope="class") + def temporary_directory(self, tmp_path_factory: TempPathFactory) -> Path: + return tmp_path_factory.mktemp("infrahub-integration-tests-many") + + @pytest.fixture(scope="class") + async def load_schema(self, client: InfrahubClient, schema_base: SchemaRoot) -> None: + resp = await client.schema.load(schemas=[schema_base.to_schema_dict()], wait_until_converged=True) + assert resp.errors == {} + + @pytest.fixture(scope="class") + async def initial_dataset( + self, + client: InfrahubClient, + load_schema: None, + person_joe: InfrahubNode, + person_jane: InfrahubNode, + tag_blue: InfrahubNode, + tag_red: InfrahubNode, + tag_green: InfrahubNode, + ) -> dict[str, Any]: + bmw = await client.create( + kind=TESTING_MANUFACTURER, + name="BMW", + description="Bayerische Motoren Werke AG is a German multinational manufacturer", + ) + await bmw.save() + + fiat = await client.create( + kind=TESTING_MANUFACTURER, + name="Fiat", + description="Fiat Automobiles S.p.A. 
is an Italian automobile manufacturer", + ) + await fiat.save() + + five_series = await client.create( + kind=TESTING_CAR, + name="5 series", + description="BMW 5 series", + color="#000000", + manufacturer=bmw, + owner=person_joe, + ) + await five_series.save() + + five_hundred = await client.create( + kind=TESTING_CAR, + name="500", + description="Fiat 500", + color="#540302", + manufacturer=fiat, + owner=person_jane, + ) + await five_hundred.save() + + await five_series.tags.fetch() + five_series.tags.add(tag_blue) + five_series.tags.add(tag_green) + await five_series.save() + + await five_hundred.tags.fetch() + five_hundred.tags.add(tag_red) + five_hundred.tags.add(tag_green) + await five_hundred.save() + + return { + "bmw": bmw.id, + "fiat": fiat.id, + "5series": five_series.id, + "500": five_hundred.id, + "blue": tag_blue.id, + "red": tag_red.id, + "green": tag_green.id, + } + + def reset_export_directory(self, temporary_directory: Path) -> None: + for file in temporary_directory.iterdir(): + if file.is_file(): + file.unlink() + + async def test_step01_export_initial_dataset( + self, client: InfrahubClient, temporary_directory: Path, initial_dataset: dict[str, Any] + ) -> None: + exporter = LineDelimitedJSONExporter(client=client) + await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) + + nodes_file = temporary_directory / "nodes.json" + relationships_file = temporary_directory / "relationships.json" + + assert nodes_file.exists() + assert relationships_file.exists() + + nodes_dump = [] + with nodes_file.open() as reader: + while line := reader.readline(): + nodes_dump.append(ujson.loads(line)) + assert len(nodes_dump) >= len(initial_dataset) + + relationships_dump = ujson.loads(relationships_file.read_text()) + assert relationships_dump + + async def test_step02_import_initial_dataset( + self, client: InfrahubClient, temporary_directory: Path, initial_dataset: dict[str, Any] + ) -> None: + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + await importer.import_data(import_directory=temporary_directory, branch="main") + + for kind in (TESTING_CAR, TESTING_MANUFACTURER): + assert await client.all(kind=kind) + + relationship_count = 0 + for node in await client.all(kind=TESTING_CAR): + await node.tags.fetch() + relationship_count += len(node.tags.peers) + assert relationship_count >= 4 + + async def test_step03_import_initial_dataset_with_existing_data( + self, client: InfrahubClient, temporary_directory: Path, initial_dataset: dict[str, Any] + ) -> None: + relationship_count_before = 0 + for node in await client.all(kind=TESTING_CAR): + await node.tags.fetch() + relationship_count_before += len(node.tags.peers) + + importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) + await importer.import_data(import_directory=temporary_directory, branch="main") + + for kind in (TESTING_CAR, TESTING_MANUFACTURER): + assert await client.all(kind=kind) + + relationship_count_after = 0 + for node in await client.all(kind=TESTING_CAR): + await node.tags.fetch() + relationship_count_after += len(node.tags.peers) + + assert relationship_count_after == relationship_count_before + + self.reset_export_directory(temporary_directory) diff --git a/tests/integration/test_infrahub_client.py b/tests/integration/test_infrahub_client.py index db86d759..2fe9d801 100644 --- a/tests/integration/test_infrahub_client.py +++ b/tests/integration/test_infrahub_client.py @@ -1,20 
+1,23 @@ from __future__ import annotations
 
 from collections.abc import AsyncGenerator
-from typing import TYPE_CHECKING
+from pathlib import Path
+from typing import Any
 
 import pytest
 
+from infrahub_sdk import Config, InfrahubClient
 from infrahub_sdk.branch import BranchData
+from infrahub_sdk.constants import InfrahubClientMode
 from infrahub_sdk.exceptions import BranchNotFoundError, URLNotFoundError
 from infrahub_sdk.node import InfrahubNode
-from infrahub_sdk.schema import ProfileSchemaAPI
+from infrahub_sdk.playback import JSONPlayback
+from infrahub_sdk.recorder import JSONRecorder
+from infrahub_sdk.schema import GenericSchema, NodeSchema, ProfileSchemaAPI
 from infrahub_sdk.task.models import Task, TaskFilter, TaskLog, TaskState
 from infrahub_sdk.testing.docker import TestInfrahubDockerClient
 from infrahub_sdk.testing.schemas.animal import TESTING_ANIMAL, TESTING_CAT, TESTING_DOG, TESTING_PERSON, SchemaAnimal
-
-if TYPE_CHECKING:
-    from infrahub_sdk import InfrahubClient
+from infrahub_sdk.types import Order
 
 
 class TestInfrahubNode(TestInfrahubDockerClient, SchemaAnimal):
@@ -22,14 +25,14 @@ class TestInfrahubNode(TestInfrahubDockerClient, SchemaAnimal):
     async def base_dataset(
         self,
         client: InfrahubClient,
-        load_schema,
-        person_liam,
-        person_ethan,
-        person_sophia,
-        cat_luna,
-        cat_bella,
-        dog_daisy,
-        dog_rocky,
+        load_schema: None,
+        person_liam: InfrahubNode,
+        person_ethan: InfrahubNode,
+        person_sophia: InfrahubNode,
+        cat_luna: InfrahubNode,
+        cat_bella: InfrahubNode,
+        dog_daisy: InfrahubNode,
+        dog_rocky: InfrahubNode,
     ) -> None:
         await client.branch.create(branch_name="branch01")
 
@@ -40,7 +43,7 @@ async def set_pagination_size3(self, client: InfrahubClient) -> AsyncGenerator:
         yield
         client.pagination_size = original_pagination_size
 
-    async def test_query_branches(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_query_branches(self, client: InfrahubClient, base_dataset: None) -> None:
         branches = await client.branch.all()
         main = await client.branch.get(branch_name="main")
 
@@ -51,7 +54,7 @@ async def test_query_branches(self, client: InfrahubClient, base_dataset) -> Non
         assert "main" in branches
         assert "branch01" in branches
 
-    async def test_branch_delete(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_branch_delete(self, client: InfrahubClient, base_dataset: None) -> None:
         async_branch = "async-delete-branch"
         await client.branch.create(branch_name=async_branch)
         pre_delete = await client.branch.all()
@@ -60,28 +63,27 @@ async def test_branch_delete(self, client: InfrahubClient, base_dataset) -> None
         assert async_branch in pre_delete
         assert async_branch not in post_delete
 
-    async def test_get_all(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_get_all(self, client: InfrahubClient, base_dataset: None) -> None:
         nodes = await client.all(kind=TESTING_CAT)
         assert len(nodes) == 2
         assert isinstance(nodes[0], InfrahubNode)
         assert [node.name.value for node in nodes] == ["Bella", "Luna"]
 
-    # TODO enable these tests for Infrahub version containing this commit
-    # https://github.com/opsmill/infrahub/commit/5a4d6860196b5bfb51fb8a124f33125f4a0b6753
-    # when we support testing against multiple Infrahub versions.
-    # async def test_get_all_no_order(self, client: InfrahubClient, base_dataset):
-    #     nodes = await client.all(kind=TESTING_CAT, order=Order(disable=True))
-    #     assert len(nodes) == 2
-    #     assert isinstance(nodes[0], InfrahubNode)
-    #     assert {node.name.value for node in nodes} == {"Bella", "Luna"}
-    #
-    # async def test_get_filters_no_order(self, client: InfrahubClient, base_dataset):
-    #     nodes = await client.filters(kind=TESTING_CAT, order=Order(disable=True))
-    #     assert len(nodes) == 2
-    #     assert isinstance(nodes[0], InfrahubNode)
-    #     assert {node.name.value for node in nodes} == {"Bella", "Luna"}
-
-    async def test_get_one(self, client: InfrahubClient, base_dataset, cat_luna, person_sophia) -> None:
+    async def test_get_all_no_order(self, client: InfrahubClient, base_dataset: None) -> None:
+        nodes = await client.all(kind=TESTING_CAT, order=Order(disable=True))
+        assert len(nodes) == 2
+        assert isinstance(nodes[0], InfrahubNode)
+        assert {node.name.value for node in nodes} == {"Bella", "Luna"}
+
+    async def test_get_filters_no_order(self, client: InfrahubClient, base_dataset: None) -> None:
+        nodes = await client.filters(kind=TESTING_CAT, order=Order(disable=True))
+        assert len(nodes) == 2
+        assert isinstance(nodes[0], InfrahubNode)
+        assert {node.name.value for node in nodes} == {"Bella", "Luna"}
+
+    async def test_get_one(
+        self, client: InfrahubClient, base_dataset: None, cat_luna: InfrahubNode, person_sophia: InfrahubNode
+    ) -> None:
         node1 = await client.get(kind=TESTING_CAT, id=cat_luna.id)
         assert isinstance(node1, InfrahubNode)
         assert node1.name.value == "Luna"
@@ -90,7 +92,7 @@ async def test_get_one(self, client: InfrahubClient, base_dataset, cat_luna, per
         assert isinstance(node2, InfrahubNode)
         assert node2.name.value == "Sophia Walker"
 
-    async def test_filters_partial_match(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_filters_partial_match(self, client: InfrahubClient, base_dataset: None) -> None:
         nodes = await client.filters(kind=TESTING_PERSON, name__value="Walker")
         assert not nodes
 
@@ -99,17 +101,34 @@ async def test_filters_partial_match(self, client: InfrahubClient, base_dataset)
         assert isinstance(nodes[0], InfrahubNode)
         assert sorted([node.name.value for node in nodes]) == ["Liam Walker", "Sophia Walker"]
 
-    async def test_get_generic(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_get_generic(self, client: InfrahubClient, base_dataset: None) -> None:
         nodes = await client.all(kind=TESTING_ANIMAL)
         assert len(nodes) == 4
 
-    async def test_get_generic_fragment(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_get_generic_fragment(self, client: InfrahubClient, base_dataset: None) -> None:
         nodes = await client.all(kind=TESTING_ANIMAL, fragment=True)
         assert len(nodes)
         assert nodes[0].typename in {TESTING_DOG, TESTING_CAT}
         assert nodes[0].breed.value is not None
 
-    async def test_get_related_nodes(self, client: InfrahubClient, base_dataset, person_ethan) -> None:
+    async def test_get_generic_filter_source(
+        self, client: InfrahubClient, base_dataset: None, person_liam: InfrahubNode
+    ) -> None:
+        admin = await client.get(kind="CoreAccount", name__value="admin")
+
+        obj = await client.create(
+            kind=TESTING_CAT, name={"value": "SourceFilterCat", "source": admin.id}, breed="Siamese", owner=person_liam
+        )
+        await obj.save()
+
+        nodes = await client.filters(kind="CoreNode", any__source__id=admin.id)
+        assert len(nodes) == 1
+        assert nodes[0].typename == TESTING_CAT
+        assert nodes[0].id == obj.id
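+
+    # NOTE: as exercised above, the `any__source__id` filter is understood to match
+    # nodes where the given account is recorded as the `source` metadata on any
+    # attribute, which is why the cat created with `"source": admin.id` is the only hit.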
+
+    async def test_get_related_nodes(
+        self, client: InfrahubClient, base_dataset: None, person_ethan: InfrahubNode
+    ) -> None:
         ethan = await client.get(kind=TESTING_PERSON, id=person_ethan.id)
         assert ethan
 
@@ -117,16 +136,21 @@ async def test_get_related_nodes(self, client: InfrahubClient, base_dataset, per
         await ethan.animals.fetch()
         assert len(ethan.animals.peers) == 3
 
-    async def test_profile(self, client: InfrahubClient, base_dataset, person_liam) -> None:
+    async def test_count(self, client: InfrahubClient, base_dataset: None) -> None:
+        count = await client.count(kind=TESTING_PERSON)
+        assert count == 3
+
+    async def test_count_with_filter(self, client: InfrahubClient, base_dataset: None) -> None:
+        count = await client.count(kind=TESTING_PERSON, name__values=["Liam Walker", "Ethan Carter"])
+        assert count == 2
+
+    async def test_profile(self, client: InfrahubClient, base_dataset: None, person_liam: InfrahubNode) -> None:
         profile_schema_kind = f"Profile{TESTING_DOG}"
         profile_schema = await client.schema.get(kind=profile_schema_kind)
         assert isinstance(profile_schema, ProfileSchemaAPI)
 
         profile1 = await client.create(
-            kind=profile_schema_kind,
-            profile_name="profile1",
-            profile_priority=1000,
-            color="#111111",
+            kind=profile_schema_kind, profile_name="profile1", profile_priority=1000, color="#111111"
         )
         await profile1.save()
 
@@ -138,29 +162,50 @@ async def test_profile(self, client: InfrahubClient, base_dataset, person_liam)
         obj1 = await client.get(kind=TESTING_DOG, id=obj.id)
         assert obj1.color.value == "#111111"
 
-    async def test_create_branch(self, client: InfrahubClient, base_dataset) -> None:
+    @pytest.mark.xfail(reason="Require Infrahub v1.7")
+    async def test_profile_relationship_is_from_profile(
+        self, client: InfrahubClient, base_dataset: None, person_liam: InfrahubNode
+    ) -> None:
+        tag = await client.create(kind="BuiltinTag", name="profile-tag-test")
+        await tag.save()
+
+        profile_schema_kind = f"Profile{TESTING_PERSON}"
+        profile = await client.create(
+            kind=profile_schema_kind, profile_name="person-profile-with-tag", profile_priority=1000, tags=[tag]
+        )
+        await profile.save()
+
+        person = await client.create(kind=TESTING_PERSON, name="Profile Relationship Test Person", profiles=[profile])
+        await person.save()
+
+        fetched_person = await client.get(kind=TESTING_PERSON, id=person.id, property=True, include=["tags"])
+        assert fetched_person.tags.initialized
+        assert len(fetched_person.tags.peers) == 1
+        assert fetched_person.tags.peers[0].id == tag.id
+        assert fetched_person.tags.peers[0].is_from_profile
+        assert fetched_person.tags.is_from_profile
+
+    async def test_create_branch(self, client: InfrahubClient, base_dataset: None) -> None:
         branch = await client.branch.create(branch_name="new-branch-1")
         assert isinstance(branch, BranchData)
         assert branch.id is not None
 
-    async def test_create_branch_async(self, client: InfrahubClient, base_dataset) -> None:
+    async def test_create_branch_async(self, client: InfrahubClient, base_dataset: None) -> None:
         task_id = await client.branch.create(branch_name="new-branch-2", wait_until_completion=False)
         assert isinstance(task_id, str)
 
-    async def test_count(self, client: InfrahubClient, base_dataset) -> None:
-        count = await client.count(kind=TESTING_PERSON)
-        assert count == 3
-
-    async def test_count_with_filter(self, client: InfrahubClient, base_dataset) -> None:
-        count = await client.count(kind=TESTING_PERSON, name__values=["Liam Walker", "Ethan Carter"])
-        assert count == 2
-
     async def test_query_unexisting_branch(self, client: InfrahubClient) -> None:
         with pytest.raises(URLNotFoundError, match=r"/graphql/unexisting` not found."):
             await client.execute_graphql(query="unused", branch_name="unexisting")
 
     async def test_create_generic_rel_with_hfid(
-        self, client: InfrahubClient, base_dataset, cat_luna, person_sophia, schema_animal, schema_cat
+        self,
+        client: InfrahubClient,
+        base_dataset: None,
+        cat_luna: InfrahubNode,
+        person_sophia: InfrahubNode,
+        schema_animal: GenericSchema,
+        schema_cat: NodeSchema,
     ) -> None:
         # See https://github.com/opsmill/infrahub-sdk-python/issues/277
         assert schema_animal.human_friendly_id != schema_cat.human_friendly_id, (
@@ -177,7 +222,9 @@ async def test_create_generic_rel_with_hfid(
         person_sophia = await client.get(kind=TESTING_PERSON, id=person_sophia.id, prefetch_relationships=True)
         assert not person_sophia.favorite_animal.id
 
-    async def test_task_query(self, client: InfrahubClient, base_dataset, set_pagination_size3) -> None:
+    async def test_task_query(
+        self, client: InfrahubClient, base_dataset: None, set_pagination_size3: AsyncGenerator[None, None]
+    ) -> None:
         nbr_tasks = await client.task.count()
         assert nbr_tasks
 
@@ -221,128 +268,113 @@ async def test_task_query(self, client: InfrahubClient, base_dataset, set_pagina
         assert all_logs[0].timestamp
         assert all_logs[0].severity
 
-    # async def test_get_generic_filter_source(self, client: InfrahubClient, base_dataset):
-    #     admin = await client.get(kind="CoreAccount", name__value="admin")
-
-    #     obj1 = await client.create(
-    #         kind="BuiltinLocation", name={"value": "jfk3", "source": admin.id}, description="new york", type="site"
-    #     )
-    #     await obj1.save()
-
-    #     nodes = await client.filters(kind="CoreNode", any__source__id=admin.id)
-    #     assert len(nodes) == 1
-    #     assert nodes[0].typename == "BuiltinLocation"
-    #     assert nodes[0].id == obj1.id
-
-
-# async def test_tracking_mode(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset):
-#     tag_names = ["BLUE", "RED", "YELLOW"]
-#     orgname = "Acme"
-#
-#     async def create_org_with_tag(clt: InfrahubClient, nbr_tags: int):
-#         tags = []
-#         for idx in range(nbr_tags):
-#             obj = await clt.create(kind="BuiltinTag", name=f"tracking-{tag_names[idx]}")
-#             await obj.save(allow_upsert=True)
-#             tags.append(obj)
-#
-#         org = await clt.create(kind="TestOrganization", name=orgname, tags=tags)
-#         await org.save(allow_upsert=True)
-#
-#     # First execution, we create one org with 3 tags
-#     nbr_tags = 3
-#     async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#         await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#     assert client.mode == InfrahubClientMode.DEFAULT
-#     group = await client.get(
-#         kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#     )
-#     assert len(group.members.peers) == 4
-#     tags = await client.all(kind="BuiltinTag")
-#     assert len(tags) == 3
-#
-#     # Second execution, we create one org with 2 tags but we don't delete the third one
-#     nbr_tags = 2
-#     async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=False) as clt:
-#         await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#     assert client.mode == InfrahubClientMode.DEFAULT
-#     group = await client.get(
-#         kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#     )
-#     assert len(group.members.peers) == 3
-#     tags = await client.all(kind="BuiltinTag")
-#     assert len(tags) == 3
-#
-#     # Third execution, we create one org with 1 tag and we delete the second one
-#     nbr_tags = 1
-#     async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#         await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#     assert client.mode == InfrahubClientMode.DEFAULT
-#     group = await client.get(
-#         kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#     )
-#     assert len(group.members.peers) == 2
-#
-#     tags = await client.all(kind="BuiltinTag")
-#     assert len(tags) == 2
-#
-#     # Forth one, validate that the group will not be updated if there is an exception
-#     nbr_tags = 3
-#     with pytest.raises(ValueError):
-#         async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#             await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#             raise ValueError("something happened")
-#
-#     assert client.mode == InfrahubClientMode.DEFAULT
-#     group = await client.get(
-#         kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#     )
-#     assert len(group.members.peers) == 2
-#
-# async def test_recorder_with_playback_rewrite_host(
-#     self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset, tmp_path: Path
-# ):
-#     client.config.custom_recorder = JSONRecorder(host="recorder-test", directory=str(tmp_path))
-#     nodes = await client.all(kind="CoreRepository")
-#
-#     playback_config = JSONPlayback(directory=str(tmp_path))
-#     config = Config(
-#         address="http://recorder-test:8000",
-#         requester=playback_config.async_request,
-#     )
-#     playback = InfrahubClient(config=config)
-#     recorded_nodes = await playback.all(kind="CoreRepository")
-#
-#     assert len(nodes) == 1
-#     assert nodes == recorded_nodes
-#     assert recorded_nodes[0].name.value == "repository1"
-#
-
-
-# # See issue #148.
-# async def test_hierarchical(
-#     self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset, hierarchical_schema
-# ):
-#     await client.schema.load(schemas=[hierarchical_schema])
-#
-#     location_country = await client.create(
-#         kind="LocationCountry", name="country_name", shortname="country_shortname"
-#     )
-#     await location_country.save()
-#
-#     location_site = await client.create(
-#         kind="LocationSite", name="site_name", shortname="site_shortname", parent=location_country
-#     )
-#     await location_site.save()
-#
-#     nodes = await client.all(kind="LocationSite", prefetch_relationships=True, populate_store=True)
-#     assert len(nodes) == 1
-#     site_node = nodes[0]
-#     assert site_node.name.value == "site_name"
-#     assert site_node.shortname.value == "site_shortname"
-#     country_node = site_node.parent.get()
-#     assert country_node.name.value == "country_name"
-#     assert country_node.shortname.value == "country_shortname"
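+    # NOTE: `start_tracking` is understood to collect every node saved inside the
+    # context into a CoreStandardGroup; with `delete_unused_nodes=True`, nodes left
+    # over from a previous run are deleted on exit (hence the member counts below).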
+    async def test_tracking_mode(self, client: InfrahubClient, base_dataset: None) -> None:
+        tag_names = ["BLUE", "RED", "YELLOW"]
+        person_name = "TrackingTestPerson"
+
+        async def create_person_with_tags(clt: InfrahubClient, nbr_tags: int) -> None:
+            tags = []
+            for idx in range(nbr_tags):
+                obj = await clt.create(kind="BuiltinTag", name=f"tracking-{tag_names[idx]}")
+                await obj.save(allow_upsert=True)
+                tags.append(obj)
+
+            person = await clt.create(kind=TESTING_PERSON, name=person_name, tags=tags)
+            await person.save(allow_upsert=True)
+
+        # First execution, we create one person with 3 tags
+        nbr_tags = 3
+        async with client.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt:
+            await create_person_with_tags(clt=clt, nbr_tags=nbr_tags)
+
+        assert client.mode == InfrahubClientMode.DEFAULT
+        group = await client.get(
+            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
+        )
+        assert len(group.members.peers) == 4  # 1 person + 3 tags
+
+        # Second execution, we create one person with 2 tags but we don't delete the third one
+        nbr_tags = 2
+        async with client.start_tracking(params={"person_name": person_name}, delete_unused_nodes=False) as clt:
+            await create_person_with_tags(clt=clt, nbr_tags=nbr_tags)
+
+        assert client.mode == InfrahubClientMode.DEFAULT
+        group = await client.get(
+            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
+        )
+        assert len(group.members.peers) == 3  # 1 person + 2 tags (third tag still exists but not in group)
+
+        # Third execution, we create one person with 1 tag and we delete the second one
+        nbr_tags = 1
+        async with client.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt:
+            await create_person_with_tags(clt=clt, nbr_tags=nbr_tags)
+
+        assert client.mode == InfrahubClientMode.DEFAULT
+        group = await client.get(
+            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
+        )
+        assert len(group.members.peers) == 2  # 1 person + 1 tag
+
+        # Fourth execution, validate that the group will not be updated if there is an exception
+        nbr_tags = 3
+        with pytest.raises(ValueError):
+            async with client.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt:
+                await create_person_with_tags(clt=clt, nbr_tags=nbr_tags)
+                raise ValueError("something happened")
+
+        # Group should still have 2 members since the exception caused a rollback
+        group = await client.get(
+            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
+        )
+        assert len(group.members.peers) == 2
+
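+    # NOTE: JSONRecorder is understood to persist each HTTP exchange to disk, keyed on
+    # the rewritten `recorder-test` host, so that JSONPlayback can answer the same
+    # requests later without a live server; both clients below issue an identical query.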
+    @pytest.mark.xfail(reason="https://github.com/opsmill/infrahub-sdk-python/issues/733")
+    async def test_recorder_with_playback_rewrite_host(
+        self, base_dataset: None, tmp_path: Path, infrahub_port: int
+    ) -> None:
+        # Create a fresh client for recording to ensure clean state (no cached schema)
+        recorder_config = Config(
+            username="admin",
+            password="infrahub",
+            address=f"http://localhost:{infrahub_port}",
+            custom_recorder=JSONRecorder(host="recorder-test", directory=str(tmp_path)),
+        )
+        recorder_client = InfrahubClient(config=recorder_config)
+
+        query = "query { BuiltinTag { edges { node { id name { value } } } } }"
+        result = await recorder_client.execute_graphql(query=query)
+
+        playback_config = JSONPlayback(directory=str(tmp_path))
+        config = Config(address=f"http://recorder-test:{infrahub_port}", requester=playback_config.async_request)
+        playback = InfrahubClient(config=config)
+        recorded_result = await playback.execute_graphql(query=query)
+
+        assert result == recorded_result
+        assert result.get("BuiltinTag", {}).get("edges") is not None
+
+
+class TestHierarchicalSchema(TestInfrahubDockerClient):
+    @pytest.fixture(scope="class")
+    async def load_hierarchical_schema(self, client: InfrahubClient, hierarchical_schema: dict[str, Any]) -> None:
+        resp = await client.schema.load(schemas=[hierarchical_schema], wait_until_converged=True)
+        assert resp.errors == {}
+
+    async def test_hierarchical(self, client: InfrahubClient, load_hierarchical_schema: None) -> None:
+        location_country = await client.create(
+            kind="LocationCountry", name="country_name", shortname="country_shortname"
+        )
+        await location_country.save()
+
+        location_site = await client.create(
+            kind="LocationSite", name="site_name", shortname="site_shortname", parent=location_country
+        )
+        await location_site.save()
+
+        nodes = await client.all(kind="LocationSite", prefetch_relationships=True, populate_store=True)
+        assert len(nodes) == 1
+        site_node = nodes[0]
+        assert site_node.name.value == "site_name"
+        assert site_node.shortname.value == "site_shortname"
+
+        country_node = site_node.parent.get()
+        assert country_node.name.value == "country_name"
diff --git a/tests/integration/test_infrahub_client_sync.py b/tests/integration/test_infrahub_client_sync.py
index d5b9ad48..472c3378 100644
--- a/tests/integration/test_infrahub_client_sync.py
+++ b/tests/integration/test_infrahub_client_sync.py
@@ -1,320 +1,380 @@
-# from __future__ import annotations
-#
-# from typing import TYPE_CHECKING
-#
-# import pytest
-# from infrahub.core.initialization import create_branch
-# from infrahub.core.node import Node
-# from infrahub.server import app
-#
-# from infrahub_sdk import Config, InfrahubClientSync
-# from infrahub_sdk.branch import BranchData
-# from infrahub_sdk.constants import InfrahubClientMode
-# from infrahub_sdk.exceptions import BranchNotFoundError
-# from infrahub_sdk.node import InfrahubNodeSync
-# from infrahub_sdk.playback import JSONPlayback
-# from infrahub_sdk.recorder import JSONRecorder
-# from infrahub_sdk.schema import ProfileSchema
-#
-# from .conftest import InfrahubTestClient
-#
-# if TYPE_CHECKING:
-#     from pathlib import Path
-#
-#     from infrahub.database import InfrahubDatabase
-#
-#
-#
-#
-#
-# class TestInfrahubClientSync:
-#     @pytest.fixture(scope="class")
-#     async def test_client(self) -> InfrahubTestClient:
-#         return InfrahubTestClient(app)
-#
-#     @pytest.fixture
-#     def client(self, test_client: InfrahubTestClient):
-#         config = Config(
-#             username="admin",
-#             password="infrahub",
-#             sync_requester=test_client.sync_request,
-#         )
-#         return InfrahubClientSync(config=config)
-#
-#     @pytest.fixture(scope="class")
-#     async def base_dataset(self, db: InfrahubDatabase, test_client: InfrahubTestClient, builtin_org_schema):
-#         config = Config(username="admin", password="infrahub", sync_requester=test_client.sync_request)
-#         client = InfrahubClientSync(config=config)
-#         response = client.schema.load(schemas=[builtin_org_schema])
-#         assert not response.errors
-#
-#         await create_branch(branch_name="branch01", db=db)
-#
-#         query_string = """
-#         query {
-#             branch {
-#                 id
-#                 name
-#             }
-#         }
-#         """
-#         obj1 = await Node.init(schema="CoreGraphQLQuery", db=db)
-#         await obj1.new(db=db, name="test_query2", description="test query", query=query_string)
-#         await obj1.save(db=db)
-#
-#         obj2 = await Node.init(schema="CoreRepository", db=db)
-#         await obj2.new(
-#             db=db,
-#             name="repository1",
-#             description="test repository",
-#             location="git@github.com:mock/test.git",
-#         )
-#         await obj2.save(db=db)
-#
-#         obj3 = await Node.init(schema="CoreTransformJinja2", db=db)
-#         await obj3.new(
-#             db=db,
-#             name="rfile1",
-#             description="test rfile",
-#             template_path="mytemplate.j2",
-#             repository=obj2,
-#             query=obj1,
-#         )
-#         await obj3.save(db=db)
-#
-#         obj4 = await Node.init(schema="CoreTransformPython", db=db)
-#         await obj4.new(
-#             db=db,
-#             name="transform01",
-#             description="test transform01",
-#             file_path="mytransformation.py",
-#             class_name="Transform01",
-#             query=obj1,
-#             repository=obj2,
-#         )
-#         await obj4.save(db=db)
-#
-#     async def test_query_branches(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         branches = client.branch.all()
-#         main = client.branch.get(branch_name="main")
-#
-#         with pytest.raises(BranchNotFoundError):
-#             client.branch.get(branch_name="not-found")
-#
-#         assert main.name == "main"
-#         assert "main" in branches
-#         assert "branch01" in branches
-#
-#     async def test_branch_delete(self, client: InfrahubClientSync, init_db_base, base_dataset, db):
-#         async_branch = "async-delete-branch"
-#         await create_branch(branch_name=async_branch, db=db)
-#
-#         pre_delete = client.branch.all()
-#         client.branch.delete(async_branch)
-#         post_delete = client.branch.all()
-#         assert async_branch in pre_delete.keys()
-#         assert async_branch not in post_delete.keys()
-#
-#     async def test_get_all(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         obj1 = client.create(kind="BuiltinLocation", name="jfk1", description="new york", type="site")
-#         obj1.save()
-#
-#         obj2 = client.create(kind="BuiltinLocation", name="sfo1", description="san francisco", type="site")
-#         obj2.save()
-#
-#         nodes = client.all(kind="BuiltinLocation")
-#         assert len(nodes) == 2
-#         assert isinstance(nodes[0], InfrahubNodeSync)
-#         assert sorted([node.name.value for node in nodes]) == ["jfk1", "sfo1"]  # type: ignore[attr-defined]
-#
-#     async def test_get_one(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         obj1 = client.create(kind="BuiltinLocation", name="jfk2", description="new york", type="site")
-#         obj1.save()
-#
-#         obj2 = client.create(kind="BuiltinLocation", name="sfo2", description="san francisco", type="site")
-#         obj2.save()
-#
-#         node1 = client.get(kind="BuiltinLocation", id=obj1.id)
-#         assert isinstance(node1, InfrahubNodeSync)
-#         assert node1.name.value == "jfk2"  # type: ignore[attr-defined]
-#
-#         node2 = client.get(kind="BuiltinLocation", id="jfk2")
-#         assert isinstance(node2, InfrahubNodeSync)
-#         assert node2.name.value == "jfk2"  # type: ignore[attr-defined]
-#
-#     async def test_filters_partial_match(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         nodes = client.filters(kind="BuiltinLocation", name__value="jfk")
-#         assert not nodes
-#
-#         nodes = client.filters(kind="BuiltinLocation", partial_match=True, name__value="jfk")
-#         assert len(nodes) == 2
-#         assert isinstance(nodes[0], InfrahubNodeSync)
-#         assert sorted([node.name.value for node in nodes]) == ["jfk1", "jfk2"]  # type: ignore[attr-defined]
-#
-#     async def test_get_generic(self, client: InfrahubClientSync, init_db_base):
-#         nodes = client.all(kind="CoreNode")
-#         assert len(nodes)
-#
-#     async def test_get_generic_fragment(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         nodes = client.all(kind="CoreGenericAccount", fragment=True, exclude=["type"])
-#         assert len(nodes)
-#         assert nodes[0].typename == "CoreAccount"
-#         assert nodes[0].name.value is not None  # type: ignore[attr-defined]
-#
-#     async def test_get_generic_filter_source(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         admin = client.get(kind="CoreAccount", name__value="admin")
-#
-#         obj1 = client.create(
-#             kind="BuiltinLocation", name={"value": "jfk3", "source": admin.id}, description="new york", type="site"
-#         )
-#         obj1.save()
-#
-#         nodes = client.filters(kind="CoreNode", any__source__id=admin.id)
-#         assert len(nodes) == 1
-#         assert nodes[0].typename == "BuiltinLocation"
-#         assert nodes[0].id == obj1.id
-#
-#     async def test_get_related_nodes(self, client: InfrahubClientSync, init_db_base, base_dataset):
-#         nodes = client.all(kind="CoreRepository")
-#         assert len(nodes) == 1
-#         repo = nodes[0]
-#
-#         assert repo.transformations.peers == []  # type: ignore[attr-defined]
-#         repo.transformations.fetch()  # type: ignore[attr-defined]
-#         assert len(repo.transformations.peers) == 2  # type: ignore[attr-defined]
-#
-#     def test_tracking_mode(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset):
-#         tag_names = ["BLUE", "RED", "YELLOW"]
-#         orgname = "Acme"
-#
-#         def create_org_with_tag(clt: InfrahubClientSync, nbr_tags: int):
-#             tags = []
-#             for idx in range(nbr_tags):
-#                 obj = clt.create(kind="BuiltinTag", name=f"tracking-{tag_names[idx]}")
-#                 obj.save(allow_upsert=True)
-#                 tags.append(obj)
-#
-#             org = clt.create(kind="TestOrganization", name=orgname, tags=tags)
-#             org.save(allow_upsert=True)
-#
-#         # First execution, we create one org with 3 tags
-#         nbr_tags = 3
-#         with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#             create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#         assert client.mode == InfrahubClientMode.DEFAULT
-#         group = client.get(
-#             kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#         )
-#         assert len(group.members.peers) == 4
-#         tags = client.all(kind="BuiltinTag")
-#         assert len(tags) == 3
-#
-#         # Second execution, we create one org with 2 tags but we don't delete the third one
-#         nbr_tags = 2
-#         with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=False) as clt:
-#             create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#         assert client.mode == InfrahubClientMode.DEFAULT
-#         group = client.get(
-#             kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#         )
-#         assert len(group.members.peers) == 3
-#         tags = client.all(kind="BuiltinTag")
-#         assert len(tags) == 3
-#
-#         # Third execution, we create one org with 1 tag and we delete the second one
-#         nbr_tags = 1
-#         with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#             create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#
-#         assert client.mode == InfrahubClientMode.DEFAULT
-#         group = client.get(
-#             kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#         )
-#         assert len(group.members.peers) == 2
-#
-#         tags = client.all(kind="BuiltinTag")
-#         assert len(tags) == 2
-#
-#         # Forth one, validate that the group will not be updated if there is an exception
-#         nbr_tags = 3
-#         with pytest.raises(ValueError):
-#             with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-#                 create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-#                 raise ValueError("something happened")
-#
-#         assert client.mode == InfrahubClientMode.DEFAULT
-#         group = client.get(
-#             kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-#         )
-#         assert len(group.members.peers) == 2
-#
-#     def test_recorder_with_playback(
-#         self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset, tmp_path: Path
-#     ):
-#         client.config.custom_recorder = JSONRecorder(directory=str(tmp_path))
-#         nodes = client.all(kind="CoreRepository")
-#
-#         playback_config = JSONPlayback(directory=str(tmp_path))
-#         config = Config(
-#             address=client.config.address,
-#             sync_requester=playback_config.sync_request,
-#         )
-#         playback = InfrahubClientSync(config=config)
-#         recorded_nodes = playback.all(kind="CoreRepository")
-#
-#         assert len(nodes) == 1
-#         assert nodes == recorded_nodes
-#         assert recorded_nodes[0].name.value == "repository1"
-#
-#     def test_profile(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset):
-#         profile_schema_kind = "ProfileBuiltinStatus"
-#         profile_schema = client.schema.get(kind=profile_schema_kind)
-#         assert isinstance(profile_schema, ProfileSchema)
-#
-#         profile1 = client.create(
-#             kind=profile_schema_kind,
-#             profile_name="profile1",
-#             profile_priority=1000,
-#             description="description in profile",
-#         )
-#         profile1.save()
-#
-#         obj = client.create(kind="BuiltinStatus", name="planned", profiles=[profile1])
-#         obj.save()
-#
-#         obj1 = client.get(kind="BuiltinStatus", id=obj.id)
-#         assert obj1.description.value == "description in profile"
-#
-#     def test_create_branch(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset):
-#         branch = client.branch.create(branch_name="new-branch-1")
-#         assert isinstance(branch, BranchData)
-#         assert branch.id is not None
-#
-#     def test_create_branch_async(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset):
-#         task_id = client.branch.create(branch_name="new-branch-2", wait_until_completion=False)
-#         assert isinstance(task_id, str)
-#
-#     # See issue #148.
-#     def test_hierarchical(
-#         self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset, hierarchical_schema
-#     ):
-#         client.schema.load(schemas=[hierarchical_schema])
-#
-#         location_country = client.create(kind="LocationCountry", name="country_name", shortname="country_shortname")
-#         location_country.save()
-#
-#         location_site = client.create(
-#             kind="LocationSite", name="site_name", shortname="site_shortname", parent=location_country
-#         )
-#         location_site.save()
-#
-#         nodes = client.all(kind="LocationSite", prefetch_relationships=True, populate_store=True)
-#         assert len(nodes) == 1
-#         site_node = nodes[0]
-#         assert site_node.name.value == "site_name"
-#         assert site_node.shortname.value == "site_shortname"
-#         country_node = site_node.parent.get()
-#         assert country_node.name.value == "country_name"
-#         assert country_node.shortname.value == "country_shortname"
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+import pytest
+
+from infrahub_sdk import Config, InfrahubClientSync
+from infrahub_sdk.branch import BranchData
+from infrahub_sdk.constants import InfrahubClientMode
+from infrahub_sdk.exceptions import BranchNotFoundError, URLNotFoundError
+from infrahub_sdk.node import InfrahubNodeSync
+from infrahub_sdk.playback import JSONPlayback
+from infrahub_sdk.recorder import JSONRecorder
+from infrahub_sdk.schema import GenericSchema, NodeSchema, ProfileSchemaAPI
+from infrahub_sdk.task.models import Task, TaskFilter, TaskLog, TaskState
+from infrahub_sdk.testing.docker import TestInfrahubDockerClient
+from infrahub_sdk.testing.schemas.animal import TESTING_ANIMAL, TESTING_CAT, TESTING_DOG, TESTING_PERSON, SchemaAnimal
+from infrahub_sdk.types import Order
+
+if TYPE_CHECKING:
+    from infrahub_sdk import InfrahubClient
+    from infrahub_sdk.node import InfrahubNode
+
+
+class TestInfrahubClientSync(TestInfrahubDockerClient, SchemaAnimal):
+    @pytest.fixture(scope="class")
+    async def base_dataset(
+        self,
+        client: InfrahubClient,
+        load_schema: None,
+        person_liam: InfrahubNode,
+        person_ethan: InfrahubNode,
+        person_sophia: InfrahubNode,
+        cat_luna: InfrahubNode,
+        cat_bella: InfrahubNode,
+        dog_daisy: InfrahubNode,
+        dog_rocky: InfrahubNode,
+    ) -> None:
+        await client.branch.create(branch_name="sync-branch01")
+
+    def test_query_branches(self, client_sync: InfrahubClientSync, base_dataset: None) -> None:
+        branches = client_sync.branch.all()
+        main = client_sync.branch.get(branch_name="main")
+
+        with pytest.raises(BranchNotFoundError):
+            client_sync.branch.get(branch_name="not-found")
+
+        assert main.name == "main"
+        assert "main" in branches
+        assert "sync-branch01" in branches
+
+    def test_branch_delete(self, client_sync: InfrahubClientSync, base_dataset: None) -> None:
+        sync_branch = "sync-delete-branch"
+        client_sync.branch.create(branch_name=sync_branch)
+        pre_delete = client_sync.branch.all()
+        client_sync.branch.delete(sync_branch)
+        post_delete = client_sync.branch.all()
+        assert sync_branch in pre_delete
+        assert sync_branch not in post_delete
+
+    def test_get_all(self, client_sync: InfrahubClientSync, base_dataset: None) -> None:
+        nodes = client_sync.all(kind=TESTING_CAT)
+        assert len(nodes) == 2
+        assert isinstance(nodes[0], InfrahubNodeSync)
+        assert [node.name.value for node in nodes] == ["Bella", "Luna"]
+
+    def test_get_all_no_order(self, client_sync: InfrahubClientSync, base_dataset: None) -> None:
+        nodes = client_sync.all(kind=TESTING_CAT, order=Order(disable=True))
+        assert len(nodes) == 
2 + assert isinstance(nodes[0], InfrahubNodeSync) + assert {node.name.value for node in nodes} == {"Bella", "Luna"} + + def test_get_filters_no_order(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + nodes = client_sync.filters(kind=TESTING_CAT, order=Order(disable=True)) + assert len(nodes) == 2 + assert isinstance(nodes[0], InfrahubNodeSync) + assert {node.name.value for node in nodes} == {"Bella", "Luna"} + + def test_get_one( + self, client_sync: InfrahubClientSync, base_dataset: None, cat_luna: InfrahubNode, person_sophia: InfrahubNode + ) -> None: + node1 = client_sync.get(kind=TESTING_CAT, id=cat_luna.id) + assert isinstance(node1, InfrahubNodeSync) + assert node1.name.value == "Luna" + + node2 = client_sync.get(kind=TESTING_PERSON, id=person_sophia.id) + assert isinstance(node2, InfrahubNodeSync) + assert node2.name.value == "Sophia Walker" + + def test_filters_partial_match(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + nodes = client_sync.filters(kind=TESTING_PERSON, name__value="Walker") + assert not nodes + + nodes = client_sync.filters(kind=TESTING_PERSON, partial_match=True, name__value="Walker") + assert len(nodes) == 2 + assert isinstance(nodes[0], InfrahubNodeSync) + assert sorted([node.name.value for node in nodes]) == ["Liam Walker", "Sophia Walker"] + + def test_get_generic(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + nodes = client_sync.all(kind=TESTING_ANIMAL) + assert len(nodes) == 4 + + def test_get_generic_fragment(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + nodes = client_sync.all(kind=TESTING_ANIMAL, fragment=True) + assert len(nodes) + assert nodes[0].typename in {TESTING_DOG, TESTING_CAT} + assert nodes[0].breed.value is not None + + def test_get_generic_filter_source( + self, client_sync: InfrahubClientSync, base_dataset: None, person_liam: InfrahubNode + ) -> None: + admin = client_sync.get(kind="CoreAccount", name__value="admin") + + obj = client_sync.create( + kind=TESTING_CAT, + name={"value": "SyncSourceFilterCat", "source": admin.id}, + breed="Siamese", + owner=person_liam, + ) + obj.save() + + nodes = client_sync.filters(kind="CoreNode", any__source__id=admin.id) + assert len(nodes) == 1 + assert nodes[0].typename == TESTING_CAT + assert nodes[0].id == obj.id + + def test_get_related_nodes( + self, client_sync: InfrahubClientSync, base_dataset: None, person_ethan: InfrahubNode + ) -> None: + ethan = client_sync.get(kind=TESTING_PERSON, id=person_ethan.id) + assert ethan + + assert ethan.animals.peers == [] + ethan.animals.fetch() + assert len(ethan.animals.peers) == 3 + + def test_count(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + count = client_sync.count(kind=TESTING_PERSON) + assert count == 3 + + def test_count_with_filter(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + count = client_sync.count(kind=TESTING_PERSON, name__values=["Liam Walker", "Ethan Carter"]) + assert count == 2 + + def test_profile(self, client_sync: InfrahubClientSync, base_dataset: None, person_liam: InfrahubNode) -> None: + profile_schema_kind = f"Profile{TESTING_DOG}" + profile_schema = client_sync.schema.get(kind=profile_schema_kind) + assert isinstance(profile_schema, ProfileSchemaAPI) + + profile1 = client_sync.create( + kind=profile_schema_kind, profile_name="sync-profile1", profile_priority=1000, color="#222222" + ) + profile1.save() + + obj = client_sync.create( + kind=TESTING_DOG, name="Sync-Sparky", breed="Poodle", 
owner=person_liam, profiles=[profile1] + ) + obj.save() + + obj1 = client_sync.get(kind=TESTING_DOG, id=obj.id) + assert obj1.color.value == "#222222" + + @pytest.mark.xfail(reason="Require Infrahub v1.7") + def test_profile_relationship_is_from_profile( + self, client_sync: InfrahubClientSync, base_dataset: None, person_liam: InfrahubNode + ) -> None: + tag = client_sync.create(kind="BuiltinTag", name="sync-profile-tag-test") + tag.save() + + profile_schema_kind = f"Profile{TESTING_PERSON}" + profile = client_sync.create( + kind=profile_schema_kind, profile_name="sync-person-profile-with-tag", profile_priority=1000, tags=[tag] + ) + profile.save() + + person = client_sync.create( + kind=TESTING_PERSON, name="Sync Profile Relationship Test Person", profiles=[profile] + ) + person.save() + + fetched_person = client_sync.get(kind=TESTING_PERSON, id=person.id, property=True, include=["tags"]) + assert fetched_person.tags.initialized + assert len(fetched_person.tags.peers) == 1 + assert fetched_person.tags.peers[0].id == tag.id + assert fetched_person.tags.peers[0].is_from_profile + assert fetched_person.tags.is_from_profile + + def test_create_branch(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + branch = client_sync.branch.create(branch_name="sync-new-branch-1") + assert isinstance(branch, BranchData) + assert branch.id is not None + + def test_create_branch_async(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + task_id = client_sync.branch.create(branch_name="sync-new-branch-2", wait_until_completion=False) + assert isinstance(task_id, str) + + def test_query_unexisting_branch(self, client_sync: InfrahubClientSync) -> None: + with pytest.raises(URLNotFoundError, match=r"/graphql/unexisting` not found."): + client_sync.execute_graphql(query="unused", branch_name="unexisting") + + def test_create_generic_rel_with_hfid( + self, + client_sync: InfrahubClientSync, + base_dataset: None, + cat_luna: InfrahubNode, + person_sophia: InfrahubNode, + schema_animal: GenericSchema, + schema_cat: NodeSchema, + ) -> None: + # See https://github.com/opsmill/infrahub-sdk-python/issues/277 + assert schema_animal.human_friendly_id != schema_cat.human_friendly_id, ( + "Inherited node schema should have a different hfid than generic one for this test to be relevant" + ) + person_sophia_sync = client_sync.get(kind=TESTING_PERSON, id=person_sophia.id) + person_sophia_sync.favorite_animal = {"hfid": cat_luna.hfid, "kind": TESTING_CAT} + person_sophia_sync.save() + person_sophia_sync = client_sync.get(kind=TESTING_PERSON, id=person_sophia.id, prefetch_relationships=True) + assert person_sophia_sync.favorite_animal.id == cat_luna.id + + # Ensure that nullify it will remove the relationship related node + person_sophia_sync.favorite_animal = None + person_sophia_sync.save() + person_sophia_sync = client_sync.get(kind=TESTING_PERSON, id=person_sophia.id, prefetch_relationships=True) + assert not person_sophia_sync.favorite_animal.id + + def test_task_query(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + nbr_tasks = client_sync.task.count() + assert nbr_tasks + + tasks = client_sync.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED])) + assert tasks + task_ids = [task.id for task in tasks] + + # Query Tasks using Parallel mode + tasks_parallel = client_sync.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED]), parallel=True) + assert tasks_parallel + task_parallel_ids = [task.id for task in tasks_parallel] + + # Additional tasks might have 
been completed between the two queries + # validate that we get at least as many tasks as in the first query + # and that all task IDs from the first query are present in the second one + assert len(tasks_parallel) >= len(tasks) + assert set(task_ids).issubset(set(task_parallel_ids)) + + # Query Tasks by ID + tasks_parallel_filtered = client_sync.task.filter(filter=TaskFilter(ids=task_ids[:2]), parallel=True) + assert tasks_parallel_filtered + assert len(tasks_parallel_filtered) == 2 + + # Query individual Task + task = client_sync.task.get(id=tasks[0].id) + assert task + assert isinstance(task, Task) + assert task.logs == [] + + # Wait for Task completion + task = client_sync.task.wait_for_completion(id=tasks[0].id) + assert task + assert isinstance(task, Task) + + # Query Tasks with logs + tasks = client_sync.task.filter(filter=TaskFilter(state=[TaskState.COMPLETED]), include_logs=True) + all_logs = [log for task in tasks for log in task.logs] + assert all_logs + assert isinstance(all_logs[0], TaskLog) + assert all_logs[0].message + assert all_logs[0].timestamp + assert all_logs[0].severity + + def test_tracking_mode(self, client_sync: InfrahubClientSync, base_dataset: None) -> None: + tag_names = ["BLUE", "RED", "YELLOW"] + person_name = "SyncTrackingTestPerson" + + def create_person_with_tags(clt: InfrahubClientSync, nbr_tags: int) -> None: + tags = [] + for idx in range(nbr_tags): + obj = clt.create(kind="BuiltinTag", name=f"sync-tracking-{tag_names[idx]}") + obj.save(allow_upsert=True) + tags.append(obj) + + person = clt.create(kind=TESTING_PERSON, name=person_name, tags=tags) + person.save(allow_upsert=True) + + # First execution, we create one person with 3 tags + nbr_tags = 3 + with client_sync.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt: + create_person_with_tags(clt=clt, nbr_tags=nbr_tags) + + assert client_sync.mode == InfrahubClientMode.DEFAULT + group = client_sync.get( + kind="CoreStandardGroup", name__value=client_sync.group_context._generate_group_name(), include=["members"] + ) + assert len(group.members.peers) == 4 # 1 person + 3 tags + + # Second execution, we create one person with 2 tags but we don't delete the third one + nbr_tags = 2 + with client_sync.start_tracking(params={"person_name": person_name}, delete_unused_nodes=False) as clt: + create_person_with_tags(clt=clt, nbr_tags=nbr_tags) + + assert client_sync.mode == InfrahubClientMode.DEFAULT + group = client_sync.get( + kind="CoreStandardGroup", name__value=client_sync.group_context._generate_group_name(), include=["members"] + ) + assert len(group.members.peers) == 3 # 1 person + 2 tags (third tag still exists but not in group) + + # Third execution, we create one person with 1 tag and we delete the second one + nbr_tags = 1 + with client_sync.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt: + create_person_with_tags(clt=clt, nbr_tags=nbr_tags) + + assert client_sync.mode == InfrahubClientMode.DEFAULT + group = client_sync.get( + kind="CoreStandardGroup", name__value=client_sync.group_context._generate_group_name(), include=["members"] + ) + assert len(group.members.peers) == 2 # 1 person + 1 tag + + # Fourth execution, validate that the group will not be updated if there is an exception + nbr_tags = 3 + with ( + pytest.raises(ValueError), + client_sync.start_tracking(params={"person_name": person_name}, delete_unused_nodes=True) as clt, + ): + create_person_with_tags(clt=clt, nbr_tags=nbr_tags) + raise ValueError("something 
happened") + + # Group should still have 2 members since the exception caused a rollback + group = client_sync.get( + kind="CoreStandardGroup", name__value=client_sync.group_context._generate_group_name(), include=["members"] + ) + assert len(group.members.peers) == 2 + + @pytest.mark.xfail(reason="https://github.com/opsmill/infrahub-sdk-python/issues/733") + def test_recorder_with_playback_rewrite_host(self, base_dataset: None, tmp_path: Path, infrahub_port: int) -> None: + # Create a fresh client for recording to ensure clean state (no cached schema) + recorder_config = Config( + username="admin", + password="infrahub", + address=f"http://localhost:{infrahub_port}", + custom_recorder=JSONRecorder(host="recorder-test", directory=str(tmp_path)), + ) + recorder_client = InfrahubClientSync(config=recorder_config) + + query = "query { BuiltinTag { edges { node { id name { value } } } } }" + result = recorder_client.execute_graphql(query=query) + + playback_config = JSONPlayback(directory=str(tmp_path)) + config = Config(address=f"http://recorder-test:{infrahub_port}", sync_requester=playback_config.sync_request) + playback = InfrahubClientSync(config=config) + recorded_result = playback.execute_graphql(query=query) + + assert result == recorded_result + assert result.get("BuiltinTag", {}).get("edges") is not None + + +class TestHierarchicalSchema(TestInfrahubDockerClient): + @pytest.fixture(scope="class") + def load_hierarchical_schema(self, client_sync: InfrahubClientSync, hierarchical_schema: dict[str, Any]) -> None: + resp = client_sync.schema.load(schemas=[hierarchical_schema], wait_until_converged=True) + assert resp.errors == {} + + def test_hierarchical(self, client_sync: InfrahubClientSync, load_hierarchical_schema: None) -> None: + location_country = client_sync.create( + kind="LocationCountry", name="country_name", shortname="country_shortname" + ) + location_country.save() + + location_site = client_sync.create( + kind="LocationSite", name="site_name", shortname="site_shortname", parent=location_country + ) + location_site.save() + + nodes = client_sync.all(kind="LocationSite", prefetch_relationships=True, populate_store=True) + assert len(nodes) == 1 + site_node = nodes[0] + assert site_node.name.value == "site_name" + assert site_node.shortname.value == "site_shortname" + + country_node = site_node.parent.get() + assert country_node.name.value == "country_name" diff --git a/tests/integration/test_infrahubctl.py b/tests/integration/test_infrahubctl.py index a4fa197b..f9b47c42 100644 --- a/tests/integration/test_infrahubctl.py +++ b/tests/integration/test_infrahubctl.py @@ -4,7 +4,6 @@ import os import shutil import tempfile -from collections.abc import Generator from pathlib import Path from typing import TYPE_CHECKING @@ -20,7 +19,10 @@ from tests.helpers.utils import change_directory, strip_color if TYPE_CHECKING: + from collections.abc import Generator + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode FIXTURE_BASE_DIR = Path(Path(Path(__file__).resolve()).parent / ".." 
/ "fixtures") @@ -33,20 +35,20 @@ class TestInfrahubCtl(TestInfrahubDockerClient, SchemaAnimal): async def base_dataset( self, client: InfrahubClient, - load_schema, - person_liam, - person_ethan, - person_sophia, - cat_luna, - cat_bella, - dog_daisy, - dog_rocky, - ctl_client_config, + load_schema: None, + person_liam: InfrahubNode, + person_ethan: InfrahubNode, + person_sophia: InfrahubNode, + cat_luna: InfrahubNode, + cat_bella: InfrahubNode, + dog_daisy: InfrahubNode, + dog_rocky: InfrahubNode, + ctl_client_config: Generator[None, None, None], ) -> None: await client.branch.create(branch_name="branch01") @pytest.fixture(scope="class") - def repository(self) -> Generator[str]: + def repository(self) -> Generator[str, None, None]: temp_dir = tempfile.mkdtemp() try: @@ -61,7 +63,7 @@ def repository(self) -> Generator[str]: shutil.rmtree(temp_dir) @pytest.fixture(scope="class") - def ctl_client_config(self, client: InfrahubClient) -> Generator: + def ctl_client_config(self, client: InfrahubClient) -> Generator[None, None, None]: load_configuration(value="infrahubctl.toml") assert config.SETTINGS._settings config.SETTINGS._settings.server_address = client.config.address diff --git a/tests/integration/test_node.py b/tests/integration/test_node.py index 652e7203..eb629e6e 100644 --- a/tests/integration/test_node.py +++ b/tests/integration/test_node.py @@ -1,12 +1,20 @@ +from __future__ import annotations + +import ipaddress +from typing import TYPE_CHECKING, Any + import pytest -from infrahub_sdk import InfrahubClient -from infrahub_sdk.exceptions import NodeNotFoundError +from infrahub_sdk.exceptions import NodeNotFoundError, UninitializedError from infrahub_sdk.node import InfrahubNode +from infrahub_sdk.protocols import IpamNamespace from infrahub_sdk.schema import NodeSchema, NodeSchemaAPI, SchemaRoot from infrahub_sdk.testing.docker import TestInfrahubDockerClient from infrahub_sdk.testing.schemas.car_person import TESTING_CAR, TESTING_MANUFACTURER, SchemaCarPerson +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + class TestInfrahubNode(TestInfrahubDockerClient, SchemaCarPerson): @pytest.fixture(scope="class") @@ -30,11 +38,7 @@ async def test_node_create( await node.save() assert node.id is not None - async def test_node_delete( - self, - client: InfrahubClient, - initial_schema: None, - ) -> None: + async def test_node_delete(self, client: InfrahubClient, initial_schema: None) -> None: obj = await client.create(kind=TESTING_MANUFACTURER, name="Dacia") await obj.save() @@ -50,11 +54,11 @@ async def test_node_create_with_relationships( default_branch: str, client: InfrahubClient, initial_schema: None, - manufacturer_mercedes, - person_joe, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, ) -> None: node = await client.create( - kind=TESTING_CAR, name="Tiguan", color="Black", manufacturer=manufacturer_mercedes.id, owner=person_joe.id + kind=TESTING_CAR, name="CLS", color="Black", manufacturer=manufacturer_mercedes.id, owner=person_joe.id ) await node.save() assert node.id is not None @@ -68,13 +72,13 @@ async def test_node_create_with_relationships_using_related_node( default_branch: str, client: InfrahubClient, initial_schema: None, - manufacturer_mercedes, - car_golf, - person_joe, + manufacturer_mercedes: InfrahubNode, + car_golf: InfrahubNode, + person_joe: InfrahubNode, ) -> None: related_node = car_golf.owner node = await client.create( - kind=TESTING_CAR, name="Tiguan", color="Black", manufacturer=manufacturer_mercedes, owner=related_node + 
kind=TESTING_CAR, name="CLS", color="Black", manufacturer=manufacturer_mercedes, owner=related_node ) await node.save(allow_upsert=True) assert node.id is not None @@ -90,13 +94,13 @@ async def test_node_filters_include( default_branch: str, client: InfrahubClient, initial_schema: None, - manufacturer_mercedes, - person_joe, - tag_red, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + tag_red: InfrahubNode, ) -> None: car = await client.create( kind=TESTING_CAR, - name="Tiguan2", + name="CLS AMG", color="Black", manufacturer=manufacturer_mercedes, owner=person_joe, @@ -120,10 +124,7 @@ async def test_node_filters_include( assert node_after.owner.peer.id == person_joe.id, f"{person_joe.id=}" async def test_node_update_with_original_data( - self, - default_branch: str, - client: InfrahubClient, - initial_schema: None, + self, default_branch: str, client: InfrahubClient, initial_schema: None ) -> None: person_marina = await client.create(kind="TestingPerson", name="marina", age=20) await person_marina.save() @@ -138,85 +139,70 @@ async def test_node_update_with_original_data( node = await client.get(kind="TestingPerson", id=person_marina.id) assert node.age.value == 20, node.age.value - # async def test_node_update_payload_with_relationships( - # self, - # db: InfrahubDatabase, - # client: InfrahubClient, - # init_db_base, - # load_builtin_schema, - # tag_blue: Node, - # tag_red: Node, - # repo01: Node, - # gqlquery01: Node, - # ): - # data = { - # "name": "rfile10", - # "template_path": "mytemplate.j2", - # "query": gqlquery01.id, - # "repository": repo01.id, - # "tags": [tag_blue.id, tag_red.id], - # } - # schema = await client.schema.get(kind="CoreTransformJinja2", branch="main") - # create_payload = client.schema.generate_payload_create( - # schema=schema, data=data, source=repo01.id, is_protected=True - # ) - # obj = await client.create(kind="CoreTransformJinja2", branch="main", **create_payload) - # await obj.save() - - # assert obj.id is not None - # nodedb = await client.get(kind="CoreTransformJinja2", id=str(obj.id)) - - # input_data = nodedb._generate_input_data()["data"]["data"] - # assert input_data["name"]["value"] == "rfile10" - # # Validate that the source isn't a dictionary bit a reference to the repo - # assert input_data["name"]["source"] == repo01.id - - # async def test_node_create_with_properties( - # self, - # db: InfrahubDatabase, - # client: InfrahubClient, - # init_db_base, - # load_builtin_schema, - # tag_blue: Node, - # tag_red: Node, - # repo01: Node, - # gqlquery01: Node, - # first_account: Node, - # ): - # data = { - # "name": { - # "value": "rfile02", - # "is_protected": True, - # "source": first_account.id, - # "owner": first_account.id, - # }, - # "template_path": {"value": "mytemplate.j2"}, - # "query": {"id": gqlquery01.id}, # "source": first_account.id, "owner": first_account.id}, - # "repository": {"id": repo01.id}, # "source": first_account.id, "owner": first_account.id}, - # "tags": [tag_blue.id, tag_red.id], - # } - - # node = await client.create(kind="CoreTransformJinja2", data=data) - # await node.save() - - # assert node.id is not None - - # nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - # assert nodedb.name.value == node.name.value - # assert nodedb.name.is_protected is True + async def test_node_generate_input_data_with_relationships( + self, + client: InfrahubClient, + initial_schema: None, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + tag_blue: 
InfrahubNode, + tag_red: InfrahubNode, + ) -> None: + car = await client.create( + kind=TESTING_CAR, + name="InputDataCar", + color="Silver", + manufacturer=manufacturer_mercedes.id, + owner=person_joe.id, + tags=[tag_blue.id, tag_red.id], + ) + await car.save() + assert car.id is not None + + input_data = car._generate_input_data()["data"]["data"] + + assert input_data["name"]["value"] == "InputDataCar" + assert input_data["color"]["value"] == "Silver" + assert "manufacturer" in input_data + assert input_data["manufacturer"]["id"] == manufacturer_mercedes.id + + async def test_node_create_with_properties( + self, + client: InfrahubClient, + initial_schema: None, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + ) -> None: + data = { + "name": {"value": "ProtectedCar", "is_protected": True}, + "color": {"value": "Gold"}, + "manufacturer": {"id": manufacturer_mercedes.id}, + "owner": {"id": person_joe.id}, + } + + node = await client.create(kind=TESTING_CAR, data=data) + await node.save() + + assert node.id is not None + assert node.name.value == "ProtectedCar" + assert node.name.is_protected + + node_fetched = await client.get(kind=TESTING_CAR, id=node.id, property=True) + assert node_fetched.name.value == "ProtectedCar" + assert node_fetched.name.is_protected async def test_node_update( self, default_branch: str, client: InfrahubClient, initial_schema: None, - manufacturer_mercedes, - person_joe, - person_jane, - car_golf, - tag_blue, - tag_red, - tag_green, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + person_jane: InfrahubNode, + car_golf: InfrahubNode, + tag_blue: InfrahubNode, + tag_red: InfrahubNode, + tag_green: InfrahubNode, ) -> None: car_golf.color.value = "White" await car_golf.tags.fetch() @@ -238,149 +224,194 @@ async def test_node_update( await car3.tags.fetch() assert sorted([tag.id for tag in car3.tags.peers]) == sorted([tag_green.id, tag_blue.id]) - # async def test_node_update_3_idempotency( - # self, - # db: InfrahubDatabase, - # client: InfrahubClient, - # init_db_base, - # load_builtin_schema, - # tag_green: Node, - # tag_red: Node, - # tag_blue: Node, - # gqlquery03: Node, - # repo99: Node, - # ): - # node = await client.get(kind="CoreGraphQLQuery", name__value="query03") - # assert node.id is not None - - # updated_query = f"\n\n{node.query.value}" - # node.name.value = "query031" - # node.query.value = updated_query - # first_update = node._generate_input_data(exclude_unmodified=True) - # await node.save() - # nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - - # node = await client.get(kind="CoreGraphQLQuery", name__value="query031") - - # node.name.value = "query031" - # node.query.value = updated_query - - # second_update = node._generate_input_data(exclude_unmodified=True) - - # assert nodedb.query.value == updated_query - # assert "query" in first_update["data"]["data"] - # assert "value" in first_update["data"]["data"]["query"] - # assert first_update["variables"] - # assert "query" not in second_update["data"]["data"] - # assert not second_update["variables"] - - # async def test_relationship_manager_errors_without_fetch(self, client: InfrahubClient, load_builtin_schema): - # organization = await client.create("TestOrganization", name="organization-1") - # await organization.save() - # tag = await client.create("BuiltinTag", name="blurple") - # await tag.save() - - # with pytest.raises(UninitializedError, match=r"Must call fetch"): - # organization.tags.add(tag) - - # 
await organization.tags.fetch() - # organization.tags.add(tag) - # await organization.save() - - # organization = await client.get("TestOrganization", name__value="organization-1") - # assert [t.id for t in organization.tags.peers] == [tag.id] - - # async def test_relationships_not_overwritten( - # self, client: InfrahubClient, load_builtin_schema, schema_extension_01 - # ): - # await client.schema.load(schemas=[schema_extension_01]) - # rack = await client.create("InfraRack", name="rack-1") - # await rack.save() - # tag = await client.create("BuiltinTag", name="blizzow") - # # TODO: is it a bug that we need to save the object and fetch the tags before adding to a RelationshipManager now? - # await tag.save() - # await tag.racks.fetch() - # tag.racks.add(rack) - # await tag.save() - # tag_2 = await client.create("BuiltinTag", name="blizzow2") - # await tag_2.save() - - # # the "rack" object has no link to the "tag" object here - # # rack.tags.peers is empty - # rack.name.value = "New Rack Name" - # await rack.save() - - # # assert that the above rack.save() did not overwrite the existing Rack-Tag relationship - # refreshed_rack = await client.get("InfraRack", id=rack.id) - # await refreshed_rack.tags.fetch() - # assert [t.id for t in refreshed_rack.tags.peers] == [tag.id] - - # # check that we can purposefully remove a tag - # refreshed_rack.tags.remove(tag.id) - # await refreshed_rack.save() - # rack_without_tag = await client.get("InfraRack", id=rack.id) - # await rack_without_tag.tags.fetch() - # assert rack_without_tag.tags.peers == [] - - # # check that we can purposefully add a tag - # rack_without_tag.tags.add(tag_2) - # await rack_without_tag.save() - # refreshed_rack_with_tag = await client.get("InfraRack", id=rack.id) - # await refreshed_rack_with_tag.tags.fetch() - # assert [t.id for t in refreshed_rack_with_tag.tags.peers] == [tag_2.id] - - # async def test_node_create_from_pool( - # self, db: InfrahubDatabase, client: InfrahubClient, init_db_base, default_ipam_namespace, load_ipam_schema - # ): - # ip_prefix = await client.create(kind="IpamIPPrefix", prefix="192.0.2.0/24") - # await ip_prefix.save() - - # ip_pool = await client.create( - # kind="CoreIPAddressPool", - # name="Core loopbacks 1", - # default_address_type="IpamIPAddress", - # default_prefix_length=32, - # ip_namespace=default_ipam_namespace, - # resources=[ip_prefix], - # ) - # await ip_pool.save() - - # devices = [] - # for i in range(1, 5): - # d = await client.create(kind="InfraDevice", name=f"core0{i}", primary_address=ip_pool) - # await d.save() - # devices.append(d) - - # assert [str(device.primary_address.peer.address.value) for device in devices] == [ - # "192.0.2.1/32", - # "192.0.2.2/32", - # "192.0.2.3/32", - # "192.0.2.4/32", - # ] - - # async def test_node_update_from_pool( - # self, db: InfrahubDatabase, client: InfrahubClient, init_db_base, default_ipam_namespace, load_ipam_schema - # ): - # starter_ip_address = await client.create(kind="IpamIPAddress", address="10.0.0.1/32") - # await starter_ip_address.save() - - # ip_prefix = await client.create(kind="IpamIPPrefix", prefix="192.168.0.0/24") - # await ip_prefix.save() - - # ip_pool = await client.create( - # kind="CoreIPAddressPool", - # name="Core loopbacks 2", - # default_address_type="IpamIPAddress", - # default_prefix_length=32, - # ip_namespace=default_ipam_namespace, - # resources=[ip_prefix], - # ) - # await ip_pool.save() - - # device = await client.create(kind="InfraDevice", name="core05", primary_address=starter_ip_address) - # await 
device.save() - - # device.primary_address = ip_pool - # await device.save() - - # assert str(device.primary_address.peer.address.value) == "192.168.0.1/32" + async def test_relationship_manager_errors_without_fetch( + self, + client: InfrahubClient, + initial_schema: None, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + tag_blue: InfrahubNode, + ) -> None: + car = await client.create( + kind=TESTING_CAR, name="UnfetchedCar", color="Blue", manufacturer=manufacturer_mercedes, owner=person_joe + ) + await car.save() + + with pytest.raises(UninitializedError, match=r"Must call fetch"): + car.tags.add(tag_blue) + + await car.tags.fetch() + car.tags.add(tag_blue) + await car.save() + + car = await client.get(kind=TESTING_CAR, id=car.id) + await car.tags.fetch() + assert {t.id for t in car.tags.peers} == {tag_blue.id} + + async def test_relationships_not_overwritten( + self, + client: InfrahubClient, + initial_schema: None, + manufacturer_mercedes: InfrahubNode, + person_joe: InfrahubNode, + tag_blue: InfrahubNode, + tag_red: InfrahubNode, + ) -> None: + car = await client.create( + kind=TESTING_CAR, + name="RelationshipTestCar", + color="Green", + manufacturer=manufacturer_mercedes, + owner=person_joe, + ) + await car.save() + + await car.tags.fetch() + car.tags.add(tag_blue) + await car.save() + + car_refetch = await client.get(kind=TESTING_CAR, id=car.id) + car_refetch.color.value = "Red" + await car_refetch.save() + + # Verify the tag relationship was not overwritten + refreshed_car = await client.get(kind=TESTING_CAR, id=car.id) + await refreshed_car.tags.fetch() + assert [t.id for t in refreshed_car.tags.peers] == [tag_blue.id] + + # Check that we can purposefully remove a tag + refreshed_car.tags.remove(tag_blue.id) + await refreshed_car.save() + car_without_tag = await client.get(kind=TESTING_CAR, id=car.id) + await car_without_tag.tags.fetch() + assert car_without_tag.tags.peers == [] + + # Check that we can purposefully add a tag + car_without_tag.tags.add(tag_red) + await car_without_tag.save() + car_with_new_tag = await client.get(kind=TESTING_CAR, id=car.id) + await car_with_new_tag.tags.fetch() + assert [t.id for t in car_with_new_tag.tags.peers] == [tag_red.id] + + async def test_node_update_idempotency(self, client: InfrahubClient, initial_schema: None) -> None: + original_query = "query { CoreRepository { edges { node { name { value }}}}}" + node = await client.create(kind="CoreGraphQLQuery", name="idempotency-query", query=original_query) + await node.save() + + node = await client.get(kind="CoreGraphQLQuery", name__value="idempotency-query") + assert node.id is not None + + updated_query = f"\n\n{node.query.value}" + node.name.value = "idempotency-query-updated" + node.query.value = updated_query + first_update = node._generate_input_data(exclude_unmodified=True) + await node.save() + + # Verify the first update contains the changes + assert "query" in first_update["data"]["data"] + assert "value" in first_update["data"]["data"]["query"] + assert first_update["variables"] + + # Fetch the node again and set the same values + node = await client.get(kind="CoreGraphQLQuery", name__value="idempotency-query-updated") + node.name.value = "idempotency-query-updated" + node.query.value = updated_query + second_update = node._generate_input_data(exclude_unmodified=True) + + # Verify the second update doesn't contain any data (idempotent) + assert "query" not in second_update["data"]["data"] + assert not second_update["variables"] + + +class 
TestNodeWithPools(TestInfrahubDockerClient): + @pytest.fixture(scope="class") + async def load_ipam_schema(self, default_branch: str, client: InfrahubClient, ipam_schema: dict[str, Any]) -> None: + await client.schema.wait_until_converged(branch=default_branch) + resp = await client.schema.load(schemas=[ipam_schema], branch=default_branch, wait_until_converged=True) + assert resp.errors == {} + + @pytest.fixture(scope="class") + async def default_ipam_namespace(self, client: InfrahubClient, load_ipam_schema: None) -> IpamNamespace: + return await client.get(kind=IpamNamespace, name__value="default") + + @pytest.fixture(scope="class") + async def ip_prefix(self, client: InfrahubClient, load_ipam_schema: None) -> InfrahubNode: + prefix = await client.create(kind="IpamIPPrefix", prefix="192.0.2.0/24", member_type="address") + await prefix.save() + return prefix + + @pytest.fixture(scope="class") + async def ip_pool( + self, client: InfrahubClient, ip_prefix: InfrahubNode, default_ipam_namespace: IpamNamespace + ) -> InfrahubNode: + pool = await client.create( + kind="CoreIPAddressPool", + name="Test IP Pool", + default_address_type="IpamIPAddress", + default_prefix_length=32, + resources=[ip_prefix], + ip_namespace=default_ipam_namespace, + ) + await pool.save() + return pool + + async def test_node_create_from_pool( + self, client: InfrahubClient, ip_pool: InfrahubNode, load_ipam_schema: None + ) -> None: + devices = [] + for i in range(1, 4): + device = await client.create(kind="InfraDevice", name=f"device-{i:02d}", primary_address=ip_pool) + await device.save() + devices.append(device) + + ip_addresses = [] + devices = await client.all(kind="InfraDevice", prefetch_relationships=True) + for device in devices: + assert device.primary_address.peer is not None + ip_addresses.append(device.primary_address.peer.address.value) + + assert len(set(ip_addresses)) == len(devices) + + for ip in ip_addresses: + assert ip in ipaddress.ip_network("192.0.2.0/24") + + async def test_allocate_next_ip_address_idempotent( + self, client: InfrahubClient, ip_pool: InfrahubNode, load_ipam_schema: None + ) -> None: + identifier = "idempotent-allocation-test" + + # Allocate twice with the same identifier + ip1 = await client.allocate_next_ip_address(resource_pool=ip_pool, identifier=identifier) + ip2 = await client.allocate_next_ip_address(resource_pool=ip_pool, identifier=identifier) + + assert ip1.id == ip2.id + assert ip1.address.value == ip2.address.value + + async def test_node_update_from_pool( + self, client: InfrahubClient, load_ipam_schema: None, default_ipam_namespace: IpamNamespace + ) -> None: + starter_ip_address = await client.create(kind="IpamIPAddress", address="10.0.0.1/32") + await starter_ip_address.save() + + ip_prefix = await client.create(kind="IpamIPPrefix", prefix="192.168.0.0/24", member_type="address") + await ip_prefix.save() + + ip_pool = await client.create( + kind="CoreIPAddressPool", + name="Update Test Pool", + default_address_type="IpamIPAddress", + default_prefix_length=32, + resources=[ip_prefix], + ip_namespace=default_ipam_namespace, + ) + await ip_pool.save() + + device = await client.create(kind="InfraDevice", name="update-device", primary_address=starter_ip_address) + await device.save() + + device.primary_address = ip_pool + await device.save() + + fetched_device = await client.get(kind="InfraDevice", id=device.id, prefetch_relationships=True) + assert fetched_device.primary_address.peer is not None + assert fetched_device.primary_address.peer.address.value == 
ipaddress.ip_interface("192.168.0.1/32") diff --git a/tests/integration/test_object_store.py b/tests/integration/test_object_store.py index a1b62b32..26ba2687 100644 --- a/tests/integration/test_object_store.py +++ b/tests/integration/test_object_store.py @@ -1,19 +1,25 @@ -# from infrahub_sdk import InfrahubClient -# from tests.helpers.test_app import TestInfrahubApp -# -# FILE_CONTENT_01 = """ -# any content -# another content -# """ -# -# -# class TestObjectStore(TestInfrahubApp): -# async def test_upload_and_get(self, client: InfrahubClient): -# response = await client.object_store.upload(content=FILE_CONTENT_01) -# -# assert sorted(response.keys()) == ["checksum", "identifier"] -# assert response["checksum"] == "aa19b96860ec59a73906dd8660bb3bad" -# assert response["identifier"] -# -# content = await client.object_store.get(identifier=response["identifier"]) -# assert content == FILE_CONTENT_01 +from __future__ import annotations + +from typing import TYPE_CHECKING + +from infrahub_sdk.testing.docker import TestInfrahubDockerClient + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + +FILE_CONTENT_01 = """ + any content + another content + """ + + +class TestObjectStore(TestInfrahubDockerClient): + async def test_upload_and_get(self, client: InfrahubClient) -> None: + response = await client.object_store.upload(content=FILE_CONTENT_01) + + assert sorted(response.keys()) == ["checksum", "identifier"] + assert response["checksum"] == "aa19b96860ec59a73906dd8660bb3bad" + assert response["identifier"] + + content = await client.object_store.get(identifier=response["identifier"]) + assert content == FILE_CONTENT_01 diff --git a/tests/integration/test_repository.py b/tests/integration/test_repository.py index 37bc7089..942e3bc4 100644 --- a/tests/integration/test_repository.py +++ b/tests/integration/test_repository.py @@ -7,11 +7,13 @@ from infrahub_sdk.utils import get_fixtures_dir if TYPE_CHECKING: + from pathlib import Path + from infrahub_sdk import InfrahubClient class TestInfrahubRepository(TestInfrahubDockerClient): - async def test_add_repository(self, client: InfrahubClient, remote_repos_dir) -> None: + async def test_add_repository(self, client: InfrahubClient, remote_repos_dir: Path) -> None: src_directory = get_fixtures_dir() / "integration/mock_repo" repo = GitRepo(name="mock_repo", src_directory=src_directory, dst_directory=remote_repos_dir) commit = repo._repo.git[repo._repo.git.head()] diff --git a/tests/integration/test_schema.py b/tests/integration/test_schema.py index 1d081ae2..314a3ffa 100644 --- a/tests/integration/test_schema.py +++ b/tests/integration/test_schema.py @@ -1,19 +1,13 @@ +from typing import Any + import pytest from infrahub_sdk import InfrahubClient from infrahub_sdk.exceptions import BranchNotFoundError +from infrahub_sdk.schema import NodeSchemaAPI from infrahub_sdk.testing.docker import TestInfrahubDockerClient -# from infrahub.core.schema import core_models -# from infrahub.server import app -# -# from infrahub_sdk.schema import NodeSchemaAPI -# -# from .conftest import InfrahubTestClient -# -# -# class TestInfrahubSchema(TestInfrahubDockerClient): async def test_query_schema_for_branch_not_found(self, client: InfrahubClient) -> None: with pytest.raises(BranchNotFoundError) as exc: @@ -21,49 +15,31 @@ async def test_query_schema_for_branch_not_found(self, client: InfrahubClient) - assert str(exc.value) == "The requested branch was not found on the server [I-do-not-exist]" + async def test_schema_all(self, client: InfrahubClient) -> None: + 
schema_nodes = await client.schema.all() + + assert [node for node in schema_nodes.values() if node.namespace == "Profile"] + + assert "BuiltinTag" in schema_nodes + assert isinstance(schema_nodes["BuiltinTag"], NodeSchemaAPI) + + async def test_schema_get(self, client: InfrahubClient) -> None: + schema_node = await client.schema.get(kind="BuiltinTag") + + assert isinstance(schema_node, NodeSchemaAPI) + assert client.default_branch in client.schema.cache + + +class TestInfrahubSchemaLoad(TestInfrahubDockerClient): + async def test_schema_load_many( + self, client: InfrahubClient, schema_extension_01: dict[str, Any], schema_extension_02: dict[str, Any] + ) -> None: + response = await client.schema.load( + schemas=[schema_extension_01, schema_extension_02], wait_until_converged=True + ) + + assert response.schema_updated -# class TestInfrahubSchema: -# @pytest.fixture(scope="class") -# async def client(self): -# return InfrahubTestClient(app) -# -# async def test_schema_all(self, client, init_db_base): -# config = Config(requester=client.async_request) -# ifc = InfrahubClient(config=config) -# schema_nodes = await ifc.schema.all() -# -# nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"] -# generics = [node for node in core_models["generics"] if node["namespace"] != "Internal"] -# -# profiles = [node for node in schema_nodes.values() if node.namespace == "Profile"] -# assert profiles -# -# assert len(schema_nodes) == len(nodes) + len(generics) + len(profiles) -# assert "BuiltinTag" in schema_nodes -# assert isinstance(schema_nodes["BuiltinTag"], NodeSchemaAPI) -# -# async def test_schema_get(self, client, init_db_base): -# config = Config(username="admin", password="infrahub", requester=client.async_request) -# ifc = InfrahubClient(config=config) -# schema_node = await ifc.schema.get(kind="BuiltinTag") -# -# assert isinstance(schema_node, NodeSchemaAPI) -# assert ifc.default_branch in ifc.schema.cache -# nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"] -# generics = [node for node in core_models["generics"] if node["namespace"] != "Internal"] -# -# schema_without_profiles = [ -# node for node in ifc.schema.cache[ifc.default_branch].values() if node.namespace != "Profile" -# ] -# assert len(schema_without_profiles) == len(nodes) + len(generics) -# -# async def test_schema_load_many(self, client, init_db_base, schema_extension_01, schema_extension_02): -# config = Config(username="admin", password="infrahub", requester=client.async_request) -# ifc = InfrahubClient(config=config) -# response = await ifc.schema.load(schemas=[schema_extension_01, schema_extension_02]) -# -# assert response.schema_updated -# -# schema_nodes = await ifc.schema.all(refresh=True) -# assert "InfraRack" in schema_nodes.keys() -# assert "ProcurementContract" in schema_nodes.keys() + schema_nodes = await client.schema.all(refresh=True) + assert "InfraRack" in schema_nodes + assert "ProcurementContract" in schema_nodes From 927892cc6fe73ec46f6a359a07daf1477822f35f Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Thu, 8 Jan 2026 13:11:29 +0000 Subject: [PATCH 23/27] Add docs for metadata and ordering (#735) * Add docs for metadata and ordering * Add relationship metadata * Remove test file --- docs/docs/python-sdk/guides/query_data.mdx | 273 +++++++++++++++++++++ 1 file changed, 273 insertions(+) diff --git a/docs/docs/python-sdk/guides/query_data.mdx b/docs/docs/python-sdk/guides/query_data.mdx index fde6667f..74fb3ec9 100644 --- 
a/docs/docs/python-sdk/guides/query_data.mdx +++ b/docs/docs/python-sdk/guides/query_data.mdx @@ -527,6 +527,279 @@ By default, the [meta data or properties]($(base_url)topics/metadata) of attribu +## Node metadata + +Node metadata provides information about when a node was created or last updated, and by whom. This includes timestamps and references to the accounts that made the changes. + +### Including node metadata in queries + +By default, node metadata is not included in query results. You can include it using the `include_metadata` argument of the SDK client's `all`, `filters`, or `get` method. + + + + + ```python + device = await client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + ``` + + + + + ```python + device = client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + ``` + + + + +### Accessing node metadata + +Once metadata is included in the query, you can access it using the `get_node_metadata()` method. The metadata object contains the following fields: + +- `created_at`: Timestamp when the node was created +- `created_by`: Reference to the account that created the node +- `updated_at`: Timestamp when the node was last updated +- `updated_by`: Reference to the account that last updated the node + + + + + ```python + device = await client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + + # Get the metadata object + metadata = device.get_node_metadata() + + # Access creation metadata + print(metadata.created_at) # e.g., "2024-01-15T10:30:00Z" + print(metadata.created_by.display_label) # e.g., "admin" + + # Access update metadata + print(metadata.updated_at) # e.g., "2024-01-20T14:45:00Z" + print(metadata.updated_by.display_label) # e.g., "admin" + ``` + + + + + ```python + device = client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + + # Get the metadata object + metadata = device.get_node_metadata() + + # Access creation metadata + print(metadata.created_at) # e.g., "2024-01-15T10:30:00Z" + print(metadata.created_by.display_label) # e.g., "admin" + + # Access update metadata + print(metadata.updated_at) # e.g., "2024-01-20T14:45:00Z" + print(metadata.updated_by.display_label) # e.g., "admin" + ``` + + + + +The `created_by` and `updated_by` fields are `NodeProperty` objects that include: + +- `id`: The unique identifier of the account +- `display_label`: A human-readable label for the account +- `typename`: The GraphQL type name of the account + +## Relationship metadata + +When querying with `include_metadata=True`, you can also access metadata about relationship edges themselves. This tells you when a specific relationship (the connection between two nodes) was last modified and by whom. + +### Accessing relationship metadata + +Use the `get_relationship_metadata()` method on a related node to access the relationship edge metadata. This is different from node metadata - it describes when the relationship itself was created or modified, not the connected node. 
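+
+The method may return `None` when relationship metadata was not included in the query, so it is worth guarding the call. As a minimal sketch (assuming a `device` node fetched with `include_metadata=True`, as in the examples below, and that `updated_by` can also be unset), this also shows the `id` and `typename` fields of the `NodeProperty` object:
+
+```python
+rel_metadata = device.site.get_relationship_metadata()
+if rel_metadata is not None and rel_metadata.updated_by is not None:
+    # updated_by is a NodeProperty with id, display_label, and typename
+    account = rel_metadata.updated_by
+    print(account.id, account.display_label, account.typename)
+```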
+ + + + + ```python + device = await client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + + # For a cardinality-one relationship + rel_metadata = device.site.get_relationship_metadata() + if rel_metadata: + print(rel_metadata.updated_at) # e.g., "2024-01-17T08:00:00Z" + print(rel_metadata.updated_by.display_label) # e.g., "admin" + + # For a cardinality-many relationship + for tag in device.tags.peers: + rel_metadata = tag.get_relationship_metadata() + if rel_metadata: + print(f"Tag relationship updated at: {rel_metadata.updated_at}") + print(f"Updated by: {rel_metadata.updated_by.display_label}") + ``` + + + + + ```python + device = client.get(kind="TestDevice", name__value="atl1-edge1", include_metadata=True) + + # For a cardinality-one relationship + rel_metadata = device.site.get_relationship_metadata() + if rel_metadata: + print(rel_metadata.updated_at) # e.g., "2024-01-17T08:00:00Z" + print(rel_metadata.updated_by.display_label) # e.g., "admin" + + # For a cardinality-many relationship + for tag in device.tags.peers: + rel_metadata = tag.get_relationship_metadata() + if rel_metadata: + print(f"Tag relationship updated at: {rel_metadata.updated_at}") + print(f"Updated by: {rel_metadata.updated_by.display_label}") + ``` + + + + +The `RelationshipMetadata` object contains: + +- `updated_at`: Timestamp when the relationship was last updated +- `updated_by`: Reference to the account that last updated the relationship (a `NodeProperty` object with `id`, `display_label`, and `typename`) + +:::note +Relationship metadata tracks changes to the relationship edge itself (for example, when the relationship was created or when its properties were modified), not changes to the connected nodes. For node-level metadata, use `get_node_metadata()` on the node itself. +::: + +## Ordering query results + +You can control the order in which query results are returned using the `order` argument. This is particularly useful when you need results sorted by metadata fields like creation or update timestamps. + +### Ordering by node metadata + +Use the `Order` and `NodeMetaOrder` classes along with `OrderDirection` to specify how results should be ordered. + + + + + ```python + from infrahub_sdk.enums import OrderDirection + from infrahub_sdk.types import NodeMetaOrder, Order + + # Get devices ordered by creation time (oldest first) + devices = await client.all( + kind="TestDevice", + order=Order(node_metadata=NodeMetaOrder(created_at=OrderDirection.ASC)) + ) + + # Get devices ordered by last update time (most recent first) + devices = await client.all( + kind="TestDevice", + order=Order(node_metadata=NodeMetaOrder(updated_at=OrderDirection.DESC)) + ) + ``` + + + + + ```python + from infrahub_sdk.enums import OrderDirection + from infrahub_sdk.types import NodeMetaOrder, Order + + # Get devices ordered by creation time (oldest first) + devices = client.all( + kind="TestDevice", + order=Order(node_metadata=NodeMetaOrder(created_at=OrderDirection.ASC)) + ) + + # Get devices ordered by last update time (most recent first) + devices = client.all( + kind="TestDevice", + order=Order(node_metadata=NodeMetaOrder(updated_at=OrderDirection.DESC)) + ) + ``` + + + + +The available order directions are: + +- `OrderDirection.ASC`: Ascending order (oldest/smallest first) +- `OrderDirection.DESC`: Descending order (newest/largest first) + +:::note +You can only order by one metadata field at a time. 
Specifying both `created_at` and `updated_at` in the same `NodeMetaOrder` will raise a validation error, as they are mutually exclusive. +::: + +### Disabling default ordering + +For performance optimization, you can disable the default ordering behavior entirely: + + + + + ```python + from infrahub_sdk.types import Order + + # Disable ordering to improve query performance + devices = await client.all(kind="TestDevice", order=Order(disable=True)) + ``` + + + + + ```python + from infrahub_sdk.types import Order + + # Disable ordering to improve query performance + devices = client.all(kind="TestDevice", order=Order(disable=True)) + ``` + + + + +### Combining metadata and ordering + +You can include metadata and order results in the same query: + + + + + ```python + from infrahub_sdk.enums import OrderDirection + from infrahub_sdk.types import NodeMetaOrder, Order + + # Get the 10 most recently updated devices with their metadata + devices = await client.filters( + kind="TestDevice", + limit=10, + include_metadata=True, + order=Order(node_metadata=NodeMetaOrder(updated_at=OrderDirection.DESC)) + ) + + for device in devices: + metadata = device.get_node_metadata() + print(f"{device.name.value} - Last updated: {metadata.updated_at}") + ``` + + + + + ```python + from infrahub_sdk.enums import OrderDirection + from infrahub_sdk.types import NodeMetaOrder, Order + + # Get the 10 most recently updated devices with their metadata + devices = client.filters( + kind="TestDevice", + limit=10, + include_metadata=True, + order=Order(node_metadata=NodeMetaOrder(updated_at=OrderDirection.DESC)) + ) + + for device in devices: + metadata = device.get_node_metadata() + print(f"{device.name.value} - Last updated: {metadata.updated_at}") + ``` + + + + ## Query a node(s) in a different branch If you want to query a node(s) in a different branch than the default branch with which the SDK client was initiated, then you can use the `branch` argument of the query methods. From 0da2e7850c6c688de71edf47878e9b1d1d8c3e8c Mon Sep 17 00:00:00 2001 From: wvandeun Date: Thu, 8 Jan 2026 15:41:55 +0100 Subject: [PATCH 24/27] bump version to v1.18.0 --- pyproject.toml | 2 +- uv.lock | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 491e5ae0..5f86203c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "infrahub-sdk" -version = "1.17.0" +version = "1.18.0" description = "Python Client to interact with Infrahub" authors = [ {name = "OpsMill", email = "info@opsmill.com"} diff --git a/uv.lock b/uv.lock index dc511d55..9d0bf9a6 100644 --- a/uv.lock +++ b/uv.lock @@ -731,7 +731,7 @@ wheels = [ [[package]] name = "infrahub-sdk" -version = "1.17.0" +version = "1.18.0" source = { editable = "." 
} dependencies = [ { name = "dulwich" }, @@ -877,8 +877,8 @@ dev = [ lint = [ { name = "astroid", specifier = ">=3.1,<4.0" }, { name = "mypy", specifier = "==1.11.2" }, - { name = "ty", specifier = "==0.0.8" }, { name = "ruff", specifier = "==0.14.10" }, + { name = "ty", specifier = "==0.0.8" }, { name = "yamllint" }, ] tests = [ From 2017aa9812f6d9fb2bfea61642b5df1460156754 Mon Sep 17 00:00:00 2001 From: wvandeun Date: Thu, 8 Jan 2026 15:46:26 +0100 Subject: [PATCH 25/27] update CHANGELOG for 1.18.0 release --- CHANGELOG.md | 20 ++++++++++++++++++++ changelog/+1b40f022.housekeeping.md | 1 - changelog/+86c0992a.added.md | 1 - changelog/+d3b5369f.added.md | 1 - changelog/+e2f96e7b.removed.md | 1 - changelog/187.fixed.md | 1 - changelog/630.fixed.md | 1 - 7 files changed, 20 insertions(+), 6 deletions(-) delete mode 100644 changelog/+1b40f022.housekeeping.md delete mode 100644 changelog/+86c0992a.added.md delete mode 100644 changelog/+d3b5369f.added.md delete mode 100644 changelog/+e2f96e7b.removed.md delete mode 100644 changelog/187.fixed.md delete mode 100644 changelog/630.fixed.md diff --git a/CHANGELOG.md b/CHANGELOG.md index db86d527..0abcb189 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,26 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.18.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.18.0) - 2026-01-08 + +### Added + +- Add ability to query for metadata on nodes to include information such as creation and update timestamps, creator and last user to update an object. +- Added ability to order nodes by metadata created_at or updated_at fields + +### Removed + +- The previously deprecated 'background_execution' parameter under client.branch.create() was removed. + +### Fixed + +- Rewrite and re-enable integration tests ([#187](https://github.com/opsmill/infrahub-sdk-python/issues/187)) +- Fixed SDK including explicit `null` values for uninitialized optional relationships when creating nodes with object templates, which prevented the backend from applying template defaults. ([#630](https://github.com/opsmill/infrahub-sdk-python/issues/630)) + +### Housekeeping + +- Fixed Python 3.14 compatibility warnings. Testing now requires pytest>=9. + ## [1.17.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.17.0) - 2025-12-11 ### Added diff --git a/changelog/+1b40f022.housekeeping.md b/changelog/+1b40f022.housekeeping.md deleted file mode 100644 index 40a566c7..00000000 --- a/changelog/+1b40f022.housekeeping.md +++ /dev/null @@ -1 +0,0 @@ -Fixed Python 3.14 compatibility warnings. Testing now requires pytest>=9. diff --git a/changelog/+86c0992a.added.md b/changelog/+86c0992a.added.md deleted file mode 100644 index 1e53de4b..00000000 --- a/changelog/+86c0992a.added.md +++ /dev/null @@ -1 +0,0 @@ -Added ability to order nodes by metadata created_at or updated_at fields diff --git a/changelog/+d3b5369f.added.md b/changelog/+d3b5369f.added.md deleted file mode 100644 index 3942c0cc..00000000 --- a/changelog/+d3b5369f.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ability to query for metadata on nodes to include information such as creation and update timestamps, creator and last user to update an object. diff --git a/changelog/+e2f96e7b.removed.md b/changelog/+e2f96e7b.removed.md deleted file mode 100644 index 52e96350..00000000 --- a/changelog/+e2f96e7b.removed.md +++ /dev/null @@ -1 +0,0 @@ -The previously deprecated 'background_execution' parameter under client.branch.create() was removed. 
diff --git a/changelog/187.fixed.md b/changelog/187.fixed.md deleted file mode 100644 index 1911c8dc..00000000 --- a/changelog/187.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Rewrite and re-enable integration tests \ No newline at end of file diff --git a/changelog/630.fixed.md b/changelog/630.fixed.md deleted file mode 100644 index 34a54db3..00000000 --- a/changelog/630.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed SDK including explicit `null` values for uninitialized optional relationships when creating nodes with object templates, which prevented the backend from applying template defaults. From 263945d9f410ab85f272ef8339d1f3d5655430ba Mon Sep 17 00:00:00 2001 From: Mikhail Yohman Date: Thu, 8 Jan 2026 16:46:20 -0700 Subject: [PATCH 26/27] Revert "Fix code for HFID casting of strings that aren't UUIDs (#732)" (#740) This reverts commit 0cec7ec7408d1340eb21452ddb706e52483389a9. --- infrahub_sdk/spec/object.py | 38 ++-------- tests/unit/sdk/spec/test_object.py | 107 +---------------------------- 2 files changed, 5 insertions(+), 140 deletions(-) diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index 548b5fc3..0df3f95c 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -7,7 +7,6 @@ from ..exceptions import ObjectValidationError, ValidationError from ..schema import GenericSchemaAPI, RelationshipKind, RelationshipSchema -from ..utils import is_valid_uuid from ..yaml import InfrahubFile, InfrahubFileKind from .models import InfrahubObjectParameters from .processors.factory import DataProcessorFactory @@ -34,32 +33,6 @@ def validate_list_of_objects(value: list[Any]) -> bool: return all(isinstance(item, dict) for item in value) -def normalize_hfid_reference(value: str | list[str]) -> str | list[str]: - """Normalize a reference value to HFID format. - - Args: - value: Either a string (ID or single-component HFID) or a list of strings (multi-component HFID). - - Returns: - - If value is already a list: returns it unchanged as list[str] - - If value is a valid UUID string: returns it unchanged as str (will be treated as an ID) - - If value is a non-UUID string: wraps it in a list as list[str] (single-component HFID) - """ - if isinstance(value, list): - return value - if is_valid_uuid(value): - return value - return [value] - - -def normalize_hfid_references(values: list[str | list[str]]) -> list[str | list[str]]: - """Normalize a list of reference values to HFID format. - - Each string that is not a valid UUID will be wrapped in a list to treat it as a single-component HFID. 
- """ - return [normalize_hfid_reference(v) for v in values] - - class RelationshipDataFormat(str, Enum): UNKNOWN = "unknown" @@ -471,13 +444,10 @@ async def create_node( # - if the relationship is bidirectional and is mandatory on the other side, then we need to create this object First # - if the relationship is bidirectional and is not mandatory on the other side, then we need should create the related object First # - if the relationship is not bidirectional, then we need to create the related object First - if rel_info.format == RelationshipDataFormat.MANY_REF and isinstance(value, list): - # Cardinality-many: normalize each string HFID to list format: "name" -> ["name"] - # UUIDs are left as-is since they are treated as IDs - clean_data[key] = normalize_hfid_references(value) - elif rel_info.format == RelationshipDataFormat.ONE_REF: - # Cardinality-one: normalize string to HFID list format: "name" -> ["name"] or keep as string (UUID) - clean_data[key] = normalize_hfid_reference(value) + if rel_info.is_reference and isinstance(value, list): + clean_data[key] = value + elif rel_info.format == RelationshipDataFormat.ONE_REF and isinstance(value, str): + clean_data[key] = [value] elif not rel_info.is_reference and rel_info.is_bidirectional and rel_info.is_mandatory: remaining_rels.append(key) elif not rel_info.is_reference and not rel_info.is_mandatory: diff --git a/tests/unit/sdk/spec/test_object.py b/tests/unit/sdk/spec/test_object.py index 581b2572..1af02ac3 100644 --- a/tests/unit/sdk/spec/test_object.py +++ b/tests/unit/sdk/spec/test_object.py @@ -1,8 +1,6 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import TYPE_CHECKING, Any -from unittest.mock import AsyncMock, patch +from typing import TYPE_CHECKING import pytest @@ -11,7 +9,6 @@ if TYPE_CHECKING: from infrahub_sdk.client import InfrahubClient - from infrahub_sdk.node import InfrahubNode @pytest.fixture @@ -266,105 +263,3 @@ async def test_parameters_non_dict(client_with_schema_01: InfrahubClient, locati obj = ObjectFile(location="some/path", content=location_with_non_dict_parameters) with pytest.raises(ValidationError): await obj.validate_format(client=client_with_schema_01) - - -@dataclass -class HfidLoadTestCase: - """Test case for HFID normalization in object loading.""" - - name: str - data: list[dict[str, Any]] - expected_primary_tag: str | list[str] | None - expected_tags: list[str] | list[list[str]] | None - - -HFID_NORMALIZATION_TEST_CASES = [ - HfidLoadTestCase( - name="cardinality_one_string_hfid_normalized", - data=[{"name": "Mexico", "type": "Country", "primary_tag": "Important"}], - expected_primary_tag=["Important"], - expected_tags=None, - ), - HfidLoadTestCase( - name="cardinality_one_list_hfid_unchanged", - data=[{"name": "Mexico", "type": "Country", "primary_tag": ["Important"]}], - expected_primary_tag=["Important"], - expected_tags=None, - ), - HfidLoadTestCase( - name="cardinality_one_uuid_unchanged", - data=[{"name": "Mexico", "type": "Country", "primary_tag": "550e8400-e29b-41d4-a716-446655440000"}], - expected_primary_tag="550e8400-e29b-41d4-a716-446655440000", - expected_tags=None, - ), - HfidLoadTestCase( - name="cardinality_many_string_hfids_normalized", - data=[{"name": "Mexico", "type": "Country", "tags": ["Important", "Active"]}], - expected_primary_tag=None, - expected_tags=[["Important"], ["Active"]], - ), - HfidLoadTestCase( - name="cardinality_many_list_hfids_unchanged", - data=[{"name": "Mexico", "type": "Country", "tags": [["Important"], ["Active"]]}], 
- expected_primary_tag=None, - expected_tags=[["Important"], ["Active"]], - ), - HfidLoadTestCase( - name="cardinality_many_mixed_hfids_normalized", - data=[{"name": "Mexico", "type": "Country", "tags": ["Important", ["namespace", "name"]]}], - expected_primary_tag=None, - expected_tags=[["Important"], ["namespace", "name"]], - ), - HfidLoadTestCase( - name="cardinality_many_uuids_unchanged", - data=[ - { - "name": "Mexico", - "type": "Country", - "tags": ["550e8400-e29b-41d4-a716-446655440000", "6ba7b810-9dad-11d1-80b4-00c04fd430c8"], - } - ], - expected_primary_tag=None, - expected_tags=["550e8400-e29b-41d4-a716-446655440000", "6ba7b810-9dad-11d1-80b4-00c04fd430c8"], - ), -] - - -@pytest.mark.parametrize("test_case", HFID_NORMALIZATION_TEST_CASES, ids=lambda tc: tc.name) -async def test_hfid_normalization_in_object_loading( - client_with_schema_01: InfrahubClient, test_case: HfidLoadTestCase -) -> None: - """Test that HFIDs are normalized correctly based on cardinality and format.""" - - root_location = {"apiVersion": "infrahub.app/v1", "kind": "Object", "spec": {"kind": "BuiltinLocation", "data": []}} - location = { - "apiVersion": root_location["apiVersion"], - "kind": root_location["kind"], - "spec": {"kind": root_location["spec"]["kind"], "data": test_case.data}, - } - - obj = ObjectFile(location="some/path", content=location) - await obj.validate_format(client=client_with_schema_01) - - create_calls: list[dict[str, Any]] = [] - - async def mock_create( - kind: str, - branch: str | None = None, - data: dict | None = None, - **kwargs: Any, # noqa: ANN401 - ) -> InfrahubNode: - create_calls.append({"kind": kind, "data": data}) - original_create = client_with_schema_01.__class__.create - return await original_create(client_with_schema_01, kind=kind, branch=branch, data=data, **kwargs) - - client_with_schema_01.create = mock_create - - with patch("infrahub_sdk.node.InfrahubNode.save", new_callable=AsyncMock): - await obj.process(client=client_with_schema_01) - - assert len(create_calls) == 1 - if test_case.expected_primary_tag is not None: - assert create_calls[0]["data"]["primary_tag"] == test_case.expected_primary_tag - if test_case.expected_tags is not None: - assert create_calls[0]["data"]["tags"] == test_case.expected_tags From d116aa39c2f739d1fc96af26679bc2e0087b43d9 Mon Sep 17 00:00:00 2001 From: Mikhail Yohman Date: Thu, 8 Jan 2026 21:21:15 -0700 Subject: [PATCH 27/27] Bump to 1.18.1 and update changelog. (#742) --- CHANGELOG.md | 5 +++++ pyproject.toml | 2 +- uv.lock | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0abcb189..b6c418e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the changes for the upcoming release can be found in . 
+## [1.18.1](https://github.com/opsmill/infrahub-sdk-python/tree/v1.18.1) - 2026-01-08
+
+### Fixed
+
+- Reverted #732 (Fix code for HFID casting of strings that aren't UUIDs)
 
 ## [1.18.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.18.0) - 2026-01-08
 
diff --git a/pyproject.toml b/pyproject.toml
index 5f86203c..cab7c7c4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "infrahub-sdk"
-version = "1.18.0"
+version = "1.18.1"
 description = "Python Client to interact with Infrahub"
 authors = [
     {name = "OpsMill", email = "info@opsmill.com"}
 ]
diff --git a/uv.lock b/uv.lock
index 9d0bf9a6..a4855b8b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -731,7 +731,7 @@ wheels = [
 
 [[package]]
 name = "infrahub-sdk"
-version = "1.18.0"
+version = "1.18.1"
 source = { editable = "." }
 dependencies = [
     { name = "dulwich" },