From a9918930e6a59478ed35c301c543cc6940a48e2f Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 16:32:59 +0200 Subject: [PATCH 1/7] Update type hints in ORM package --- src/aiida/common/typing.py | 12 +- src/aiida/orm/authinfos.py | 28 +- src/aiida/orm/comments.py | 36 +- src/aiida/orm/computers.py | 86 +- src/aiida/orm/entities.py | 66 +- src/aiida/orm/groups.py | 52 +- src/aiida/orm/implementation/authinfos.py | 12 +- src/aiida/orm/implementation/comments.py | 8 +- src/aiida/orm/implementation/computers.py | 6 +- src/aiida/orm/implementation/entities.py | 14 +- src/aiida/orm/implementation/groups.py | 10 +- src/aiida/orm/implementation/logs.py | 8 +- src/aiida/orm/implementation/nodes.py | 28 +- src/aiida/orm/implementation/querybuilder.py | 30 +- .../orm/implementation/storage_backend.py | 29 +- src/aiida/orm/logs.py | 22 +- src/aiida/orm/nodes/attributes.py | 16 +- src/aiida/orm/nodes/caching.py | 14 +- src/aiida/orm/nodes/comments.py | 6 +- src/aiida/orm/nodes/data/array/bands.py | 6 +- src/aiida/orm/nodes/data/array/kpoints.py | 25 +- src/aiida/orm/nodes/data/array/trajectory.py | 11 +- src/aiida/orm/nodes/data/base.py | 8 +- src/aiida/orm/nodes/data/cif.py | 22 +- src/aiida/orm/nodes/data/code/abstract.py | 8 +- src/aiida/orm/nodes/data/code/legacy.py | 14 +- src/aiida/orm/nodes/data/code/portable.py | 6 +- src/aiida/orm/nodes/data/data.py | 15 +- src/aiida/orm/nodes/data/dict.py | 8 +- src/aiida/orm/nodes/data/enum.py | 14 +- src/aiida/orm/nodes/data/folder.py | 30 +- src/aiida/orm/nodes/data/jsonable.py | 12 +- src/aiida/orm/nodes/data/list.py | 7 +- src/aiida/orm/nodes/data/remote/base.py | 2 +- .../orm/nodes/data/remote/stash/compress.py | 10 +- .../orm/nodes/data/remote/stash/custom.py | 10 +- .../orm/nodes/data/remote/stash/folder.py | 10 +- src/aiida/orm/nodes/data/singlefile.py | 48 +- src/aiida/orm/nodes/data/structure.py | 2766 ++++++++--------- src/aiida/orm/nodes/links.py | 31 +- src/aiida/orm/nodes/node.py | 53 +- 
.../orm/nodes/process/calculation/calcjob.py | 14 +- src/aiida/orm/nodes/process/process.py | 50 +- src/aiida/orm/nodes/repository.py | 32 +- src/aiida/orm/querybuilder.py | 148 +- src/aiida/orm/users.py | 14 +- src/aiida/orm/utils/links.py | 28 +- src/aiida/orm/utils/remote.py | 6 +- 48 files changed, 1995 insertions(+), 1896 deletions(-) diff --git a/src/aiida/common/typing.py b/src/aiida/common/typing.py index 74fa988fad..2e6b03734c 100644 --- a/src/aiida/common/typing.py +++ b/src/aiida/common/typing.py @@ -11,9 +11,17 @@ from __future__ import annotations import pathlib +import sys from typing import Union -__all__ = ('FilePath',) - +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self FilePath = Union[str, pathlib.PurePath] + +__all__ = ( + 'FilePath', + 'Self', +) diff --git a/src/aiida/orm/authinfos.py b/src/aiida/orm/authinfos.py index 8282e944fb..5c8adb9451 100644 --- a/src/aiida/orm/authinfos.py +++ b/src/aiida/orm/authinfos.py @@ -10,7 +10,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, Optional, Type +from typing import TYPE_CHECKING, Any from aiida.common import exceptions from aiida.common.pydantic import MetadataField @@ -33,7 +33,7 @@ class AuthInfoCollection(entities.Collection['AuthInfo']): """The collection of `AuthInfo` entries.""" @staticmethod - def _entity_base_cls() -> Type['AuthInfo']: + def _entity_base_cls() -> type['AuthInfo']: return AuthInfo def delete(self, pk: int) -> None: @@ -44,7 +44,7 @@ def delete(self, pk: int) -> None: self._backend.authinfos.delete(pk) -class AuthInfo(entities.Entity['BackendAuthInfo', AuthInfoCollection]): +class AuthInfo(entities.Entity['BackendAuthInfo']): """ORM class that models the authorization information that allows a `User` to connect to a `Computer`.""" _CLS_COLLECTION = AuthInfoCollection @@ -68,12 +68,12 @@ class Model(entities.Entity.Model): description='Whether the instance is enabled', is_attribute=False, ) 
- auth_params: Dict[str, Any] = MetadataField( + auth_params: dict[str, Any] = MetadataField( default_factory=dict, description='Dictionary of authentication parameters', is_attribute=False, ) - metadata: Dict[str, Any] = MetadataField( + metadata: dict[str, Any] = MetadataField( default_factory=dict, description='Dictionary of metadata', is_attribute=False, @@ -84,9 +84,9 @@ def __init__( computer: 'Computer', user: 'User', enabled: bool = True, - auth_params: Dict[str, Any] | None = None, - metadata: Dict[str, Any] | None = None, - backend: Optional['StorageBackend'] = None, + auth_params: dict[str, Any] | None = None, + metadata: dict[str, Any] | None = None, + backend: 'StorageBackend' | None = None, ) -> None: """Create an `AuthInfo` instance for the given computer and user. @@ -151,35 +151,35 @@ def user(self) -> 'User': return entities.from_backend_entity(users.User, self._backend_entity.user) @property - def auth_params(self) -> Dict[str, Any]: + def auth_params(self) -> dict[str, Any]: return self._backend_entity.get_auth_params() @property - def metadata(self) -> Dict[str, Any]: + def metadata(self) -> dict[str, Any]: return self._backend_entity.get_metadata() - def get_auth_params(self) -> Dict[str, Any]: + def get_auth_params(self) -> dict[str, Any]: """Return the dictionary of authentication parameters :return: a dictionary with authentication parameters """ return self._backend_entity.get_auth_params() - def set_auth_params(self, auth_params: Dict[str, Any]) -> None: + def set_auth_params(self, auth_params: dict[str, Any]) -> None: """Set the dictionary of authentication parameters :param auth_params: a dictionary with authentication parameters """ self._backend_entity.set_auth_params(auth_params) - def get_metadata(self) -> Dict[str, Any]: + def get_metadata(self) -> dict[str, Any]: """Return the dictionary of metadata :return: a dictionary with metadata """ return self._backend_entity.get_metadata() - def set_metadata(self, metadata: Dict[str, Any]) 
-> None: + def set_metadata(self, metadata: dict[str, Any]) -> None: """Set the dictionary of metadata :param metadata: a dictionary with metadata diff --git a/src/aiida/orm/comments.py b/src/aiida/orm/comments.py index f4e4dc2e47..964101327a 100644 --- a/src/aiida/orm/comments.py +++ b/src/aiida/orm/comments.py @@ -8,8 +8,10 @@ ########################################################################### """Comment objects and functions""" +from __future__ import annotations + from datetime import datetime -from typing import TYPE_CHECKING, List, Optional, Type +from typing import TYPE_CHECKING, Optional from aiida.common.pydantic import MetadataField from aiida.manage import get_manager @@ -29,7 +31,7 @@ class CommentCollection(entities.Collection['Comment']): """The collection of Comment entries.""" @staticmethod - def _entity_base_cls() -> Type['Comment']: + def _entity_base_cls() -> type['Comment']: return Comment def delete(self, pk: int) -> None: @@ -49,7 +51,7 @@ def delete_all(self) -> None: """ self._backend.comments.delete_all() - def delete_many(self, filters: dict) -> List[int]: + def delete_many(self, filters: dict) -> list[int]: """Delete Comments from the Collection based on ``filters`` :param filters: similar to QueryBuilder filter @@ -62,20 +64,29 @@ def delete_many(self, filters: dict) -> List[int]: return self._backend.comments.delete_many(filters) -class Comment(entities.Entity['BackendComment', CommentCollection]): +class Comment(entities.Entity['BackendComment']): """Base class to map a DbComment that represents a comment attached to a certain Node.""" _CLS_COLLECTION = CommentCollection class Model(entities.Entity.Model): uuid: Optional[str] = MetadataField( - description='The UUID of the comment', is_attribute=False, exclude_to_orm=True + None, + description='The UUID of the comment', + is_attribute=False, + exclude_to_orm=True, ) ctime: Optional[datetime] = MetadataField( - description='Creation time of the comment', is_attribute=False, 
exclude_to_orm=True + None, + description='Creation time of the comment', + is_attribute=False, + exclude_to_orm=True, ) mtime: Optional[datetime] = MetadataField( - description='Modified time of the comment', is_attribute=False, exclude_to_orm=True + None, + description='Modified time of the comment', + is_attribute=False, + exclude_to_orm=True, ) node: int = MetadataField( description='Node PK that the comment is attached to', @@ -89,10 +100,17 @@ class Model(entities.Entity.Model): orm_class='core.user', orm_to_model=lambda comment, _: comment.user.pk, ) - content: str = MetadataField(description='Content of the comment', is_attribute=False) + content: str = MetadataField( + description='Content of the comment', + is_attribute=False, + ) def __init__( - self, node: 'Node', user: 'User', content: Optional[str] = None, backend: Optional['StorageBackend'] = None + self, + node: 'Node', + user: 'User', + content: str | None = None, + backend: 'StorageBackend' | None = None, ): """Create a Comment for a given node and user diff --git a/src/aiida/orm/computers.py b/src/aiida/orm/computers.py index 237f6fc794..9b5463ce92 100644 --- a/src/aiida/orm/computers.py +++ b/src/aiida/orm/computers.py @@ -8,9 +8,11 @@ ########################################################################### """Module for Computer entities""" +from __future__ import annotations + import logging import os -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any from aiida.common import exceptions from aiida.common.pydantic import MetadataField @@ -32,10 +34,10 @@ class ComputerCollection(entities.Collection['Computer']): """The collection of Computer entries.""" @staticmethod - def _entity_base_cls() -> Type['Computer']: + def _entity_base_cls() -> type['Computer']: return Computer - def get_or_create(self, label: Optional[str] = None, **kwargs) -> Tuple[bool, 'Computer']: + def get_or_create(self, label: str | None = None, 
**kwargs) -> tuple[bool, 'Computer']: """Try to retrieve a Computer from the DB with the given arguments; create (and store) a new Computer if such a Computer was not present yet. @@ -52,7 +54,7 @@ def get_or_create(self, label: Optional[str] = None, **kwargs) -> Tuple[bool, 'C except exceptions.NotExistent: return True, Computer(backend=self.backend, label=label, **kwargs) - def list_labels(self) -> List[str]: + def list_labels(self) -> list[str]: """Return a list with all the labels of the computers in the DB.""" return self._backend.computers.list_names() @@ -61,7 +63,7 @@ def delete(self, pk: int) -> None: return self._backend.computers.delete(pk) -class Computer(entities.Entity['BackendComputer', ComputerCollection]): +class Computer(entities.Entity['BackendComputer']): """Computer entity.""" _logger = logging.getLogger(__name__) @@ -74,24 +76,46 @@ class Computer(entities.Entity['BackendComputer', ComputerCollection]): _CLS_COLLECTION = ComputerCollection class Model(entities.Entity.Model): - uuid: str = MetadataField(description='The UUID of the computer', is_attribute=False, exclude_to_orm=True) - label: str = MetadataField(description='Label for the computer', is_attribute=False) - description: str = MetadataField(description='Description of the computer', is_attribute=False) - hostname: str = MetadataField(description='Hostname of the computer', is_attribute=False) - transport_type: str = MetadataField(description='Transport type of the computer', is_attribute=False) - scheduler_type: str = MetadataField(description='Scheduler type of the computer', is_attribute=False) - metadata: Dict[str, Any] = MetadataField(description='Metadata of the computer', is_attribute=False) + uuid: str = MetadataField( + description='The UUID of the computer', + is_attribute=False, + exclude_to_orm=True, + ) + label: str = MetadataField( + description='Label for the computer', + is_attribute=False, + ) + description: str = MetadataField( + description='Description of the 
computer', + is_attribute=False, + ) + hostname: str = MetadataField( + description='Hostname of the computer', + is_attribute=False, + ) + transport_type: str = MetadataField( + description='Transport type of the computer', + is_attribute=False, + ) + scheduler_type: str = MetadataField( + description='Scheduler type of the computer', + is_attribute=False, + ) + metadata: dict[str, Any] = MetadataField( + description='Metadata of the computer', + is_attribute=False, + ) def __init__( self, - label: Optional[str] = None, + label: str | None = None, hostname: str = '', description: str = '', transport_type: str = '', scheduler_type: str = '', - workdir: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None, - backend: Optional['StorageBackend'] = None, + workdir: str | None = None, + metadata: dict[str, Any] | None = None, + backend: StorageBackend | None = None, ) -> None: """Construct a new computer.""" backend = backend or get_manager().get_profile_storage() @@ -186,7 +210,7 @@ def _workdir_validator(cls, workdir: str) -> None: if not os.path.isabs(convertedwd): raise exceptions.ValidationError('The workdir must be an absolute path') - def _mpirun_command_validator(self, mpirun_cmd: Union[List[str], Tuple[str, ...]]) -> None: + def _mpirun_command_validator(self, mpirun_cmd: list[str] | tuple[str, ...]) -> None: """Validates the mpirun_command variable. MUST be called after properly checking for a valid scheduler. 
""" @@ -239,7 +263,7 @@ def validate(self) -> None: self._mpirun_command_validator(mpirun_cmd) @classmethod - def _default_mpiprocs_per_machine_validator(cls, def_cpus_per_machine: Optional[int]) -> None: + def _default_mpiprocs_per_machine_validator(cls, def_cpus_per_machine: int | None) -> None: """Validates the default number of CPUs per machine (node)""" if def_cpus_per_machine is None: return @@ -251,7 +275,7 @@ def _default_mpiprocs_per_machine_validator(cls, def_cpus_per_machine: Optional[ ) @classmethod - def default_memory_per_machine_validator(cls, def_memory_per_machine: Optional[int]) -> None: + def default_memory_per_machine_validator(cls, def_memory_per_machine: int | None) -> None: """Validates the default amount of memory (kB) per machine (node)""" if def_memory_per_machine is None: return @@ -355,7 +379,7 @@ def transport_type(self, value: str) -> None: self._backend_entity.set_transport_type(value) @property - def metadata(self) -> Dict[str, Any]: + def metadata(self) -> dict[str, Any]: """Return the computer metadata. :return: the metadata. @@ -363,7 +387,7 @@ def metadata(self) -> Dict[str, Any]: return self._backend_entity.get_metadata() @metadata.setter - def metadata(self, value: Dict[str, Any]) -> None: + def metadata(self, value: dict[str, Any]) -> None: """Set the computer metadata. :param value: the metadata to set. @@ -441,7 +465,7 @@ def set_use_double_quotes(self, val: bool) -> None: type_check(val, bool) self.set_property('use_double_quotes', val) - def get_mpirun_command(self) -> List[str]: + def get_mpirun_command(self) -> list[str]: """Return the mpirun command. Must be a list of strings, that will be then joined with spaces when submitting. 
@@ -449,7 +473,7 @@ def get_mpirun_command(self) -> List[str]: """ return self.get_property('mpirun_command', ['mpirun', '-np', '{tot_num_mpiprocs}']) - def set_mpirun_command(self, val: Union[List[str], Tuple[str, ...]]) -> None: + def set_mpirun_command(self, val: list[str] | tuple[str, ...]) -> None: """Set the mpirun command. It must be a list of strings (you can use string.split() if you have a single, space-separated string). """ @@ -457,13 +481,13 @@ def set_mpirun_command(self, val: Union[List[str], Tuple[str, ...]]) -> None: raise TypeError('the mpirun_command must be a list of strings') self.set_property('mpirun_command', val) - def get_default_mpiprocs_per_machine(self) -> Optional[int]: + def get_default_mpiprocs_per_machine(self) -> int | None: """Return the default number of CPUs per machine (node) for this computer, or None if it was not set. """ return self.get_property('default_mpiprocs_per_machine', None) - def set_default_mpiprocs_per_machine(self, def_cpus_per_machine: Optional[int]) -> None: + def set_default_mpiprocs_per_machine(self, def_cpus_per_machine: int | None) -> None: """Set the default number of CPUs per machine (node) for this computer. Accepts None if you do not want to set this value. """ @@ -473,13 +497,13 @@ def set_default_mpiprocs_per_machine(self, def_cpus_per_machine: Optional[int]) raise TypeError('def_cpus_per_machine must be an integer (or None)') self.set_property('default_mpiprocs_per_machine', def_cpus_per_machine) - def get_default_memory_per_machine(self) -> Optional[int]: + def get_default_memory_per_machine(self) -> int | None: """Return the default amount of memory (kB) per machine (node) for this computer, or None if it was not set. 
""" return self.get_property('default_memory_per_machine', None) - def set_default_memory_per_machine(self, def_memory_per_machine: Optional[int]) -> None: + def set_default_memory_per_machine(self, def_memory_per_machine: int | None) -> None: """Set the default amount of memory (kB) per machine (node) for this computer. Accepts None if you do not want to set this value. """ @@ -588,7 +612,7 @@ def is_user_enabled(self, user: 'User') -> bool: # Return False if the user is not configured (in a sense, it is disabled for that user) return False - def get_transport(self, user: Optional['User'] = None) -> 'Transport': + def get_transport(self, user: 'User' | None = None) -> 'Transport': """Return a Transport class, configured with all correct parameters. The Transport is closed (meaning that if you want to run any operation with it, you have to open it first (i.e., e.g. for a SSH transport, you have @@ -612,7 +636,7 @@ def get_transport(self, user: Optional['User'] = None) -> 'Transport': authinfo = authinfos.AuthInfo.get_collection(self.backend).get(dbcomputer=self, aiidauser=user) return authinfo.get_transport() - def get_transport_class(self) -> Type['Transport']: + def get_transport_class(self) -> type['Transport']: """Get the transport class for this computer. 
Can be used to instantiate a transport instance.""" try: return TransportFactory(self.transport_type) @@ -632,7 +656,7 @@ def get_scheduler(self) -> 'Scheduler': f'No scheduler found for {self.label} [type {self.scheduler_type}], message: {exception}' ) - def configure(self, user: Optional['User'] = None, **kwargs: Any) -> 'AuthInfo': + def configure(self, user: 'User' | None = None, **kwargs: Any) -> 'AuthInfo': """Configure a computer for a user with valid auth params passed via kwargs :param user: the user to configure the computer for @@ -663,7 +687,7 @@ def configure(self, user: Optional['User'] = None, **kwargs: Any) -> 'AuthInfo': return authinfo - def get_configuration(self, user: Optional['User'] = None) -> Dict[str, Any]: + def get_configuration(self, user: 'User' | None = None) -> dict[str, Any]: """Get the configuration of computer for the given user as a dictionary :param user: the user to to get the configuration for, otherwise default user diff --git a/src/aiida/orm/entities.py b/src/aiida/orm/entities.py index a7d9deda7c..7b181b7ecb 100644 --- a/src/aiida/orm/entities.py +++ b/src/aiida/orm/entities.py @@ -11,19 +11,19 @@ from __future__ import annotations import abc -import pathlib from enum import Enum from functools import lru_cache -from typing import TYPE_CHECKING, Any, Generic, List, Optional, Type, TypeVar, Union +from pathlib import Path +from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar from plumpy.base.utils import call_with_super_check, super_check -from pydantic import BaseModel -from pydantic.fields import FieldInfo +from pydantic import BaseModel, fields from aiida.common import exceptions, log from aiida.common.exceptions import EntryPointError, InvalidOperation, NotExistent from aiida.common.lang import classproperty, type_check from aiida.common.pydantic import MetadataField, get_metadata +from aiida.common.typing import Self from aiida.common.warnings import warn_deprecation from aiida.manage import get_manager @@ 
-35,7 +35,6 @@ __all__ = ('Collection', 'Entity', 'EntityTypes') -CollectionType = TypeVar('CollectionType', bound='Collection') EntityType = TypeVar('EntityType', bound='Entity') BackendEntityType = TypeVar('BackendEntityType', bound='BackendEntity') @@ -59,12 +58,12 @@ class Collection(abc.ABC, Generic[EntityType]): @staticmethod @abc.abstractmethod - def _entity_base_cls() -> Type[EntityType]: + def _entity_base_cls() -> type[EntityType]: """The allowed entity class or subclasses thereof.""" @classmethod @lru_cache(maxsize=100) - def get_cached(cls, entity_class: Type[EntityType], backend: 'StorageBackend'): + def get_cached(cls, entity_class: type[EntityType], backend: 'StorageBackend'): """Get the cached collection instance for the given entity class and backend. :param backend: the backend instance to get the collection for @@ -74,7 +73,7 @@ def get_cached(cls, entity_class: Type[EntityType], backend: 'StorageBackend'): type_check(backend, StorageBackend) return cls(entity_class, backend=backend) - def __init__(self, entity_class: Type[EntityType], backend: Optional['StorageBackend'] = None) -> None: + def __init__(self, entity_class: type[EntityType], backend: 'StorageBackend' | None = None) -> None: """Construct a new entity collection. :param entity_class: the entity type e.g. 
User, Computer, etc @@ -87,14 +86,14 @@ def __init__(self, entity_class: Type[EntityType], backend: Optional['StorageBac self._backend = backend or get_manager().get_profile_storage() self._entity_type = entity_class - def __call__(self: CollectionType, backend: 'StorageBackend') -> CollectionType: + def __call__(self, backend: 'StorageBackend') -> Self: """Get or create a cached collection using a new backend.""" if backend is self._backend: return self return self.get_cached(self.entity_type, backend=backend) # type: ignore[arg-type] @property - def entity_type(self) -> Type[EntityType]: + def entity_type(self) -> type[EntityType]: """The entity type for this instance.""" return self._entity_type @@ -105,11 +104,11 @@ def backend(self) -> 'StorageBackend': def query( self, - filters: Optional['FilterType'] = None, - order_by: Optional['OrderByType'] = None, - project: Optional[Union[list[str], str]] = None, - limit: Optional[int] = None, - offset: Optional[int] = None, + filters: 'FilterType' | None = None, + order_by: 'OrderByType' | None = None, + project: list[str] | str | None = None, + limit: int | None = None, + offset: int | None = None, subclassing: bool = True, ) -> 'QueryBuilder': """Get a query builder for the objects of this collection. @@ -143,10 +142,10 @@ def get(self, **filters: Any) -> EntityType: def find( self, - filters: Optional['FilterType'] = None, - order_by: Optional['OrderByType'] = None, - limit: Optional[int] = None, - ) -> List[EntityType]: + filters: 'FilterType' | None = None, + order_by: 'OrderByType' | None = None, + limit: int | None = None, + ) -> list[EntityType]: """Find collection entries matching the filter criteria. :param filters: the keyword value pair filters to match @@ -158,14 +157,14 @@ def find( query = self.query(filters=filters, order_by=order_by, limit=limit) return query.all(flat=True) - def all(self) -> List[EntityType]: + def all(self) -> list[EntityType]: """Get all entities in this collection. 
:return: A list of all entities """ return self.query().all(flat=True) - def count(self, filters: Optional['FilterType'] = None) -> int: + def count(self, filters: 'FilterType' | None = None) -> int: """Count entities in this collection according to criteria. :param filters: the keyword value pair filters to match @@ -175,10 +174,10 @@ def count(self, filters: Optional['FilterType'] = None) -> int: return self.query(filters=filters).count() -class Entity(abc.ABC, Generic[BackendEntityType, CollectionType], metaclass=EntityFieldMeta): +class Entity(abc.ABC, Generic[BackendEntityType], metaclass=EntityFieldMeta): """An AiiDA entity""" - _CLS_COLLECTION: Type[CollectionType] = Collection # type: ignore[assignment] + _CLS_COLLECTION: type[Collection[Self]] = Collection _logger = log.AIIDA_LOGGER.getChild('orm.entities') class Model(BaseModel, defer_build=True): @@ -191,7 +190,7 @@ class Model(BaseModel, defer_build=True): ) @classmethod - def model_to_orm_fields(cls) -> dict[str, FieldInfo]: + def model_to_orm_fields(cls) -> dict[str, fields.FieldInfo]: return { key: field for key, field in cls.Model.model_fields.items() if not get_metadata(field, 'exclude_to_orm') } @@ -208,6 +207,7 @@ def model_to_orm_field_values(cls, model: Model) -> dict[str, Any]: if field_value is None: continue + orm_class: type[Entity] | str | None = None if orm_class := get_metadata(field, 'orm_class'): if isinstance(orm_class, str): try: @@ -227,7 +227,7 @@ def model_to_orm_field_values(cls, model: Model) -> dict[str, Any]: return fields - def _to_model(self, repository_path: pathlib.Path) -> Model: + def _to_model(self, repository_path: Path | None = None) -> Model: """Return the entity instance as an instance of its model.""" fields = {} @@ -240,15 +240,15 @@ def _to_model(self, repository_path: pathlib.Path) -> Model: return self.Model(**fields) @classmethod - def _from_model(cls, model: Model) -> 'Entity': + def _from_model(cls, model: Model) -> Self: """Return an entity instance from an 
instance of its model.""" fields = cls.model_to_orm_field_values(model) return cls(**fields) - def serialize(self, repository_path: Union[pathlib.Path, None] = None) -> dict[str, Any]: + def serialize(self, repository_path: Path | None = None) -> dict[str, Any]: """Serialize the entity instance to JSON. - :param repository_path: If the orm node has files in the repository, this path is used to dump the repostiory + :param repository_path: If the orm node has files in the repository, this path is used to dump the repository files to. If no path is specified a temporary path is created using the entities pk. """ self.logger.warning( @@ -257,7 +257,7 @@ def serialize(self, repository_path: Union[pathlib.Path, None] = None) -> dict[s if repository_path is None: import tempfile - repository_path = pathlib.Path(tempfile.mkdtemp()) / f'./aiida_serialization/{self.pk}/' + repository_path = Path(tempfile.mkdtemp()) / f'./aiida_serialization/{self.pk}/' repository_path.mkdir(parents=True) else: if not repository_path.exists(): @@ -267,7 +267,7 @@ def serialize(self, repository_path: Union[pathlib.Path, None] = None) -> dict[s return self._to_model(repository_path).model_dump() @classmethod - def from_serialized(cls, **kwargs: dict[str, Any]) -> 'Entity': + def from_serialized(cls, **kwargs: dict[str, Any]) -> EntityType: """Construct an entity instance from JSON serialized data.""" cls._logger.warning( 'Serialization through pydantic is still an experimental feature and might break in future releases.' @@ -275,7 +275,7 @@ def from_serialized(cls, **kwargs: dict[str, Any]) -> 'Entity': return cls._from_model(cls.Model(**kwargs)) # type: ignore[arg-type] @classproperty - def objects(cls: EntityType) -> CollectionType: # noqa: N805 + def objects(cls: EntityType) -> Collection[Self]: # noqa: N805 """Get a collection for objects of this type, with the default backend. .. deprecated:: This will be removed in v3, use ``collection`` instead. 
@@ -286,7 +286,7 @@ def objects(cls: EntityType) -> CollectionType: # noqa: N805 return cls.collection @classproperty - def collection(cls) -> CollectionType: # noqa: N805 + def collection(cls) -> Collection[Self]: # noqa: N805 """Get a collection for objects of this type, with the default backend. :return: an object that can be used to access entities of this type @@ -396,7 +396,7 @@ def backend_entity(self) -> BackendEntityType: return self._backend_entity -def from_backend_entity(cls: Type[EntityType], backend_entity: BackendEntityType) -> EntityType: +def from_backend_entity(cls: type[EntityType], backend_entity: BackendEntityType) -> EntityType: """Construct an entity from a backend entity instance :param backend_entity: the backend entity diff --git a/src/aiida/orm/groups.py b/src/aiida/orm/groups.py index 574f15b958..23807cf7e9 100644 --- a/src/aiida/orm/groups.py +++ b/src/aiida/orm/groups.py @@ -6,17 +6,20 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""AiiDA Group entites""" +"""AiiDA Group entities""" + +from __future__ import annotations import datetime import warnings from functools import cached_property from pathlib import Path -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional, Sequence, Tuple, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, Sequence, cast from aiida.common import exceptions from aiida.common.lang import classproperty, type_check from aiida.common.pydantic import MetadataField +from aiida.common.typing import Self from aiida.common.warnings import warn_deprecation from aiida.manage import get_manager @@ -31,10 +34,8 @@ __all__ = ('AutoGroup', 'Group', 'ImportGroup', 'UpfFamily') -SelfType = TypeVar('SelfType', bound='Group') - -def load_group_class(type_string: str) -> Type['Group']: +def load_group_class(type_string: 
str) -> type['Group']: """Load the sub class of `Group` that corresponds to the given `type_string`. .. note:: will fall back on `aiida.orm.groups.Group` if `type_string` cannot be resolved to loadable entry point. @@ -59,10 +60,10 @@ class GroupCollection(entities.Collection['Group']): """Collection of Groups""" @staticmethod - def _entity_base_cls() -> Type['Group']: + def _entity_base_cls() -> type['Group']: return Group - def get_or_create(self, label: Optional[str] = None, **kwargs) -> Tuple['Group', bool]: + def get_or_create(self, label: str | None = None, **kwargs) -> tuple['Group', bool]: """Try to retrieve a group from the DB with the given arguments; create (and store) a new group if such a group was not present yet. @@ -105,10 +106,10 @@ def extras(self) -> extras.EntityExtras: return extras.EntityExtras(self._group) -class Group(entities.Entity['BackendGroup', GroupCollection]): +class Group(entities.Entity['BackendGroup']): """An AiiDA ORM implementation of group of nodes.""" - __type_string: ClassVar[Optional[str]] + __type_string: ClassVar[str | None] class Model(entities.Entity.Model): uuid: str = MetadataField(description='The UUID of the group', is_attribute=False, exclude_to_orm=True) @@ -122,9 +123,7 @@ class Model(entities.Entity.Model): time: Optional[datetime.datetime] = MetadataField( description='The creation time of the node', is_attribute=False ) - label: str = MetadataField(description='The group label', is_attribute=False) - description: Optional[str] = MetadataField(description='The group description', is_attribute=False) - extras: Optional[Dict[str, Any]] = MetadataField( + extras: Optional[dict[str, Any]] = MetadataField( description='The group extras', is_attribute=False, is_subscriptable=True, @@ -135,13 +134,13 @@ class Model(entities.Entity.Model): def __init__( self, - label: Optional[str] = None, - user: Optional['User'] = None, + label: str | None = None, + user: User | None = None, description: str = '', - type_string: 
Optional[str] = None, - time: Optional[datetime.datetime] = None, - extras: Optional[Dict[str, Any]] = None, - backend: Optional['StorageBackend'] = None, + type_string: str | None = None, + time: datetime.datetime | None = None, + extras: dict[str, Any] | None = None, + backend: StorageBackend | None = None, ): """Create a new group. Either pass a dbgroup parameter, to reload a group from the DB (and then, no further parameters are allowed), @@ -171,7 +170,7 @@ def __init__( self.base.extras.set_many(extras) @classproperty - def _type_string(cls) -> Optional[str]: # noqa: N805 + def _type_string(cls) -> str | None: # noqa: N805 from aiida.plugins.entry_point import get_entry_point_from_class if hasattr(cls, '__type_string'): @@ -203,11 +202,10 @@ def __repr__(self) -> str: def __str__(self) -> str: return f'{self.__class__.__name__}<{self.label}>' - def store(self: SelfType) -> SelfType: + def store(self) -> Self: """Verify that the group is allowed to be stored, which is the case along as `type_string` is set.""" if self._type_string is None: raise exceptions.StoringNotAllowed('`type_string` is `None` so the group cannot be stored.') - return super().store() @classproperty @@ -313,7 +311,7 @@ def clear(self) -> None: """Remove all the nodes from this group.""" return self._backend_entity.clear() - def add_nodes(self, nodes: Union['Node', Sequence['Node']]) -> None: + def add_nodes(self, nodes: 'Node' | Sequence['Node']) -> None: """Add a node or a set of nodes to the group. :note: all the nodes *and* the group itself have to be stored. @@ -334,7 +332,7 @@ def add_nodes(self, nodes: Union['Node', Sequence['Node']]) -> None: self._backend_entity.add_nodes([node.backend_entity for node in nodes]) - def remove_nodes(self, nodes: Union['Node', Sequence['Node']]) -> None: + def remove_nodes(self, nodes: 'Node' | Sequence['Node']) -> None: """Remove a node or a set of nodes to the group. :note: all the nodes *and* the group itself have to be stored. 
@@ -361,14 +359,14 @@ def is_user_defined(self) -> bool: def dump( self, - output_path: Optional[Union[str, Path]] = None, + output_path: str | Path | None = None, # Dump mode options dry_run: bool = False, overwrite: bool = False, # Time filtering options - past_days: Optional[int] = None, - start_date: Optional[datetime.datetime] = None, - end_date: Optional[datetime.datetime] = None, + past_days: int | None = None, + start_date: datetime.datetime | None = None, + end_date: datetime.datetime | None = None, filter_by_last_dump_time: bool = True, # Node collection options only_top_level_calcs: bool = True, diff --git a/src/aiida/orm/implementation/authinfos.py b/src/aiida/orm/implementation/authinfos.py index 508874943b..b4b5cc275a 100644 --- a/src/aiida/orm/implementation/authinfos.py +++ b/src/aiida/orm/implementation/authinfos.py @@ -8,8 +8,10 @@ ########################################################################### """Module for the backend implementation of the `AuthInfo` ORM class.""" +from __future__ import annotations + import abc -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from .entities import BackendCollection, BackendEntity @@ -55,28 +57,28 @@ def user(self) -> 'BackendUser': """Return the user associated with this instance.""" @abc.abstractmethod - def get_auth_params(self) -> Dict[str, Any]: + def get_auth_params(self) -> dict[str, Any]: """Return the dictionary of authentication parameters :return: a dictionary with authentication parameters """ @abc.abstractmethod - def set_auth_params(self, auth_params: Dict[str, Any]) -> None: + def set_auth_params(self, auth_params: dict[str, Any]) -> None: """Set the dictionary of authentication parameters :param auth_params: a dictionary with authentication parameters """ @abc.abstractmethod - def get_metadata(self) -> Dict[str, Any]: + def get_metadata(self) -> dict[str, Any]: """Return the dictionary of metadata :return: a dictionary with metadata """ 
@abc.abstractmethod - def set_metadata(self, metadata: Dict[str, Any]) -> None: + def set_metadata(self, metadata: dict[str, Any]) -> None: """Set the dictionary of metadata :param metadata: a dictionary with metadata diff --git a/src/aiida/orm/implementation/comments.py b/src/aiida/orm/implementation/comments.py index be0fca8f99..489ace0d06 100644 --- a/src/aiida/orm/implementation/comments.py +++ b/src/aiida/orm/implementation/comments.py @@ -8,9 +8,11 @@ ########################################################################### """Module for comment backend classes.""" +from __future__ import annotations + import abc from datetime import datetime -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from .entities import BackendCollection, BackendEntity @@ -77,7 +79,7 @@ class BackendCommentCollection(BackendCollection[BackendComment]): @abc.abstractmethod def create( # type: ignore[override] - self, node: 'BackendNode', user: 'BackendUser', content: Optional[str] = None, **kwargs + self, node: 'BackendNode', user: 'BackendUser', content: str | None = None, **kwargs ): """Create a Comment for a given node and user @@ -105,7 +107,7 @@ def delete_all(self) -> None: """ @abc.abstractmethod - def delete_many(self, filters: dict) -> List[int]: + def delete_many(self, filters: dict) -> list[int]: """Delete Comments based on ``filters`` :param filters: similar to QueryBuilder filter diff --git a/src/aiida/orm/implementation/computers.py b/src/aiida/orm/implementation/computers.py index cd607c6d26..2ef8f8c7fc 100644 --- a/src/aiida/orm/implementation/computers.py +++ b/src/aiida/orm/implementation/computers.py @@ -10,7 +10,7 @@ import abc import logging -from typing import Any, Dict +from typing import Any from .entities import BackendCollection, BackendEntity @@ -62,11 +62,11 @@ def set_hostname(self, val: str) -> None: """ @abc.abstractmethod - def get_metadata(self) -> Dict[str, Any]: + def get_metadata(self) -> dict[str, Any]: 
"""Return the metadata for the computer.""" @abc.abstractmethod - def set_metadata(self, metadata: Dict[str, Any]) -> None: + def set_metadata(self, metadata: dict[str, Any]) -> None: """Set the metadata for the computer.""" @abc.abstractmethod diff --git a/src/aiida/orm/implementation/entities.py b/src/aiida/orm/implementation/entities.py index f64f1a83e8..dd1a544aa3 100644 --- a/src/aiida/orm/implementation/entities.py +++ b/src/aiida/orm/implementation/entities.py @@ -11,7 +11,7 @@ from __future__ import annotations import abc -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Generic, Iterable, List, Tuple, Type, TypeVar +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterable, TypeVar if TYPE_CHECKING: from aiida.orm.implementation import StorageBackend @@ -74,7 +74,7 @@ def is_stored(self) -> bool: class BackendCollection(Generic[EntityType]): """Container class that represents a collection of entries of a particular backend entity.""" - ENTITY_CLASS: ClassVar[Type[EntityType]] + ENTITY_CLASS: ClassVar[type[EntityType]] def __init__(self, backend: 'StorageBackend'): """:param backend: the backend this collection belongs to""" @@ -99,7 +99,7 @@ class BackendEntityExtrasMixin(abc.ABC): @property @abc.abstractmethod - def extras(self) -> Dict[str, Any]: + def extras(self) -> dict[str, Any]: """Return the complete extras dictionary. .. warning:: While the entity is unstored, this will return references of the extras on the database model, @@ -128,7 +128,7 @@ def get_extra(self, key: str) -> Any: :raises AttributeError: if the extra does not exist """ - def get_extra_many(self, keys: Iterable[str]) -> List[Any]: + def get_extra_many(self, keys: Iterable[str]) -> list[Any]: """Return the values of multiple extras. .. 
warning:: While the entity is unstored, this will return references of the extras on the database model, @@ -153,7 +153,7 @@ def set_extra(self, key: str, value: Any) -> None: :param value: value of the extra """ - def set_extra_many(self, extras: Dict[str, Any]) -> None: + def set_extra_many(self, extras: dict[str, Any]) -> None: """Set multiple extras. .. note:: This will override any existing extras that are present in the new dictionary. @@ -164,7 +164,7 @@ def set_extra_many(self, extras: Dict[str, Any]) -> None: self.set_extra(key, value) @abc.abstractmethod - def reset_extras(self, extras: Dict[str, Any]) -> None: + def reset_extras(self, extras: dict[str, Any]) -> None: """Reset the extras. .. note:: This will completely clear any existing extras and replace them with the new dictionary. @@ -194,7 +194,7 @@ def clear_extras(self) -> None: """Delete all extras.""" @abc.abstractmethod - def extras_items(self) -> Iterable[Tuple[str, Any]]: + def extras_items(self) -> Iterable[tuple[str, Any]]: """Return an iterator over the extras key/value pairs.""" @abc.abstractmethod diff --git a/src/aiida/orm/implementation/groups.py b/src/aiida/orm/implementation/groups.py index 3be75c2050..45b644110a 100644 --- a/src/aiida/orm/implementation/groups.py +++ b/src/aiida/orm/implementation/groups.py @@ -8,9 +8,11 @@ ########################################################################### """Backend group module""" +from __future__ import annotations + import abc import datetime -from typing import TYPE_CHECKING, List, Optional, Protocol, Sequence, Union +from typing import TYPE_CHECKING, Protocol, Sequence from .entities import BackendCollection, BackendEntity, BackendEntityExtrasMixin from .nodes import BackendNode @@ -30,7 +32,7 @@ def __iter__(self) -> 'NodeIterator': def __next__(self) -> BackendNode: """Return the next node in the group.""" - def __getitem__(self, value: Union[int, slice]) -> Union[BackendNode, List[BackendNode]]: + def __getitem__(self, value: int | 
slice) -> BackendNode | list[BackendNode]: """Index node(s) from the group.""" def __len__(self) -> int: @@ -61,12 +63,12 @@ def label(self, name: str) -> None: @property @abc.abstractmethod - def description(self) -> Optional[str]: + def description(self) -> str | None: """Return the description of the group as a string.""" @description.setter @abc.abstractmethod - def description(self, value: Optional[str]): + def description(self, value: str | None): """Return the description of the group as a string.""" @property diff --git a/src/aiida/orm/implementation/logs.py b/src/aiida/orm/implementation/logs.py index 9833a77a68..22ad527a84 100644 --- a/src/aiida/orm/implementation/logs.py +++ b/src/aiida/orm/implementation/logs.py @@ -8,9 +8,11 @@ ########################################################################### """Backend group module""" +from __future__ import annotations + import abc from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any from .entities import BackendCollection, BackendEntity @@ -58,7 +60,7 @@ def message(self) -> str: @property @abc.abstractmethod - def metadata(self) -> Dict[str, Any]: + def metadata(self) -> dict[str, Any]: """Return the metadata corresponding to the log entry.""" @@ -85,7 +87,7 @@ def delete_all(self) -> None: """ @abc.abstractmethod - def delete_many(self, filters: 'FilterType') -> List[int]: + def delete_many(self, filters: 'FilterType') -> list[int]: """Delete Logs based on ``filters`` :param filters: similar to QueryBuilder filter diff --git a/src/aiida/orm/implementation/nodes.py b/src/aiida/orm/implementation/nodes.py index d16268b1e1..9ce5b4e3e8 100644 --- a/src/aiida/orm/implementation/nodes.py +++ b/src/aiida/orm/implementation/nodes.py @@ -8,9 +8,11 @@ ########################################################################### """Abstract BackendNode and BackendNodeCollection implementation.""" +from __future__ import annotations + import abc from 
datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Sequence, Tuple, TypeVar +from typing import TYPE_CHECKING, Any, Iterable, Sequence, TypeVar from .entities import BackendCollection, BackendEntity, BackendEntityExtrasMixin @@ -55,7 +57,7 @@ def node_type(self) -> str: @property @abc.abstractmethod - def process_type(self) -> Optional[str]: + def process_type(self) -> str | None: """Return the node process type. :return: the process type @@ -63,7 +65,7 @@ def process_type(self) -> Optional[str]: @process_type.setter @abc.abstractmethod - def process_type(self, value: Optional[str]) -> None: + def process_type(self, value: str | None) -> None: """Set the process type. :param value: the new value to set @@ -103,7 +105,7 @@ def description(self, value: str) -> None: @property @abc.abstractmethod - def repository_metadata(self) -> Dict[str, Any]: + def repository_metadata(self) -> dict[str, Any]: """Return the node repository metadata. :return: the repository metadata @@ -111,7 +113,7 @@ def repository_metadata(self) -> Dict[str, Any]: @repository_metadata.setter @abc.abstractmethod - def repository_metadata(self, value: Dict[str, Any]) -> None: + def repository_metadata(self, value: dict[str, Any]) -> None: """Set the repository metadata. :param value: the new value to set @@ -119,7 +121,7 @@ def repository_metadata(self, value: Dict[str, Any]) -> None: @property @abc.abstractmethod - def computer(self) -> Optional['BackendComputer']: + def computer(self) -> 'BackendComputer' | None: """Return the computer of this node. :return: the computer or None @@ -127,7 +129,7 @@ def computer(self) -> Optional['BackendComputer']: @computer.setter @abc.abstractmethod - def computer(self, computer: Optional['BackendComputer']) -> None: + def computer(self, computer: 'BackendComputer' | None) -> None: """Set the computer of this node. 
:param computer: a `BackendComputer` @@ -180,7 +182,7 @@ def add_incoming(self, source: 'BackendNode', link_type, link_label): @abc.abstractmethod def store( - self: BackendNodeType, links: Optional[Sequence['LinkTriple']] = None, clean: bool = True + self: BackendNodeType, links: Sequence['LinkTriple'] | None = None, clean: bool = True ) -> BackendNodeType: """Store the node in the database. @@ -201,7 +203,7 @@ def clean_values(self): @property @abc.abstractmethod - def attributes(self) -> Dict[str, Any]: + def attributes(self) -> dict[str, Any]: """Return the complete attributes dictionary. .. warning:: While the entity is unstored, this will return references of the attributes on the database model, @@ -230,7 +232,7 @@ def get_attribute(self, key: str) -> Any: :raises AttributeError: if the attribute does not exist """ - def get_attribute_many(self, keys: Iterable[str]) -> List[Any]: + def get_attribute_many(self, keys: Iterable[str]) -> list[Any]: """Return the values of multiple attributes. .. warning:: While the entity is unstored, this will return references of the attributes on the database model, @@ -258,7 +260,7 @@ def set_attribute(self, key: str, value: Any) -> None: :param value: value of the attribute """ - def set_attribute_many(self, attributes: Dict[str, Any]) -> None: + def set_attribute_many(self, attributes: dict[str, Any]) -> None: """Set multiple attributes. .. note:: This will override any existing attributes that are present in the new dictionary. @@ -269,7 +271,7 @@ def set_attribute_many(self, attributes: Dict[str, Any]) -> None: self.set_attribute(key, value) @abc.abstractmethod - def reset_attributes(self, attributes: Dict[str, Any]) -> None: + def reset_attributes(self, attributes: dict[str, Any]) -> None: """Reset the attributes. .. note:: This will completely clear any existing attributes and replace them with the new dictionary. 
@@ -299,7 +301,7 @@ def clear_attributes(self): """Delete all attributes.""" @abc.abstractmethod - def attributes_items(self) -> Iterable[Tuple[str, Any]]: + def attributes_items(self) -> Iterable[tuple[str, Any]]: """Return an iterator over the attributes. :return: an iterator with attribute key value pairs diff --git a/src/aiida/orm/implementation/querybuilder.py b/src/aiida/orm/implementation/querybuilder.py index 6cc83f58b1..f08f7928d9 100644 --- a/src/aiida/orm/implementation/querybuilder.py +++ b/src/aiida/orm/implementation/querybuilder.py @@ -8,8 +8,10 @@ ########################################################################### """Abstract `QueryBuilder` definition.""" +from __future__ import annotations + import abc -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Literal, Optional, Set, TypedDict, Union +from typing import TYPE_CHECKING, Any, Iterable, Literal, TypedDict from aiida.common.lang import type_check from aiida.common.log import AIIDA_LOGGER @@ -22,7 +24,7 @@ QUERYBUILD_LOGGER = AIIDA_LOGGER.getChild('orm.querybuilder') -EntityRelationships: Dict[str, Set[str]] = { +EntityRelationships: dict[str, set[str]] = { EntityTypes.AUTHINFO.value: {'with_computer', 'with_user'}, EntityTypes.COMMENT.value: {'with_node', 'with_user'}, EntityTypes.COMPUTER.value: {'with_node'}, @@ -47,7 +49,7 @@ class PathItemType(TypedDict): """An item on the query path""" - entity_type: Union[str, List[str]] + entity_type: str | list[str] # this can be derived from the entity_type, but it is more efficient to store orm_base: Literal['node', 'group', 'authinfo', 'comment', 'computer', 'log', 'user'] tag: str @@ -60,20 +62,20 @@ class PathItemType(TypedDict): class QueryDictType(TypedDict): """A JSON serialisable representation of a ``QueryBuilder`` instance""" - path: List[PathItemType] + path: list[PathItemType] # mapping: tag -> 'and' | 'or' | '~or' | '~and' | '!and' | '!or' -> [] -> operator -> value # -> operator -> value - filters: Dict[str, Dict[str, 
Union[Dict[str, List[Dict[str, Any]]], Dict[str, Any]]]] + filters: dict[str, dict[str, dict[str, list[dict[str, Any]]] | dict[str, Any]]] # mapping: tag -> [] -> field -> 'func' -> 'max' | 'min' | 'count' # 'cast' -> 'b' | 'd' | 'f' | 'i' | 'j' | 't' - project: Dict[str, List[Dict[str, Dict[str, Any]]]] + project: dict[str, list[dict[str, dict[str, Any]]]] # mapping: tag -> field -> return key for iterdict method - project_map: Dict[str, Dict[str, str]] + project_map: dict[str, dict[str, str]] # list of mappings: tag -> [] -> field -> 'order' -> 'asc' | 'desc' # 'cast' -> 'b' | 'd' | 'f' | 'i' | 'j' | 't' - order_by: List[Dict[str, List[Dict[str, Dict[str, str]]]]] - offset: Optional[int] - limit: Optional[int] + order_by: list[dict[str, list[dict[str, dict[str, str]]]]] + offset: int | None + limit: int | None distinct: bool @@ -101,18 +103,18 @@ def count(self, data: QueryDictType) -> int: """Return the number of results of the query""" @abc.abstractmethod - def first(self, data: QueryDictType) -> Optional[List[Any]]: + def first(self, data: QueryDictType) -> list[Any] | None: """Executes query, asking for one instance. 
:returns: One row of aiida results """ @abc.abstractmethod - def iterall(self, data: QueryDictType, batch_size: Optional[int]) -> Iterable[List[Any]]: + def iterall(self, data: QueryDictType, batch_size: int | None) -> Iterable[list[Any]]: """Return an iterator over all the results of a list of lists.""" @abc.abstractmethod - def iterdict(self, data: QueryDictType, batch_size: Optional[int]) -> Iterable[Dict[str, Dict[str, Any]]]: + def iterdict(self, data: QueryDictType, batch_size: int | None) -> Iterable[dict[str, dict[str, Any]]]: """Return an iterator over all the results of a list of dictionaries.""" def as_sql(self, data: QueryDictType, inline: bool = False) -> str: @@ -138,7 +140,7 @@ def analyze_query(self, data: QueryDictType, execute: bool = True, verbose: bool raise NotImplementedError @abc.abstractmethod - def get_creation_statistics(self, user_pk: Optional[int] = None) -> Dict[str, Any]: + def get_creation_statistics(self, user_pk: int | None = None) -> dict[str, Any]: """Return a dictionary with the statistics of node creation, summarized by day. :note: Days when no nodes were created are not present in the returned `ctime_by_day` dictionary. 
diff --git a/src/aiida/orm/implementation/storage_backend.py b/src/aiida/orm/implementation/storage_backend.py index 6137508f51..d3ac2cb028 100644 --- a/src/aiida/orm/implementation/storage_backend.py +++ b/src/aiida/orm/implementation/storage_backend.py @@ -11,7 +11,7 @@ from __future__ import annotations import abc -from typing import TYPE_CHECKING, Any, ContextManager, List, Optional, Sequence, TypeVar, Union +from typing import TYPE_CHECKING, Any, ContextManager, Sequence, TypeVar if TYPE_CHECKING: from aiida.manage.configuration.profile import Profile @@ -61,7 +61,7 @@ def version_head(cls) -> str: @classmethod @abc.abstractmethod - def version_profile(cls, profile: 'Profile') -> Optional[str]: + def version_profile(cls, profile: 'Profile') -> str | None: """Return the schema version of the given profile's storage, or None for empty/uninitialised storage. :raises: `~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed @@ -105,7 +105,7 @@ def __init__(self, profile: 'Profile') -> None: from aiida.orm.autogroup import AutogroupManager self._profile = profile - self._default_user: Optional['User'] = None + self._default_user: 'User' | None = None self._autogroup = AutogroupManager(self) @abc.abstractmethod @@ -192,7 +192,7 @@ def users(self) -> 'BackendUserCollection': """Return the collection of users""" @property - def default_user(self) -> Optional['User']: + def default_user(self) -> 'User' | None: """Return the default user for the profile, if it has been created. This is cached, since it is a frequently used operation, for creating other entities. 
@@ -223,7 +223,12 @@ def in_transaction(self) -> bool: """Return whether a transaction is currently active.""" @abc.abstractmethod - def bulk_insert(self, entity_type: 'EntityTypes', rows: List[dict], allow_defaults: bool = False) -> List[int]: + def bulk_insert( + self, + entity_type: 'EntityTypes', + rows: list[dict], + allow_defaults: bool = False, + ) -> list[int]: """Insert a list of entities into the database, directly into a backend transaction. :param entity_type: The type of the entity @@ -238,7 +243,7 @@ def bulk_insert(self, entity_type: 'EntityTypes', rows: List[dict], allow_defaul """ @abc.abstractmethod - def bulk_update(self, entity_type: 'EntityTypes', rows: List[dict]) -> None: + def bulk_update(self, entity_type: 'EntityTypes', rows: list[dict]) -> None: """Update a list of entities in the database, directly with a backend transaction. :param entity_type: The type of the entity @@ -269,7 +274,11 @@ def get_repository(self) -> 'AbstractRepositoryBackend': @abc.abstractmethod def set_global_variable( - self, key: str, value: Union[None, str, int, float], description: Optional[str] = None, overwrite=True + self, + key: str, + value: None | str | int | float, + description: str | None = None, + overwrite=True, ) -> None: """Set a global variable in the storage. @@ -282,7 +291,7 @@ def set_global_variable( """ @abc.abstractmethod - def get_global_variable(self, key: str) -> Union[None, str, int, float]: + def get_global_variable(self, key: str) -> None | str | int | float: """Return a global variable from the storage. 
:param key: the key of the setting @@ -308,7 +317,7 @@ def maintain(self, full: bool = False, dry_run: bool = False, **kwargs) -> None: def _backup( self, dest: str, - keep: Optional[int] = None, + keep: int | None = None, ): raise NotImplementedError @@ -395,7 +404,7 @@ def _validate_or_init_backup_folder(self, dest, keep): def backup( self, dest: str, - keep: Optional[int] = None, + keep: int | None = None, ): """Create a backup of the storage contents. diff --git a/src/aiida/orm/logs.py b/src/aiida/orm/logs.py index ed07deb847..3a56e4baa3 100644 --- a/src/aiida/orm/logs.py +++ b/src/aiida/orm/logs.py @@ -8,9 +8,11 @@ ########################################################################### """Module for orm logging abstract classes""" +from __future__ import annotations + import logging from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type +from typing import TYPE_CHECKING, Any from aiida.common import timezone from aiida.common.pydantic import MetadataField @@ -40,10 +42,10 @@ class LogCollection(entities.Collection['Log']): """ @staticmethod - def _entity_base_cls() -> Type['Log']: + def _entity_base_cls() -> type['Log']: return Log - def create_entry_from_record(self, record: logging.LogRecord) -> Optional['Log']: + def create_entry_from_record(self, record: logging.LogRecord) -> 'Log' | None: """Helper function to create a log entry from a record created as by the python logging library :param record: The record created by the logging module @@ -81,7 +83,7 @@ def create_entry_from_record(self, record: logging.LogRecord) -> Optional['Log'] backend=self.backend, ) - def get_logs_for(self, entity: 'Node', order_by: Optional['OrderByType'] = None) -> List['Log']: + def get_logs_for(self, entity: 'Node', order_by: 'OrderByType' | None = None) -> list['Log']: """Get all the log messages for a given node and optionally sort :param entity: the entity to get logs for @@ -112,7 +114,7 @@ def delete_all(self) -> None: """ 
return self._backend.logs.delete_all() - def delete_many(self, filters: 'FilterType') -> List[int]: + def delete_many(self, filters: 'FilterType') -> list[int]: """Delete Logs based on ``filters`` :param filters: filters to pass to the QueryBuilder @@ -124,7 +126,7 @@ def delete_many(self, filters: 'FilterType') -> List[int]: return self._backend.logs.delete_many(filters) -class Log(entities.Entity['BackendLog', LogCollection]): +class Log(entities.Entity['BackendLog']): """An AiiDA Log entity. Corresponds to a logged message against a particular AiiDA node.""" _CLS_COLLECTION = LogCollection @@ -135,7 +137,7 @@ class Model(entities.Entity.Model): levelname: str = MetadataField(description='The name of the log level', is_attribute=False) message: str = MetadataField(description='The message of the log', is_attribute=False) time: datetime = MetadataField(description='The time at which the log was created', is_attribute=False) - metadata: Dict[str, Any] = MetadataField(description='The metadata of the log', is_attribute=False) + metadata: dict[str, Any] = MetadataField(description='The metadata of the log', is_attribute=False) dbnode_id: int = MetadataField(description='Associated node', is_attribute=False) def __init__( @@ -145,8 +147,8 @@ def __init__( levelname: str, dbnode_id: int, message: str = '', - metadata: Optional[Dict[str, Any]] = None, - backend: Optional['StorageBackend'] = None, + metadata: dict[str, Any] | None = None, + backend: 'StorageBackend' | None = None, ): """Construct a new log @@ -229,7 +231,7 @@ def message(self) -> str: return self._backend_entity.message @property - def metadata(self) -> Dict[str, Any]: + def metadata(self) -> dict[str, Any]: """Get the metadata corresponding to the entry :return: The entry metadata diff --git a/src/aiida/orm/nodes/attributes.py b/src/aiida/orm/nodes/attributes.py index e3e0e171ef..c5040d11d4 100644 --- a/src/aiida/orm/nodes/attributes.py +++ b/src/aiida/orm/nodes/attributes.py @@ -8,8 +8,10 @@ 
########################################################################### """Interface to the attributes of a node instance.""" +from __future__ import annotations + import copy -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Tuple +from typing import TYPE_CHECKING, Any, Iterable if TYPE_CHECKING: from .node import Node @@ -39,7 +41,7 @@ def __contains__(self, key: str) -> bool: return key in self._backend_node.attributes @property - def all(self) -> Dict[str, Any]: + def all(self) -> dict[str, Any]: """Return the complete attributes dictionary. .. warning:: While the entity is unstored, this will return references of the attributes on the database model, @@ -85,7 +87,7 @@ def get(self, key: str, default=_NO_DEFAULT) -> Any: return attribute - def get_many(self, keys: List[str]) -> List[Any]: + def get_many(self, keys: list[str]) -> list[Any]: """Return the values of multiple attributes. .. warning:: While the entity is unstored, this will return references of the attributes on the database model, @@ -118,7 +120,7 @@ def set(self, key: str, value: Any) -> None: self._node._check_mutability_attributes([key]) self._backend_node.set_attribute(key, value) - def set_many(self, attributes: Dict[str, Any]) -> None: + def set_many(self, attributes: dict[str, Any]) -> None: """Set multiple attributes. .. note:: This will override any existing attributes that are present in the new dictionary. @@ -130,7 +132,7 @@ def set_many(self, attributes: Dict[str, Any]) -> None: self._node._check_mutability_attributes(list(attributes)) self._backend_node.set_attribute_many(attributes) - def reset(self, attributes: Dict[str, Any]) -> None: + def reset(self, attributes: dict[str, Any]) -> None: """Reset the attributes. .. note:: This will completely clear any existing attributes and replace them with the new dictionary. 
@@ -152,7 +154,7 @@ def delete(self, key: str) -> None: self._node._check_mutability_attributes([key]) self._backend_node.delete_attribute(key) - def delete_many(self, keys: List[str]) -> None: + def delete_many(self, keys: list[str]) -> None: """Delete multiple attributes. :param keys: names of the attributes to delete @@ -167,7 +169,7 @@ def clear(self) -> None: self._node._check_mutability_attributes() self._backend_node.clear_attributes() - def items(self) -> Iterable[Tuple[str, Any]]: + def items(self) -> Iterable[tuple[str, Any]]: """Return an iterator over the attributes. :return: an iterator with attribute key value pairs diff --git a/src/aiida/orm/nodes/caching.py b/src/aiida/orm/nodes/caching.py index 7bc8dcd1a7..635b182426 100644 --- a/src/aiida/orm/nodes/caching.py +++ b/src/aiida/orm/nodes/caching.py @@ -2,7 +2,7 @@ from __future__ import annotations -import typing as t +from typing import TYPE_CHECKING, Any, Iterator from aiida.common import exceptions from aiida.common.hashing import make_hash @@ -11,7 +11,7 @@ from ..querybuilder import QueryBuilder -if t.TYPE_CHECKING: +if TYPE_CHECKING: from .node import Node @@ -27,7 +27,7 @@ def __init__(self, node: 'Node') -> None: """Initialize the caching interface.""" self._node = node - def compute_hash(self, ignore_errors: bool = True, **kwargs: t.Any) -> str | None: + def compute_hash(self, ignore_errors: bool = True, **kwargs: Any) -> str | None: """Return the computed hash for this node based on its attributes. :param ignore_errors: return ``None`` on ``aiida.common.exceptions.HashingError`` (logging the exception) @@ -37,7 +37,7 @@ def compute_hash(self, ignore_errors: bool = True, **kwargs: t.Any) -> str | Non return self._compute_hash(ignore_errors=ignore_errors, **kwargs) - def _compute_hash(self, ignore_errors: bool = True, **kwargs: t.Any) -> str | None: + def _compute_hash(self, ignore_errors: bool = True, **kwargs: Any) -> str | None: """Return the hash for this node based on its attributes. 
This will always work, even before storing. @@ -53,13 +53,13 @@ def _compute_hash(self, ignore_errors: bool = True, **kwargs: t.Any) -> str | No self._node.logger.exception('Node hashing failed') return None - def _get_objects_to_hash(self) -> dict[str, t.Any]: + def _get_objects_to_hash(self) -> dict[str, Any]: warn_deprecation( '`NodeCaching._get_objects_to_hash` is deprecated, use `NodeCaching.get_objects_to_hash` instead', version=3 ) return self.get_objects_to_hash() - def get_objects_to_hash(self) -> dict[str, t.Any]: + def get_objects_to_hash(self) -> dict[str, Any]: """Return a list of objects which should be included in the hash.""" return { @@ -141,7 +141,7 @@ def get_all_same_nodes(self) -> list['Node']: """ return list(self._iter_all_same_nodes()) - def _iter_all_same_nodes(self, allow_before_store=False) -> t.Iterator['Node']: + def _iter_all_same_nodes(self, allow_before_store=False) -> Iterator['Node']: """Returns an iterator of all same nodes. Note: this should be only called on stored nodes, or internally from .store() since it first calls diff --git a/src/aiida/orm/nodes/comments.py b/src/aiida/orm/nodes/comments.py index e638642e9f..8cd9ae4111 100644 --- a/src/aiida/orm/nodes/comments.py +++ b/src/aiida/orm/nodes/comments.py @@ -2,12 +2,12 @@ from __future__ import annotations -import typing as t +from typing import TYPE_CHECKING from ..comments import Comment from ..users import User -if t.TYPE_CHECKING: +if TYPE_CHECKING: from .node import Node @@ -18,7 +18,7 @@ def __init__(self, node: 'Node') -> None: """Initialize the comments interface.""" self._node = node - def add(self, content: str, user: t.Optional[User] = None) -> Comment: + def add(self, content: str, user: User | None = None) -> Comment: """Add a new comment. 
:param content: string with comment diff --git a/src/aiida/orm/nodes/data/array/bands.py b/src/aiida/orm/nodes/data/array/bands.py index cbae36b90d..643cd423e2 100644 --- a/src/aiida/orm/nodes/data/array/bands.py +++ b/src/aiida/orm/nodes/data/array/bands.py @@ -10,9 +10,11 @@ in a Brillouin zone, and how to operate on them. """ +from __future__ import annotations + import json -import typing as t from string import Template +from typing import Optional import numpy @@ -214,7 +216,7 @@ class BandsData(KpointsData): """Class to handle bands data""" class Model(KpointsData.Model): - array_labels: t.Optional[t.List[str]] = MetadataField(description='Labels associated with the band arrays') + array_labels: Optional[list[str]] = MetadataField(description='Labels associated with the band arrays') units: str = MetadataField(description='Units in which the data in bands were stored') def set_kpointsdata(self, kpointsdata): diff --git a/src/aiida/orm/nodes/data/array/kpoints.py b/src/aiida/orm/nodes/data/array/kpoints.py index e7958970a4..1744f17f85 100644 --- a/src/aiida/orm/nodes/data/array/kpoints.py +++ b/src/aiida/orm/nodes/data/array/kpoints.py @@ -11,7 +11,7 @@ periodic crystal structure). 
""" -import typing as t +from __future__ import annotations import numpy @@ -39,11 +39,11 @@ class KpointsData(ArrayData): """ class Model(ArrayData.Model): - labels: t.List[str] = MetadataField(description='Labels associated with the list of kpoints') - label_numbers: t.List[int] = MetadataField(description='Index of the labels in the list of kpoints') - mesh: t.List[int] = MetadataField(description='Mesh of kpoints') - offset: t.List[float] = MetadataField(description='Offset of kpoints') - cell: t.List[t.List[float]] = MetadataField(description='Unit cell of the crystal, in Angstroms') + labels: list[str] = MetadataField(description='Labels associated with the list of kpoints') + label_numbers: list[int] = MetadataField(description='Index of the labels in the list of kpoints') + mesh: list[int] = MetadataField(description='Mesh of kpoints') + offset: list[float] = MetadataField(description='Offset of kpoints') + cell: list[list[float]] = MetadataField(description='Unit cell of the crystal, in Angstroms') pbc1: bool = MetadataField(description='True if the first lattice vector is periodic') pbc2: bool = MetadataField(description='True if the second lattice vector is periodic') pbc3: bool = MetadataField(description='True if the third lattice vector is periodic') @@ -194,7 +194,7 @@ def set_cell_from_structure(self, structuredata): if not isinstance(structuredata, StructureData): raise ValueError( - 'An instance of StructureData should be passed to ' 'the KpointsData, found instead {}'.format( + 'An instance of StructureData should be passed to the KpointsData, found instead {}'.format( structuredata.__class__ ) ) @@ -355,9 +355,9 @@ def _validate_kpoints_weights(self, kpoints, weights): kpoints = numpy.array([[0.0, 0.0, 0.0]]) else: raise ValueError( - 'empty kpoints list is valid only in zero dimension' - '; instead here with have {} dimensions' - ''.format(self._dimension) + 'empty kpoints list is valid only in zero dimension; instead here with have {} 
dimensions'.format( + self._dimension + ) ) if len(kpoints.shape) <= 1: @@ -373,8 +373,9 @@ def _validate_kpoints_weights(self, kpoints, weights): if kpoints.shape[1] < self._dimension: raise ValueError( - 'In a system which has {0} dimensions, kpoint need' - 'more than {0} coordinates (found instead {1})'.format(self._dimension, kpoints.shape[1]) + 'In a system which has {0} dimensions, kpoint needmore than {0} coordinates (found instead {1})'.format( + self._dimension, kpoints.shape[1] + ) ) if weights is not None: diff --git a/src/aiida/orm/nodes/data/array/trajectory.py b/src/aiida/orm/nodes/data/array/trajectory.py index 3d6356ebd2..9b502fde3d 100644 --- a/src/aiida/orm/nodes/data/array/trajectory.py +++ b/src/aiida/orm/nodes/data/array/trajectory.py @@ -8,8 +8,9 @@ ########################################################################### """AiiDA class to deal with crystal structure trajectories.""" +from __future__ import annotations + import collections.abc -from typing import List from aiida.common.pydantic import MetadataField @@ -26,7 +27,7 @@ class TrajectoryData(ArrayData): class Model(ArrayData.Model): units_positions: str = MetadataField(alias='units|positions', description='Unit of positions') units_times: str = MetadataField(alias='units|times', description='Unit of time') - symbols: List[str] = MetadataField(description='List of symbols') + symbols: list[str] = MetadataField(description='List of symbols') def __init__(self, structurelist=None, **kwargs): super().__init__(**kwargs) @@ -70,7 +71,7 @@ def _internal_validate(self, stepids, cells, symbols, positions, times, velociti numatoms = len(symbols) if positions.shape != (numsteps, numatoms, 3): raise ValueError( - 'TrajectoryData.positions must have shape (s,n,3), ' 'with s=number of steps and n=number of symbols' + 'TrajectoryData.positions must have shape (s,n,3), with s=number of steps and n=number of symbols' ) if times is not None: if times.shape != (numsteps,): @@ -257,7 +258,7 @@ def 
get_cells(self): return None @property - def symbols(self) -> List[str]: + def symbols(self) -> list[str]: """Return the array of symbols, if it has already been set. :raises KeyError: if the trajectory has not been set yet. @@ -371,7 +372,7 @@ def get_step_structure(self, index, custom_kinds=None): for k in custom_kinds: if not isinstance(k, Kind): raise TypeError( - 'Each element of the custom_kinds list must ' 'be a aiida.orm.nodes.data.structure.Kind object' + 'Each element of the custom_kinds list must be a aiida.orm.nodes.data.structure.Kind object' ) kind_names.append(k.name) if len(kind_names) != len(set(kind_names)): diff --git a/src/aiida/orm/nodes/data/base.py b/src/aiida/orm/nodes/data/base.py index adc0f3a98e..7c94ea53bf 100644 --- a/src/aiida/orm/nodes/data/base.py +++ b/src/aiida/orm/nodes/data/base.py @@ -8,8 +8,10 @@ ########################################################################### """`Data` sub class to be used as a base for data containers that represent base python data types.""" -import typing as t +from __future__ import annotations + from functools import singledispatch +from typing import Any from aiida.common.pydantic import MetadataField @@ -28,13 +30,13 @@ class BaseType(Data): """`Data` sub class to be used as a base for data containers that represent base python data types.""" class Model(Data.Model): - value: t.Any = MetadataField( + value: Any = MetadataField( ..., title='Data value.', description='The value of the data', ) - def __init__(self, value=None, **kwargs): + def __init__(self, value: Any | None = None, **kwargs): try: getattr(self, '_type') except AttributeError: diff --git a/src/aiida/orm/nodes/data/cif.py b/src/aiida/orm/nodes/data/cif.py index 8421a617eb..8901cfb4bc 100644 --- a/src/aiida/orm/nodes/data/cif.py +++ b/src/aiida/orm/nodes/data/cif.py @@ -8,8 +8,10 @@ ########################################################################### """Tools for handling Crystallographic Information Files (CIF)""" +from 
__future__ import annotations + import re -import typing as t +from typing import Optional from aiida.common.pydantic import MetadataField from aiida.common.utils import Capturing @@ -251,14 +253,20 @@ class CifData(SinglefileData): _ase = None class Model(SinglefileData.Model): - formulae: t.Optional[t.List[str]] = MetadataField( - None, description='List of formulae contained in the CIF file.', exclude_to_orm=True + formulae: Optional[list[str]] = MetadataField( + None, + description='List of formulae contained in the CIF file.', + exclude_to_orm=True, ) - spacegroup_numbers: t.Optional[t.List[str]] = MetadataField( - None, description='List of space group numbers of the structure.', exclude_to_orm=True + spacegroup_numbers: Optional[list[str]] = MetadataField( + None, + description='List of space group numbers of the structure.', + exclude_to_orm=True, ) - md5: t.Optional[str] = MetadataField( - None, description='MD5 checksum of the file contents.', exclude_to_orm=True + md5: Optional[str] = MetadataField( + None, + description='MD5 checksum of the file contents.', + exclude_to_orm=True, ) def __init__(self, ase=None, file=None, filename=None, values=None, scan_type=None, parse_policy=None, **kwargs): diff --git a/src/aiida/orm/nodes/data/code/abstract.py b/src/aiida/orm/nodes/data/code/abstract.py index eb72649b9c..b7a9d9abdf 100644 --- a/src/aiida/orm/nodes/data/code/abstract.py +++ b/src/aiida/orm/nodes/data/code/abstract.py @@ -13,7 +13,7 @@ import abc import functools import pathlib -import typing as t +from typing import TYPE_CHECKING, Optional from aiida.cmdline.params.options.interactive import TemplateInteractiveOption from aiida.common import exceptions @@ -25,7 +25,7 @@ from ..data import Data -if t.TYPE_CHECKING: +if TYPE_CHECKING: from aiida.engine import ProcessBuilder __all__ = ('AbstractCode',) @@ -58,7 +58,7 @@ class Model(Data.Model, defer_build=True): description='Human-readable description, ideally including version and compilation 
environment.', short_name='-D', ) - default_calc_job_plugin: t.Optional[str] = MetadataField( + default_calc_job_plugin: Optional[str] = MetadataField( None, title='Default `CalcJob` plugin', description='Entry point name of the default plugin (as listed in `verdi plugin list aiida.calculations`).', @@ -70,7 +70,7 @@ class Model(Data.Model, defer_build=True): description='Whether the executable and arguments of the code in the submission script should be escaped ' 'with single or double quotes.', ) - with_mpi: t.Optional[bool] = MetadataField( + with_mpi: Optional[bool] = MetadataField( None, title='Run with MPI', description='Whether the executable should be run as an MPI program. This option can be left unspecified ' diff --git a/src/aiida/orm/nodes/data/code/legacy.py b/src/aiida/orm/nodes/data/code/legacy.py index b462d8ad7c..04095f51c3 100644 --- a/src/aiida/orm/nodes/data/code/legacy.py +++ b/src/aiida/orm/nodes/data/code/legacy.py @@ -8,9 +8,11 @@ ########################################################################### """Data plugin represeting an executable code to be wrapped and called through a `CalcJob` plugin.""" +from __future__ import annotations + import os import pathlib -import typing as t +from typing import Optional from aiida.common import exceptions from aiida.common.log import override_log_level @@ -48,12 +50,12 @@ class Model(AbstractCode.Model): '', description='The code that will be put in the scheduler script after the execution of the code', ) - input_plugin: t.Optional[str] = MetadataField( - description='The name of the input plugin to be used for this code' + input_plugin: Optional[str] = MetadataField( + description='The name of the input plugin to be used for this code', ) - local_executable: t.Optional[str] = MetadataField(description='Path to a local executable') - remote_exec_path: t.Optional[str] = MetadataField(description='Remote path to executable') - is_local: t.Optional[bool] = MetadataField(description='Whether the code 
is local or remote') + local_executable: Optional[str] = MetadataField(description='Path to a local executable') + remote_exec_path: Optional[str] = MetadataField(description='Remote path to executable') + is_local: Optional[bool] = MetadataField(description='Whether the code is local or remote') def __init__(self, remote_computer_exec=None, local_executable=None, input_plugin_name=None, files=None, **kwargs): super().__init__(**kwargs) diff --git a/src/aiida/orm/nodes/data/code/portable.py b/src/aiida/orm/nodes/data/code/portable.py index bab715ab5a..872774c8f0 100644 --- a/src/aiida/orm/nodes/data/code/portable.py +++ b/src/aiida/orm/nodes/data/code/portable.py @@ -36,7 +36,7 @@ _LOGGER = logging.getLogger(__name__) -def _export_filpath_files_from_repo(portable_code: PortableCode, repository_path: pathlib.Path) -> str: +def _export_filepath_files_from_repo(portable_code: PortableCode, repository_path: pathlib.Path) -> str: for root, _, filenames in portable_code.base.repository.walk(): for filename in filenames: rel_path = str(root / filename) @@ -72,7 +72,7 @@ class Model(AbstractCode.Model): short_name='-F', is_attribute=False, priority=2, - orm_to_model=_export_filpath_files_from_repo, # type: ignore[arg-type] + orm_to_model=_export_filepath_files_from_repo, # type: ignore[arg-type] ) def __init__( @@ -197,6 +197,6 @@ def _prepare_yaml(self, *args, **kwargs): """Export code to a YAML file.""" result = super()._prepare_yaml(*args, **kwargs)[0] target = pathlib.Path().cwd() / f'{self.label}' - _export_filpath_files_from_repo(self, target) + _export_filepath_files_from_repo(self, target) _LOGGER.info(f'Repository files for PortableCode <{self.pk}> dumped to folder `{target}`.') return result, {} diff --git a/src/aiida/orm/nodes/data/data.py b/src/aiida/orm/nodes/data/data.py index 56b3dcbdbb..993705592a 100644 --- a/src/aiida/orm/nodes/data/data.py +++ b/src/aiida/orm/nodes/data/data.py @@ -8,7 +8,9 @@ 
########################################################################### """Module with `Node` sub class `Data` to be used as a base class for data structures.""" -from typing import Dict, Optional +from __future__ import annotations + +from typing import Optional from aiida.common import exceptions from aiida.common.lang import override @@ -40,7 +42,7 @@ class Data(Node): # By default, if not found here, # The fileformat string is assumed to match the extension. # Example: {'dat': 'dat_multicolumn'} - _export_format_replacements: Dict[str, str] = {} + _export_format_replacements: dict[str, str] = {} # Data nodes are storable _storable = True @@ -48,10 +50,13 @@ class Data(Node): class Model(Node.Model): source: Optional[dict] = MetadataField( - None, description='Source of the data.', is_subscriptable=True, exclude_from_cli=True + None, + description='Source of the data.', + is_subscriptable=True, + exclude_from_cli=True, ) - def __init__(self, *args, source=None, **kwargs): + def __init__(self, *args, source: dict | None = None, **kwargs): """Construct a new instance, setting the ``source`` attribute if provided as a keyword argument.""" super().__init__(*args, **kwargs) if source is not None: @@ -83,7 +88,7 @@ def clone(self): return clone @property - def source(self) -> Optional[dict]: + def source(self) -> dict | None: """Gets the dictionary describing the source of Data object. Possible fields: * **db_name**: name of the source database. 
diff --git a/src/aiida/orm/nodes/data/dict.py b/src/aiida/orm/nodes/data/dict.py index 99673de6fd..6ae858b313 100644 --- a/src/aiida/orm/nodes/data/dict.py +++ b/src/aiida/orm/nodes/data/dict.py @@ -11,7 +11,7 @@ from __future__ import annotations import copy -import typing as t +from typing import Any from aiida.common import exceptions from aiida.common.pydantic import MetadataField @@ -51,7 +51,7 @@ class Dict(Data): """ class Model(Data.Model): - value: t.Dict[str, t.Any] = MetadataField( + value: dict[str, Any] = MetadataField( description='Dictionary content.', is_attribute=False, is_subscriptable=True, @@ -90,7 +90,7 @@ def __contains__(self, key: str) -> bool: """Return whether the node contains a key.""" return key in self.base.attributes - def get(self, key: str, default: t.Any | None = None, /): # type: ignore[override] + def get(self, key: str, default: Any | None = None, /): # type: ignore[override] """Return the value for key if key is in the dictionary, else default. :param key: The key whose value to return. @@ -150,7 +150,7 @@ def items(self): yield key, value @property - def value(self) -> dict[str, t.Any]: + def value(self) -> dict[str, Any]: """Return the value of this node, which is the dictionary content. :return: The dictionary content. diff --git a/src/aiida/orm/nodes/data/enum.py b/src/aiida/orm/nodes/data/enum.py index 1c6fb41c79..45adb44d62 100644 --- a/src/aiida/orm/nodes/data/enum.py +++ b/src/aiida/orm/nodes/data/enum.py @@ -15,8 +15,10 @@ class Color(Enum): ``Color.RED`` is ``RED`` and the value of ``Color.RED`` is ``1``. 
""" -import typing as t +from __future__ import annotations + from enum import Enum +from typing import Any, TypeVar from plumpy.loaders import get_object_loader @@ -28,7 +30,7 @@ class Color(Enum): __all__ = ('EnumData',) -EnumType = t.TypeVar('EnumType', bound=Enum) +EnumType = TypeVar('EnumType', bound=Enum) @to_aiida_type.register(Enum) @@ -75,11 +77,11 @@ def name(self) -> str: return self.base.attributes.get(self.KEY_NAME) @property - def value(self) -> t.Any: + def value(self) -> Any: """Return the value of the enum member.""" return self.base.attributes.get(self.KEY_VALUE) - def get_enum(self) -> t.Type[EnumType]: + def get_enum(self) -> type[EnumType]: """Return the enum class reconstructed from the serialized identifier stored in the database. :raises `ImportError`: if the enum class represented by the stored identifier cannot be imported. @@ -101,7 +103,7 @@ def get_member(self) -> EnumType: # type: ignore[misc, type-var] :raises `ValueError`: if the stored enum member value is no longer valid for the imported enum class. """ value = self.base.attributes.get(self.KEY_VALUE) - enum: t.Type[EnumType] = self.get_enum() + enum: type[EnumType] = self.get_enum() try: return enum(value) @@ -111,7 +113,7 @@ def get_member(self) -> EnumType: # type: ignore[misc, type-var] 'have changed since storing the node.' 
) from exc - def __eq__(self, other: t.Any) -> bool: + def __eq__(self, other: Any) -> bool: """Return whether the other object is equivalent to ourselves.""" if isinstance(other, Enum): try: diff --git a/src/aiida/orm/nodes/data/folder.py b/src/aiida/orm/nodes/data/folder.py index 333b527176..4bb69fe9ba 100644 --- a/src/aiida/orm/nodes/data/folder.py +++ b/src/aiida/orm/nodes/data/folder.py @@ -13,11 +13,11 @@ import contextlib import io import pathlib -import typing as t +from typing import TYPE_CHECKING, BinaryIO, Iterable, Iterator, Literal, TextIO, overload from .data import Data -if t.TYPE_CHECKING: +if TYPE_CHECKING: from aiida.common.typing import FilePath from aiida.repository import File @@ -72,16 +72,16 @@ def list_object_names(self, path: str | None = None) -> list[str]: """ return self.base.repository.list_object_names(path) - @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['r']) -> t.Iterator[t.TextIO]: ... + def open(self, path: FilePath, mode: Literal['r']) -> Iterator[TextIO]: ... - @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['rb']) -> t.Iterator[t.BinaryIO]: ... + def open(self, path: FilePath, mode: Literal['rb']) -> Iterator[BinaryIO]: ... @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['r', 'rb'] = 'r') -> t.Iterator[t.BinaryIO] | t.Iterator[t.TextIO]: + def open(self, path: FilePath, mode: Literal['r', 'rb'] = 'r') -> Iterator[BinaryIO] | Iterator[TextIO]: """Open a file handle to an object stored under the given key. .. note:: this should only be used to open a handle to read an existing file. 
To write a new file use the method @@ -98,7 +98,7 @@ def open(self, path: FilePath, mode: t.Literal['r', 'rb'] = 'r') -> t.Iterator[t yield handle @contextlib.contextmanager - def as_path(self, path: FilePath | None = None) -> t.Iterator[pathlib.Path]: + def as_path(self, path: FilePath | None = None) -> Iterator[pathlib.Path]: """Make the contents of the repository available as a normal filepath on the local file system. :param path: optional relative path of the object within the repository. @@ -119,13 +119,13 @@ def get_object(self, path: FilePath | None = None) -> File: """ return self.base.repository.get_object(path) - @t.overload - def get_object_content(self, path: str, mode: t.Literal['r']) -> str: ... + @overload + def get_object_content(self, path: str, mode: Literal['r']) -> str: ... - @t.overload - def get_object_content(self, path: str, mode: t.Literal['rb']) -> bytes: ... + @overload + def get_object_content(self, path: str, mode: Literal['rb']) -> bytes: ... - def get_object_content(self, path: str, mode: t.Literal['r', 'rb'] = 'r') -> str | bytes: + def get_object_content(self, path: str, mode: Literal['r', 'rb'] = 'r') -> str | bytes: """Return the content of a object identified by key. :param path: the relative path of the object within the repository. @@ -176,7 +176,7 @@ def put_object_from_tree(self, filepath: str, path: str | None = None) -> None: """ return self.base.repository.put_object_from_tree(filepath, path) - def walk(self, path: FilePath | None = None) -> t.Iterable[tuple[pathlib.PurePath, list[str], list[str]]]: + def walk(self, path: FilePath | None = None) -> Iterable[tuple[pathlib.PurePath, list[str], list[str]]]: """Walk over the directories and files contained within this repository. .. note:: the order of the dirname and filename lists that are returned is not necessarily sorted. 
This is in @@ -189,7 +189,7 @@ def walk(self, path: FilePath | None = None) -> t.Iterable[tuple[pathlib.PurePat """ yield from self.base.repository.walk(path) - def glob(self) -> t.Iterable[pathlib.PurePath]: + def glob(self) -> Iterable[pathlib.PurePath]: """Yield a recursive list of all paths (files and directories).""" yield from self.base.repository.glob() diff --git a/src/aiida/orm/nodes/data/jsonable.py b/src/aiida/orm/nodes/data/jsonable.py index 24309abd72..40a8e3883c 100644 --- a/src/aiida/orm/nodes/data/jsonable.py +++ b/src/aiida/orm/nodes/data/jsonable.py @@ -1,8 +1,10 @@ """Data plugin that allows to easily wrap objects that are JSON-able.""" +from __future__ import annotations + import importlib import json -import typing +from typing import Any, MutableMapping, Protocol, runtime_checkable from pydantic import ConfigDict @@ -13,9 +15,9 @@ __all__ = ('JsonableData',) -@typing.runtime_checkable -class JsonSerializableProtocol(typing.Protocol): - def as_dict(self) -> typing.MutableMapping[typing.Any, typing.Any]: ... +@runtime_checkable +class JsonSerializableProtocol(Protocol): + def as_dict(self) -> MutableMapping[Any, Any]: ... class JsonableData(Data): @@ -91,7 +93,7 @@ def __init__(self, obj: JsonSerializableProtocol, *args, **kwargs): self.base.attributes.set_many(serialized) @classmethod - def _deserialize_float_constants(cls, data: typing.Any): + def _deserialize_float_constants(cls, data: Any): """Deserialize the contents of a dictionary ``data`` deserializing infinity and NaN string constants. 
The ``data`` dictionary is recursively checked for the ``Infinity``, ``-Infinity`` and ``NaN`` strings, which diff --git a/src/aiida/orm/nodes/data/list.py b/src/aiida/orm/nodes/data/list.py index f36dc70a8e..92a85fd9f0 100644 --- a/src/aiida/orm/nodes/data/list.py +++ b/src/aiida/orm/nodes/data/list.py @@ -8,7 +8,8 @@ ########################################################################### """`Data` sub class to represent a list.""" -import typing as t +from __future__ import annotations + from collections.abc import MutableSequence from typing import Any @@ -26,7 +27,7 @@ class List(Data, MutableSequence): _LIST_KEY = 'list' class Model(Data.Model): - value: t.List[t.Any] = MetadataField( + value: list[Any] = MetadataField( description='Content of the data', ) @@ -119,7 +120,7 @@ def reverse(self): self.set_list(data) @property - def value(self) -> list[t.Any]: + def value(self) -> list[Any]: """Return the value of this node, which is the list content. :return: The list content. diff --git a/src/aiida/orm/nodes/data/remote/base.py b/src/aiida/orm/nodes/data/remote/base.py index 8bae0fed02..9416692b4b 100644 --- a/src/aiida/orm/nodes/data/remote/base.py +++ b/src/aiida/orm/nodes/data/remote/base.py @@ -41,7 +41,7 @@ class Model(Data.Model): orm_to_model=lambda node, _: node.get_remote_path(), ) - def __init__(self, remote_path: Union[str, None] = None, **kwargs): + def __init__(self, remote_path: str | None = None, **kwargs): super().__init__(**kwargs) if remote_path is not None: self.set_remote_path(remote_path) diff --git a/src/aiida/orm/nodes/data/remote/stash/compress.py b/src/aiida/orm/nodes/data/remote/stash/compress.py index 70fadc3cc2..3810d3a86b 100644 --- a/src/aiida/orm/nodes/data/remote/stash/compress.py +++ b/src/aiida/orm/nodes/data/remote/stash/compress.py @@ -8,7 +8,7 @@ ########################################################################### """Data plugin that models a stashed folder on a remote computer.""" -from typing import List, Tuple, 
Union +from __future__ import annotations from aiida.common.datastructures import StashMode from aiida.common.lang import type_check @@ -28,7 +28,7 @@ class Model(RemoteStashData.Model): target_basepath: str = MetadataField( description='The the target basepath', ) - source_list: List[str] = MetadataField( + source_list: list[str] = MetadataField( description='The list of source files that were stashed', ) dereference: bool = MetadataField( @@ -39,7 +39,7 @@ def __init__( self, stash_mode: StashMode, target_basepath: str, - source_list: List, + source_list: list[str], dereference: bool, **kwargs, ): @@ -101,7 +101,7 @@ def target_basepath(self, value: str): self.base.attributes.set('target_basepath', value) @property - def source_list(self) -> Union[List, Tuple]: + def source_list(self) -> list | tuple: """Return the list of source files that were stashed. :return: the list of source files. @@ -109,7 +109,7 @@ def source_list(self) -> Union[List, Tuple]: return self.base.attributes.get('source_list') @source_list.setter - def source_list(self, value: Union[List, Tuple]): + def source_list(self, value: list | tuple): """Set the list of source files that were stashed. :param value: the list of source files. 
diff --git a/src/aiida/orm/nodes/data/remote/stash/custom.py b/src/aiida/orm/nodes/data/remote/stash/custom.py index d46cbc7ae1..6c73dd6cd8 100644 --- a/src/aiida/orm/nodes/data/remote/stash/custom.py +++ b/src/aiida/orm/nodes/data/remote/stash/custom.py @@ -8,7 +8,7 @@ ########################################################################### """Data plugin that models a stashed folder on a remote computer.""" -from typing import List, Tuple, Union +from __future__ import annotations from aiida.common.datastructures import StashMode from aiida.common.lang import type_check @@ -26,13 +26,13 @@ class RemoteStashCustomData(RemoteStashData): class Model(RemoteStashData.Model): target_basepath: str = MetadataField(description='The the target basepath') - source_list: List[str] = MetadataField(description='The list of source files that were stashed') + source_list: list[str] = MetadataField(description='The list of source files that were stashed') def __init__( self, stash_mode: StashMode, target_basepath: str, - source_list: List, + source_list: list[str], **kwargs, ): """Construct a new instance @@ -65,7 +65,7 @@ def target_basepath(self, value: str): self.base.attributes.set('target_basepath', value) @property - def source_list(self) -> Union[List, Tuple]: + def source_list(self) -> list | tuple: """Return the list of source files that were stashed. :return: the list of source files. @@ -73,7 +73,7 @@ def source_list(self) -> Union[List, Tuple]: return self.base.attributes.get('source_list') @source_list.setter - def source_list(self, value: Union[List, Tuple]): + def source_list(self, value: list | tuple): """Set the list of source files that were stashed. :param value: the list of source files. 
diff --git a/src/aiida/orm/nodes/data/remote/stash/folder.py b/src/aiida/orm/nodes/data/remote/stash/folder.py index 22afd57491..21bb7051b1 100644 --- a/src/aiida/orm/nodes/data/remote/stash/folder.py +++ b/src/aiida/orm/nodes/data/remote/stash/folder.py @@ -8,7 +8,7 @@ ########################################################################### """Data plugin that models a stashed folder on a remote computer.""" -from typing import List, Tuple, Union +from __future__ import annotations from aiida.common.datastructures import StashMode from aiida.common.lang import type_check @@ -29,9 +29,9 @@ class RemoteStashFolderData(RemoteStashData): class Model(RemoteStashData.Model): target_basepath: str = MetadataField(description='The the target basepath') - source_list: List[str] = MetadataField(description='The list of source files that were stashed') + source_list: list[str] = MetadataField(description='The list of source files that were stashed') - def __init__(self, stash_mode: StashMode, target_basepath: str, source_list: List, **kwargs): + def __init__(self, stash_mode: StashMode, target_basepath: str, source_list: list[str], **kwargs): """Construct a new instance :param stash_mode: the stashing mode with which the data was stashed on the remote. @@ -63,7 +63,7 @@ def target_basepath(self, value: str): self.base.attributes.set('target_basepath', value) @property - def source_list(self) -> Union[List, Tuple]: + def source_list(self) -> list | tuple: """Return the list of source files that were stashed. :return: the list of source files. @@ -71,7 +71,7 @@ def source_list(self) -> Union[List, Tuple]: return self.base.attributes.get('source_list') @source_list.setter - def source_list(self, value: Union[List, Tuple]): + def source_list(self, value: list | tuple): """Set the list of source files that were stashed. :param value: the list of source files. 
diff --git a/src/aiida/orm/nodes/data/singlefile.py b/src/aiida/orm/nodes/data/singlefile.py index 742a5702c6..3e9be68da7 100644 --- a/src/aiida/orm/nodes/data/singlefile.py +++ b/src/aiida/orm/nodes/data/singlefile.py @@ -14,7 +14,7 @@ import io import os import pathlib -import typing as t +from typing import IO, Any, BinaryIO, Iterator, Literal, Optional, TextIO, overload from aiida.common import exceptions from aiida.common.pydantic import MetadataField @@ -35,10 +35,10 @@ class Model(Data.Model): description='The file content.', model_to_orm=lambda model: io.BytesIO(model.content), # type: ignore[attr-defined] ) - filename: t.Optional[str] = MetadataField(None, description='The filename. Defaults to `file.txt`.') + filename: Optional[str] = MetadataField(None, description='The filename. Defaults to `file.txt`.') @classmethod - def from_string(cls, content: str, filename: str | pathlib.Path | None = None, **kwargs: t.Any) -> 'SinglefileData': + def from_string(cls, content: str, filename: str | pathlib.Path | None = None, **kwargs: Any) -> 'SinglefileData': """Construct a new instance and set ``content`` as its contents. :param content: The content as a string. @@ -47,9 +47,7 @@ def from_string(cls, content: str, filename: str | pathlib.Path | None = None, * return cls(io.StringIO(content), filename, **kwargs) @classmethod - def from_bytes( - cls, content: bytes, filename: str | pathlib.Path | None = None, **kwargs: t.Any - ) -> 'SinglefileData': + def from_bytes(cls, content: bytes, filename: str | pathlib.Path | None = None, **kwargs: Any) -> 'SinglefileData': """Construct a new instance and set ``content`` as its contents. :param content: The content as bytes. 
@@ -59,10 +57,10 @@ def from_bytes( def __init__( self, - file: str | pathlib.Path | t.IO | None = None, + file: str | pathlib.Path | IO | None = None, filename: str | pathlib.Path | None = None, - content: str | pathlib.Path | t.IO | None = None, - **kwargs: t.Any, + content: str | pathlib.Path | IO | None = None, + **kwargs: Any, ) -> None: """Construct a new instance and set the contents to that of the file. @@ -93,26 +91,26 @@ def filename(self) -> str: """ return self.base.attributes.get('filename') - @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['r'] = ...) -> t.Iterator[t.TextIO]: ... + def open(self, path: FilePath, mode: Literal['r'] = ...) -> Iterator[TextIO]: ... - @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['rb']) -> t.Iterator[t.BinaryIO]: ... + def open(self, path: FilePath, mode: Literal['rb']) -> Iterator[BinaryIO]: ... - @t.overload + @overload @contextlib.contextmanager - def open(self, path: None = None, mode: t.Literal['r'] = ...) -> t.Iterator[t.TextIO]: ... + def open(self, path: None = None, mode: Literal['r'] = ...) -> Iterator[TextIO]: ... - @t.overload + @overload @contextlib.contextmanager - def open(self, path: None = None, mode: t.Literal['rb'] = ...) -> t.Iterator[t.BinaryIO]: ... + def open(self, path: None = None, mode: Literal['rb'] = ...) -> Iterator[BinaryIO]: ... @contextlib.contextmanager def open( - self, path: FilePath | None = None, mode: t.Literal['r', 'rb'] = 'r' - ) -> t.Iterator[t.BinaryIO] | t.Iterator[t.TextIO]: + self, path: FilePath | None = None, mode: Literal['r', 'rb'] = 'r' + ) -> Iterator[BinaryIO] | Iterator[TextIO]: """Return an open file handle to the content of this data node. :param path: the relative path of the object within the repository. 
@@ -126,7 +124,7 @@ def open( yield handle @contextlib.contextmanager - def as_path(self) -> t.Iterator[pathlib.Path]: + def as_path(self) -> Iterator[pathlib.Path]: """Make the contents of the file available as a normal filepath on the local file system. :param path: optional relative path of the object within the repository. @@ -137,11 +135,11 @@ def as_path(self) -> t.Iterator[pathlib.Path]: with self.base.repository.as_path(self.filename) as filepath: yield filepath - @t.overload - def get_content(self, mode: t.Literal['rb']) -> bytes: ... + @overload + def get_content(self, mode: Literal['rb']) -> bytes: ... - @t.overload - def get_content(self, mode: t.Literal['r']) -> str: ... + @overload + def get_content(self, mode: Literal['r']) -> str: ... def get_content(self, mode: str = 'r') -> str | bytes: """Return the content of the single file stored for this data node. @@ -152,7 +150,7 @@ def get_content(self, mode: str = 'r') -> str | bytes: with self.open(mode=mode) as handle: # type: ignore[call-overload] return handle.read() - def set_file(self, file: str | pathlib.Path | t.IO, filename: str | pathlib.Path | None = None) -> None: + def set_file(self, file: str | pathlib.Path | IO, filename: str | pathlib.Path | None = None) -> None: """Store the content of the file in the node's repository, deleting any other existing objects. :param file: an absolute filepath or filelike object whose contents to copy diff --git a/src/aiida/orm/nodes/data/structure.py b/src/aiida/orm/nodes/data/structure.py index 0e0d7e49db..dfe503a9b5 100644 --- a/src/aiida/orm/nodes/data/structure.py +++ b/src/aiida/orm/nodes/data/structure.py @@ -10,11 +10,13 @@ functions to operate on them. 
""" +from __future__ import annotations + import copy import functools import itertools import json -import typing as t +from typing import Optional from aiida.common.constants import elements from aiida.common.exceptions import UnsupportedSpeciesError @@ -669,1752 +671,1748 @@ def atom_kinds_to_html(atom_kind): return html_formula -class StructureData(Data): - """Data class that represents an atomic structure. +class Kind: + """This class contains the information about the species (kinds) of the system. - The data is organized as a collection of sites together with a cell, the boundary conditions (whether they are - periodic or not) and other related useful information. + It can be a single atom, or an alloy, or even contain vacancies. """ - _set_incompatibilities = [ - ('ase', 'cell'), - ('ase', 'pbc'), - ('ase', 'pymatgen'), - ('ase', 'pymatgen_molecule'), - ('ase', 'pymatgen_structure'), - ('cell', 'pymatgen'), - ('cell', 'pymatgen_molecule'), - ('cell', 'pymatgen_structure'), - ('pbc', 'pymatgen'), - ('pbc', 'pymatgen_molecule'), - ('pbc', 'pymatgen_structure'), - ('pymatgen', 'pymatgen_molecule'), - ('pymatgen', 'pymatgen_structure'), - ('pymatgen_molecule', 'pymatgen_structure'), - ] + def __init__(self, **kwargs): + """Create a site. + One can either pass: - _dimensionality_label = {0: '', 1: 'length', 2: 'surface', 3: 'volume'} - _internal_kind_tags = None + :param raw: the raw python dictionary that will be converted to a + Kind object. 
+ :param ase: an ase Atom object + :param kind: a Kind object (to get a copy) - class Model(Data.Model): - pbc1: bool = MetadataField(description='Whether periodic in the a direction') - pbc2: bool = MetadataField(description='Whether periodic in the b direction') - pbc3: bool = MetadataField(description='Whether periodic in the c direction') - cell: t.List[t.List[float]] = MetadataField(description='The cell parameters') - kinds: t.Optional[t.List[dict]] = MetadataField(description='The kinds of atoms') - sites: t.Optional[t.List[dict]] = MetadataField(description='The atomic sites') + Or alternatively the following parameters: - def __init__( - self, - cell=None, - pbc=None, - ase=None, - pymatgen=None, - pymatgen_structure=None, - pymatgen_molecule=None, - pbc1=None, - pbc2=None, - pbc3=None, - kinds=None, - sites=None, - **kwargs, - ): - if pbc1 is not None and pbc2 is not None and pbc3 is not None: - pbc = [pbc1, pbc2, pbc3] + :param symbols: a single string for the symbol of this site, or a list + of symbol strings + :param weights: (optional) the weights for each atomic species of + this site. + If only a single symbol is provided, then this value is + optional and the weight is set to 1. + :param mass: (optional) the mass for this site in atomic mass units. + If not provided, the mass is set by the + self.reset_mass() function. + :param name: a string that uniquely identifies the kind, and that + is used to identify the sites. + """ + # Internal variables + self._mass = None + self._symbols = None + self._weights = None + self._name = None - args = { - 'cell': cell, - 'pbc': pbc, - 'ase': ase, - 'pymatgen': pymatgen, - 'pymatgen_structure': pymatgen_structure, - 'pymatgen_molecule': pymatgen_molecule, - } + # It will be remain to None in general; it is used to further + # identify this species. At the moment, it is used only when importing + # from ASE, if the species had a tag (different from zero). + ## NOTE! 
This is not persisted on DB but only used while the class + # is loaded in memory (i.e., it is not output with the get_raw() method) + self._internal_tag = None - for left, right in self._set_incompatibilities: - if args[left] is not None and args[right] is not None: - raise ValueError(f'cannot pass {left} and {right} at the same time') + # Logic to create the site from the raw format + if 'raw' in kwargs: + if len(kwargs) != 1: + raise ValueError("If you pass 'raw', then you cannot pass any other parameter.") - super().__init__(**kwargs) + raw = kwargs['raw'] - if any(ext is not None for ext in [ase, pymatgen, pymatgen_structure, pymatgen_molecule]): - if ase is not None: - self.set_ase(ase) + try: + self.set_symbols_and_weights(raw['symbols'], raw['weights']) + except KeyError: + raise ValueError("You didn't specify either 'symbols' or 'weights' in the raw site data.") + try: + self.mass = raw['mass'] + except KeyError: + raise ValueError("You didn't specify the site mass in the raw site data.") - if pymatgen is not None: - self.set_pymatgen(pymatgen) + try: + self.name = raw['name'] + except KeyError: + raise ValueError("You didn't specify the name in the raw site data.") - if pymatgen_structure is not None: - self.set_pymatgen_structure(pymatgen_structure) + elif 'kind' in kwargs: + if len(kwargs) != 1: + raise ValueError("If you pass 'kind', then you cannot pass any other parameter.") + oldkind = kwargs['kind'] - if pymatgen_molecule is not None: - self.set_pymatgen_molecule(pymatgen_molecule) + try: + self.set_symbols_and_weights(oldkind.symbols, oldkind.weights) + self.mass = oldkind.mass + self.name = oldkind.name + self._internal_tag = oldkind._internal_tag + except AttributeError: + raise ValueError( + 'Error using the Kind object. Are you sure ' + 'it is a Kind object? 
[Introspection says it is ' + '{}]'.format(str(type(oldkind))) + ) + + elif 'ase' in kwargs: + aseatom = kwargs['ase'] + if len(kwargs) != 1: + raise ValueError("If you pass 'ase', then you cannot pass any other parameter.") + try: + import numpy + + self.set_symbols_and_weights([aseatom.symbol], [1.0]) + # ASE sets mass to numpy.nan for unstable species + if not numpy.isnan(aseatom.mass): + self.mass = aseatom.mass + else: + self.reset_mass() + except AttributeError: + raise ValueError( + 'Error using the aseatom object. Are you sure ' + 'it is a ase.atom.Atom object? [Introspection says it is ' + '{}]'.format(str(type(aseatom))) + ) + if aseatom.tag != 0: + self.set_automatic_kind_name(tag=aseatom.tag) + self._internal_tag = aseatom.tag + else: + self.set_automatic_kind_name() else: - if cell is None: - cell = _DEFAULT_CELL - self.set_cell(cell) + if 'symbols' not in kwargs: + raise ValueError( + "'symbols' need to be " + 'specified (at least) to create a Site object. Otherwise, ' + "pass a raw site using the 'raw' parameter." + ) + weights = kwargs.pop('weights', None) + self.set_symbols_and_weights(kwargs.pop('symbols'), weights) + try: + self.mass = kwargs.pop('mass') + except KeyError: + self.reset_mass() + try: + self.name = kwargs.pop('name') + except KeyError: + self.set_automatic_kind_name() + if kwargs: + raise ValueError(f'Unrecognized parameters passed to Kind constructor: {kwargs.keys()}') - if pbc is None: - pbc = [True, True, True] - self.set_pbc(pbc) + def get_raw(self): + """Return the raw version of the site, mapped to a suitable dictionary. + This is the format that is actually used to store each kind of the + structure in the DB. - if kinds is not None: - self.base.attributes.set('kinds', kinds) + :return: a python dictionary with the kind. 
+ """ + return { + 'symbols': self.symbols, + 'weights': self.weights, + 'mass': self.mass, + 'name': self.name, + } - if sites is not None: - self.base.attributes.set('sites', sites) + def reset_mass(self): + """Reset the mass to the automatic calculated value. - def get_dimensionality(self): - """Return the dimensionality of the structure and its length/surface/volume. + The mass can be set manually; by default, if not provided, + it is the mass of the constituent atoms, weighted with their + weight (after the weight has been normalized to one to take + correctly into account vacancies). - Zero-dimensional structures are assigned "volume" 0. + This function uses the internal _symbols and _weights values and + thus assumes that the values are validated. - :return: returns a dictionary with keys "dim" (dimensionality integer), "label" (dimensionality label) - and "value" (numerical length/surface/volume). + It sets the mass to None if the sum of weights is zero. """ - return _get_dimensionality(self.pbc, self.cell) + w_sum = sum(self._weights) - def set_ase(self, aseatoms): - """Load the structure from a ASE object""" - if is_ase_atoms(aseatoms): - # Read the ase structure - self.cell = aseatoms.cell - self.pbc = aseatoms.pbc - self.clear_kinds() # This also calls clear_sites - for atom in aseatoms: - self.append_atom(ase=atom) - else: - raise TypeError('The value is not an ase.Atoms object') + if abs(w_sum) < _SUM_THRESHOLD: + self._mass = None + return - def set_pymatgen(self, obj, **kwargs): - """Load the structure from a pymatgen object. + normalized_weights = (i / w_sum for i in self._weights) + element_masses = (_atomic_masses[sym] for sym in self._symbols) + # Weighted mass + self._mass = sum(i * j for i, j in zip(normalized_weights, element_masses)) - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). 
- """ - typestr = type(obj).__name__ - try: - func = getattr(self, f'set_pymatgen_{typestr.lower()}') - except AttributeError: - raise AttributeError(f"Converter for '{typestr}' to AiiDA structure does not exist") - func(obj, **kwargs) + @property + def name(self): + """Return the name of this kind. + The name of a kind is used to identify the species of a site. - def set_pymatgen_molecule(self, mol, margin=5): - """Load the structure from a pymatgen Molecule object. + :return: a string + """ + return self._name - :param margin: the margin to be added in all directions of the - bounding box of the molecule. + @name.setter + def name(self, value): + """Set the name of this site (a string).""" + self._name = str(value) - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). + def set_automatic_kind_name(self, tag=None): + """Set the type to a string obtained with the symbols appended one + after the other, without spaces, in alphabetical order; + if the site has a vacancy, a X is appended at the end too. """ - box = [ - max(x.coords.tolist()[0] for x in mol.sites) - min(x.coords.tolist()[0] for x in mol.sites) + 2 * margin, - max(x.coords.tolist()[1] for x in mol.sites) - min(x.coords.tolist()[1] for x in mol.sites) + 2 * margin, - max(x.coords.tolist()[2] for x in mol.sites) - min(x.coords.tolist()[2] for x in mol.sites) + 2 * margin, - ] - self.set_pymatgen_structure(mol.get_boxed_structure(*box)) - self.pbc = [False, False, False] + name_string = create_automatic_kind_name(self.symbols, self.weights) + if tag is None: + self.name = name_string + else: + self.name = f'{name_string}{tag}' - def set_pymatgen_structure(self, struct): - """Load the structure from a pymatgen Structure object. + def compare_with(self, other_kind): + """Compare with another Kind object to check if they are different. - .. note:: periodic boundary conditions are set to True in all - three directions. - .. 
note:: Requires the pymatgen module (version >= 3.3.5, usage - of earlier versions may cause errors). + .. note:: This does NOT check the 'type' attribute. Instead, it compares + (with reasonable thresholds, where applicable): the mass, and the list + of symbols and of weights. Moreover, it compares the + ``_internal_tag``, if defined (at the moment, defined automatically + only when importing the Kind from ASE, if the atom has a non-zero tag). + Note that the _internal_tag is only used while the class is loaded, + but is not persisted on the database. - :raise ValueError: if there are partial occupancies together with spins. + :return: A tuple with two elements. The first one is True if the two sites + are 'equivalent' (same mass, symbols and weights), False otherwise. + The second element of the tuple is a string, + which is either None (if the first element was True), or contains + a 'human-readable' description of the first difference encountered + between the two sites. """ + # Check length of symbols + if len(self.symbols) != len(other_kind.symbols): + return (False, 'Different length of symbols list') - def build_kind_name(species_and_occu): - """Build a kind name from a pymatgen Composition, including an additional ordinal if spin is included, - e.g. it returns '1' for an atom with spin < 0 and '2' for an atom with spin > 0, - otherwise (no spin) it returns None + # Check list of symbols + for i, symbol in enumerate(self.symbols): + if symbol != other_kind.symbols[i]: + return (False, f'Symbol at position {i + 1:d} are different ({symbol} vs. {other_kind.symbols[i]})') + # Check weights (assuming length of weights and of symbols have same + # length, which should be always true + for i, weight in enumerate(self.weights): + if weight != other_kind.weights[i]: + return (False, f'Weight at position {i + 1:d} are different ({weight} vs. 
{other_kind.weights[i]})') + # Check masses + if abs(self.mass - other_kind.mass) > _MASS_THRESHOLD: + return (False, f'Masses are different ({self.mass} vs. {other_kind.mass})') - :param species_and_occu: a pymatgen species and occupations dictionary - :return: a string representing the kind name or None - """ - species = list(species_and_occu.keys()) - occupations = list(species_and_occu.values()) + if self._internal_tag != other_kind._internal_tag: + return (False, f'Internal tags are different ({self._internal_tag} vs. {other_kind._internal_tag})') - # As of v2023.9.2, the ``properties`` argument is removed and the ``spin`` argument should be used. - # See: https://github.com/materialsproject/pymatgen/commit/118c245d6082fe0b13e19d348fc1db9c0d512019 - # The ``spin`` argument was introduced in v2023.6.28. - # See: https://github.com/materialsproject/pymatgen/commit/9f2b3939af45d5129e0778d371d814811924aeb6 - has_spin_attribute = hasattr(species[0], '_spin') + # If we got here, the two Site objects are similar enough + # to be considered of the same kind + return (True, '') - if has_spin_attribute: - has_spin = any(specie.spin != 0 for specie in species) - else: - has_spin = any(specie.as_dict().get('properties', {}).get('spin', 0) != 0 for specie in species) + @property + def mass(self): + """The mass of this species kind. - has_partial_occupancies = len(occupations) != 1 or occupations[0] != 1.0 + :return: a float + """ + return self._mass - if has_partial_occupancies and has_spin: - raise ValueError('Cannot set partial occupancies and spins at the same time') + @mass.setter + def mass(self, value): + the_mass = float(value) + if the_mass <= 0: + raise ValueError('The mass must be positive.') + self._mass = the_mass - if has_spin: - symbols = [specie.symbol for specie in species] - kind_name = create_automatic_kind_name(symbols, occupations) + @property + def weights(self): + """Weights for this species kind. 
Refer also to + :func:validate_symbols_tuple for the validation rules on the weights. + """ + return copy.deepcopy(self._weights) - # If there is spin, we can only have a single specie, otherwise we would have raised above - specie = species[0] - if has_spin_attribute: - spin = specie.spin - else: - spin = specie.as_dict().get('properties', {}).get('spin', 0) + @weights.setter + def weights(self, value): + """If value is a number, a single weight is used. Otherwise, a list or + tuple of numbers is expected. + None is also accepted, corresponding to the list [1.]. + """ + weights_tuple = _create_weights_tuple(value) - if spin < 0: - kind_name += '1' - else: - kind_name += '2' + if len(weights_tuple) != len(self._symbols): + raise ValueError('Cannot change the number of weights. Use the set_symbols_and_weights function instead.') + validate_weights_tuple(weights_tuple, _SUM_THRESHOLD) - return kind_name + self._weights = weights_tuple - return None + def get_symbols_string(self): + """Return a string that tries to match as good as possible the symbols + of this kind. If there is only one symbol (no alloy) with 100% + occupancy, just returns the symbol name. Otherwise, groups the full + string in curly brackets, and try to write also the composition + (with 2 precision only). - self.cell = struct.lattice.matrix.tolist() - self.pbc = [True, True, True] - self.clear_kinds() + .. note:: If there is a vacancy (sum of weights<1), we indicate it + with the X symbol followed by 1-sum(weights) (still with 2 + digits precision, so it can be 0.00) - for site in struct.sites: - species_and_occu = site.species + .. note:: Note the difference with respect to the symbols and the + symbol properties! 
+ """ + return get_symbols_string(self._symbols, self._weights) - if 'kind_name' in site.properties: - kind_name = site.properties['kind_name'] - else: - kind_name = build_kind_name(species_and_occu) + @property + def symbol(self): + """If the kind has only one symbol, return it; otherwise, raise a + ValueError. + """ + if len(self._symbols) == 1: + return self._symbols[0] - inputs = { - 'symbols': [x.symbol for x in species_and_occu.keys()], - 'weights': list(species_and_occu.values()), - 'position': site.coords.tolist(), - } + raise ValueError(f'This kind has more than one symbol (it is an alloy): {self._symbols}') - if kind_name is not None: - inputs['name'] = kind_name + @property + def symbols(self): + """List of symbols for this site. If the site is a single atom, + pass a list of one element only, or simply the string for that atom. + For alloys, a list of elements. - self.append_atom(**inputs) + .. note:: Note that if you change the list of symbols, the kind + name remains unchanged. + """ + return copy.deepcopy(self._symbols) - def _validate(self): - """Performs some standard validation tests.""" - from aiida.common.exceptions import ValidationError + @symbols.setter + def symbols(self, value): + """If value is a string, a single symbol is used. Otherwise, a list or + tuple of strings is expected. - super()._validate() + I set a copy of the list, so to avoid that the content changes + after the value is set. + """ + symbols_tuple = _create_symbols_tuple(value) - try: - _get_valid_cell(self.cell) - except ValueError as exc: - raise ValidationError(f'Invalid cell: {exc}') + if len(symbols_tuple) != len(self._weights): + raise ValueError('Cannot change the number of symbols. 
Use the set_symbols_and_weights function instead.') + validate_symbols_tuple(symbols_tuple) - try: - get_valid_pbc(self.pbc) - except ValueError as exc: - raise ValidationError(f'Invalid periodic boundary conditions: {exc}') + self._symbols = symbols_tuple - _validate_dimensionality(self.pbc, self.cell) + def set_symbols_and_weights(self, symbols, weights): + """Set the chemical symbols and the weights for the site. - try: - # This will try to create the kinds objects - kinds = self.kinds - except ValueError as exc: - raise ValidationError(f'Unable to validate the kinds: {exc}') + .. note:: Note that the kind name remains unchanged. + """ + symbols_tuple = _create_symbols_tuple(symbols) + weights_tuple = _create_weights_tuple(weights) + if len(symbols_tuple) != len(weights_tuple): + raise ValueError('The number of symbols and weights must coincide.') + validate_symbols_tuple(symbols_tuple) + validate_weights_tuple(weights_tuple, _SUM_THRESHOLD) + self._symbols = symbols_tuple + self._weights = weights_tuple - from collections import Counter + @property + def is_alloy(self): + """Return whether the Kind is an alloy, i.e. contains more than one element - counts = Counter([k.name for k in kinds]) - for count in counts: - if counts[count] != 1: - raise ValidationError(f"Kind with name '{count}' appears {counts[count]} times instead of only one") + :return: boolean, True if the kind has more than one element, False otherwise. + """ + return len(self._symbols) != 1 - try: - # This will try to create the sites objects - sites = self.sites - except ValueError as exc: - raise ValidationError(f'Unable to validate the sites: {exc}') + @property + def has_vacancies(self): + """Return whether the Kind contains vacancies, i.e. when the sum of the weights is less than one. - for site in sites: - if site.kind_name not in [k.name for k in kinds]: - raise ValidationError(f'A site has kind {site.kind_name}, but no specie with that name exists') + .. 
note:: the property uses the internal variable `_SUM_THRESHOLD` as a threshold. - kinds_without_sites = set(k.name for k in kinds) - set(s.kind_name for s in sites) - if kinds_without_sites: - raise ValidationError( - f'The following kinds are defined, but there are no sites with that kind: {list(kinds_without_sites)}' - ) + :return: boolean, True if the sum of the weights is less than one, False otherwise + """ + return has_vacancies(self._weights) - def _prepare_xsf(self, main_file_name=''): - """Write the given structure to a string of format XSF (for XCrySDen).""" - if self.is_alloy or self.has_vacancies: - raise NotImplementedError('XSF for alloys or systems with vacancies not implemented.') + def __repr__(self): + return f'<{self.__class__.__name__}: {self!s}>' - sites = self.sites + def __str__(self): + symbol = self.get_symbols_string() + return f"name '{self.name}', symbol '{symbol}'" - return_string = 'CRYSTAL\nPRIMVEC 1\n' - for cell_vector in self.cell: - return_string += ' '.join([f'{i:18.10f}' for i in cell_vector]) - return_string += '\n' - return_string += 'PRIMCOORD 1\n' - return_string += f'{int(len(sites))} 1\n' - for site in sites: - # I checked above that it is not an alloy, therefore I take the - # first symbol - return_string += f'{_atomic_numbers[self.get_kind(site.kind_name).symbols[0]]} ' - return_string += '%18.10f %18.10f %18.10f\n' % tuple(site.position) - return return_string.encode('utf-8'), {} - def _prepare_cif(self, main_file_name=''): - """Write the given structure to a string of format CIF.""" - from aiida.orm import CifData +class Site: + """This class contains the information about a given site of the system. - cif = CifData(ase=self.get_ase()) - return cif._prepare_cif() + It can be a single atom, or an alloy, or even contain vacancies. 
+ """ - def _prepare_chemdoodle(self, main_file_name=''): - """Write the given structure to a string of format required by ChemDoodle.""" - from itertools import product + def __init__(self, **kwargs): + """Create a site. - import numpy as np + :param kind_name: a string that identifies the kind (species) of this site. + This has to be found in the list of kinds of the StructureData + object. + Validation will be done at the StructureData level. + :param position: the absolute position (three floats) in angstrom + """ + self._kind_name = None + self._position = None - supercell_factors = [1, 1, 1] + if 'site' in kwargs: + site = kwargs.pop('site') + if kwargs: + raise ValueError("If you pass 'site', you cannot pass any further parameter to the Site constructor") + if not isinstance(site, Site): + raise ValueError("'site' must be of type Site") + self.kind_name = site.kind_name + self.position = site.position + elif 'raw' in kwargs: + raw = kwargs.pop('raw') + if kwargs: + raise ValueError("If you pass 'raw', you cannot pass any further parameter to the Site constructor") + try: + self.kind_name = raw['kind_name'] + self.position = raw['position'] + except KeyError as exc: + raise ValueError(f'Invalid raw object, it does not contain any key {exc.args[0]}') + except TypeError: + raise ValueError('Invalid raw object, it is not a dictionary') - # Get cell vectors and atomic position - lattice_vectors = np.array(self.base.attributes.get('cell')) - base_sites = self.base.attributes.get('sites') + else: + try: + self.kind_name = kwargs.pop('kind_name') + self.position = kwargs.pop('position') + except KeyError as exc: + raise ValueError(f'You need to specify {exc.args[0]}') + if kwargs: + raise ValueError(f'Unrecognized parameters: {kwargs.keys}') - start1 = -int(supercell_factors[0] / 2) - start2 = -int(supercell_factors[1] / 2) - start3 = -int(supercell_factors[2] / 2) + def get_raw(self): + """Return the raw version of the site, mapped to a suitable dictionary. 
+ This is the format that is actually used to store each site of the + structure in the DB. - stop1 = start1 + supercell_factors[0] - stop2 = start2 + supercell_factors[1] - stop3 = start3 + supercell_factors[2] + :return: a python dictionary with the site. + """ + return { + 'position': self.position, + 'kind_name': self.kind_name, + } - grid1 = range(start1, stop1) - grid2 = range(start2, stop2) - grid3 = range(start3, stop3) + def get_ase(self, kinds): + """Return a ase.Atom object for this site. - atoms_json = [] + :param kinds: the list of kinds from the StructureData object. - # Manual recenter of the structure - center = (lattice_vectors[0] + lattice_vectors[1] + lattice_vectors[2]) / 2.0 + .. note:: If any site is an alloy or has vacancies, a ValueError + is raised (from the site.get_ase() routine). + """ + from collections import defaultdict - for ix, iy, iz in product(grid1, grid2, grid3): - for base_site in base_sites: - shift = (ix * lattice_vectors[0] + iy * lattice_vectors[1] + iz * lattice_vectors[2] - center).tolist() + import ase - kind_name = base_site['kind_name'] - kind_string = self.get_kind(kind_name).get_symbols_string() + # I create the list of tags + tag_list = [] + used_tags = defaultdict(list) + for k in kinds: + # Skip alloys and vacancies + if k.is_alloy or k.has_vacancies: + tag_list.append(None) + # If the kind name is equal to the specie name, + # then no tag should be set + elif str(k.name) == str(k.symbols[0]): + tag_list.append(None) + else: + # Name is not the specie name + if k.name.startswith(k.symbols[0]): + try: + new_tag = int(k.name[len(k.symbols[0])]) + tag_list.append(new_tag) + used_tags[k.symbols[0]].append(new_tag) + continue + except ValueError: + pass + tag_list.append(k.symbols[0]) # I use a string as a placeholder - atoms_json.append( - { - 'l': kind_string, - 'x': base_site['position'][0] + shift[0], - 'y': base_site['position'][1] + shift[1], - 'z': base_site['position'][2] + shift[2], - 'atomic_elements_html': 
atom_kinds_to_html(kind_string), - } - ) + for i, _ in enumerate(tag_list): + # If it is a string, it is the name of the element, + # and I have to generate a new integer for this element + # and replace tag_list[i] with this new integer + if isinstance(tag_list[i], str): + # I get a list of used tags for this element + existing_tags = used_tags[tag_list[i]] + if existing_tags: + new_tag = max(existing_tags) + 1 + else: # empty list + new_tag = 1 + # I store it also as a used tag! + used_tags[tag_list[i]].append(new_tag) + # I update the tag + tag_list[i] = new_tag - cell_json = { - 't': 'UnitCell', - 'i': 's0', - 'o': (-center).tolist(), - 'x': (lattice_vectors[0] - center).tolist(), - 'y': (lattice_vectors[1] - center).tolist(), - 'z': (lattice_vectors[2] - center).tolist(), - 'xy': (lattice_vectors[0] + lattice_vectors[1] - center).tolist(), - 'xz': (lattice_vectors[0] + lattice_vectors[2] - center).tolist(), - 'yz': (lattice_vectors[1] + lattice_vectors[2] - center).tolist(), - 'xyz': (lattice_vectors[0] + lattice_vectors[1] + lattice_vectors[2] - center).tolist(), - } + found = False + for kind_candidate, tag_candidate in zip(kinds, tag_list): + if kind_candidate.name == self.kind_name: + kind = kind_candidate + tag = tag_candidate + found = True + break + if not found: + raise ValueError(f"No kind '{self.kind_name}' has been found in the list of kinds") - return_dict = {'s': [cell_json], 'm': [{'a': atoms_json}], 'units': 'Å'} + if kind.is_alloy or kind.has_vacancies: + raise ValueError('Cannot convert to ASE if the kind represents an alloy or it has vacancies.') + aseatom = ase.Atom(position=self.position, symbol=str(kind.symbols[0]), mass=kind.mass) + if tag is not None: + aseatom.tag = tag + return aseatom - return json.dumps(return_dict).encode('utf-8'), {} + @property + def kind_name(self): + """Return the kind name of this site (a string). 
- def _prepare_xyz(self, main_file_name=''): - """Write the given structure to a string of format XYZ.""" - if self.is_alloy or self.has_vacancies: - raise NotImplementedError('XYZ for alloys or systems with vacancies not implemented.') + The type of a site is used to decide whether two sites are identical + (same mass, symbols, weights, ...) or not. + """ + return self._kind_name - sites = self.sites - cell = self.cell + @kind_name.setter + def kind_name(self, value): + """Set the type of this site (a string).""" + self._kind_name = str(value) - return_list = [f'{len(sites)}'] - return_list.append( - 'Lattice="{} {} {} {} {} {} {} {} {}" pbc="{} {} {}"'.format( - cell[0][0], - cell[0][1], - cell[0][2], - cell[1][0], - cell[1][1], - cell[1][2], - cell[2][0], - cell[2][1], - cell[2][2], - self.pbc[0], - self.pbc[1], - self.pbc[2], - ) - ) - for site in sites: - # I checked above that it is not an alloy, therefore I take the - # first symbol - return_list.append( - '{:6s} {:18.10f} {:18.10f} {:18.10f}'.format( - self.get_kind(site.kind_name).symbols[0], site.position[0], site.position[1], site.position[2] - ) - ) + @property + def position(self): + """Return the position of this site in absolute coordinates, + in angstrom. + """ + return copy.deepcopy(self._position) - return_string = '\n'.join(return_list) - return return_string.encode('utf-8'), {} + @position.setter + def position(self, value): + """Set the position of this site in absolute coordinates, + in angstrom. 
+ """ + try: + internal_pos = tuple(float(i) for i in value) + if len(internal_pos) != 3: + raise ValueError + # value is not iterable or elements are not floats or len != 3 + except (ValueError, TypeError): + raise ValueError('Wrong format for position, must be a list of three float numbers.') + self._position = internal_pos - def _parse_xyz(self, inputstring): - """Read the structure from a string of format XYZ.""" - from aiida.tools.data.structure import xyz_parser_iterator + def __repr__(self): + return f'<{self.__class__.__name__}: {self!s}>' - # idiom to get to the last block - atoms = None - for _, _, atoms in xyz_parser_iterator(inputstring): - pass + def __str__(self): + return f"kind name '{self.kind_name}' @ {self.position[0]},{self.position[1]},{self.position[2]}" - if atoms is None: - raise TypeError('The data does not contain any XYZ data') - self.clear_kinds() - self.pbc = (False, False, False) +class StructureData(Data): + """Data class that represents an atomic structure. - for sym, position in atoms: - self.append_atom(symbols=sym, position=position) + The data is organized as a collection of sites together with a cell, the boundary conditions (whether they are + periodic or not) and other related useful information. + """ - def _adjust_default_cell(self, vacuum_factor=1.0, vacuum_addition=10.0, pbc=(False, False, False)): - """If the structure was imported from an xyz file, it lacks a cell. 
- This method will adjust the cell - """ - import numpy as np + _set_incompatibilities = [ + ('ase', 'cell'), + ('ase', 'pbc'), + ('ase', 'pymatgen'), + ('ase', 'pymatgen_molecule'), + ('ase', 'pymatgen_structure'), + ('cell', 'pymatgen'), + ('cell', 'pymatgen_molecule'), + ('cell', 'pymatgen_structure'), + ('pbc', 'pymatgen'), + ('pbc', 'pymatgen_molecule'), + ('pbc', 'pymatgen_structure'), + ('pymatgen', 'pymatgen_molecule'), + ('pymatgen', 'pymatgen_structure'), + ('pymatgen_molecule', 'pymatgen_structure'), + ] - def get_extremas_from_positions(positions): - """Returns the minimum and maximum value for each dimension in the positions given""" - return list(zip(*[(min(values), max(values)) for values in zip(*positions)])) + _dimensionality_label = {0: '', 1: 'length', 2: 'surface', 3: 'volume'} + _internal_kind_tags = None - # Calculating the minimal cell: - positions = np.array([site.position for site in self.sites]) - position_min, _ = get_extremas_from_positions(positions) + class Model(Data.Model): + pbc1: bool = MetadataField(description='Whether periodic in the a direction') + pbc2: bool = MetadataField(description='Whether periodic in the b direction') + pbc3: bool = MetadataField(description='Whether periodic in the c direction') + cell: list[list[float]] = MetadataField(description='The cell parameters') + kinds: Optional[list[dict]] = MetadataField(None, description='The kinds of atoms') + sites: Optional[list[dict]] = MetadataField(None, description='The atomic sites') - # Translate the structure to the origin, such that the minimal values in each dimension - # amount to (0,0,0) - positions -= position_min - for index, site in enumerate(self.base.attributes.get('sites')): - site['position'] = list(positions[index]) + def __init__( + self, + cell=None, + pbc=None, + ase=None, + pymatgen=None, + pymatgen_structure=None, + pymatgen_molecule=None, + pbc1=None, + pbc2=None, + pbc3=None, + kinds=None, + sites=None, + **kwargs, + ): + if pbc1 is not None and 
pbc2 is not None and pbc3 is not None: + pbc = [pbc1, pbc2, pbc3] - # The orthorhombic cell that (just) accomodates the whole structure is now given by the - # extremas of position in each dimension: - minimal_orthorhombic_cell_dimensions = np.array(get_extremas_from_positions(positions)[1]) - minimal_orthorhombic_cell_dimensions = np.dot(vacuum_factor, minimal_orthorhombic_cell_dimensions) - minimal_orthorhombic_cell_dimensions += vacuum_addition + args = { + 'cell': cell, + 'pbc': pbc, + 'ase': ase, + 'pymatgen': pymatgen, + 'pymatgen_structure': pymatgen_structure, + 'pymatgen_molecule': pymatgen_molecule, + } - # Transform the vector (a, b, c ) to [[a,0,0], [0,b,0], [0,0,c]] - newcell = np.diag(minimal_orthorhombic_cell_dimensions) - self.set_cell(newcell.tolist()) + for left, right in self._set_incompatibilities: + if args[left] is not None and args[right] is not None: + raise ValueError(f'cannot pass {left} and {right} at the same time') - # Now set PBC (checks are done in set_pbc, no need to check anything here) - self.set_pbc(pbc) + super().__init__(**kwargs) - return self + if any(ext is not None for ext in [ase, pymatgen, pymatgen_structure, pymatgen_molecule]): + if ase is not None: + self.set_ase(ase) - def get_description(self): - """Returns a string with infos retrieved from StructureData node's properties + if pymatgen is not None: + self.set_pymatgen(pymatgen) - :param self: the StructureData node - :return: retsrt: the description string - """ - return self.get_formula(mode='hill_compact') + if pymatgen_structure is not None: + self.set_pymatgen_structure(pymatgen_structure) - def get_symbols_set(self): - """Return a set containing the names of all elements involved in - this structure (i.e., for it joins the list of symbols for each - kind k in the structure). + if pymatgen_molecule is not None: + self.set_pymatgen_molecule(pymatgen_molecule) - :returns: a set of strings of element names. 
- """ - return set(itertools.chain.from_iterable(kind.symbols for kind in self.kinds)) + else: + if cell is None: + cell = _DEFAULT_CELL + self.set_cell(cell) - def get_formula(self, mode='hill', separator=''): - """Return a string with the chemical formula. + if pbc is None: + pbc = [True, True, True] + self.set_pbc(pbc) - :param mode: a string to specify how to generate the formula, can - assume one of the following values: + if kinds is not None: + self.base.attributes.set('kinds', kinds) - * 'hill' (default): count the number of atoms of each species, - then use Hill notation, i.e. alphabetical order with C and H - first if one or several C atom(s) is (are) present, e.g. - ``['C','H','H','H','O','C','H','H','H']`` will return ``'C2H6O'`` - ``['S','O','O','H','O','H','O']`` will return ``'H2O4S'`` - From E. A. Hill, J. Am. Chem. Soc., 22 (8), pp 478-494 (1900) + if sites is not None: + self.base.attributes.set('sites', sites) - * 'hill_compact': same as hill but the number of atoms for each - species is divided by the greatest common divisor of all of them, e.g. - ``['C','H','H','H','O','C','H','H','H','O','O','O']`` - will return ``'CH3O2'`` + def get_dimensionality(self): + """Return the dimensionality of the structure and its length/surface/volume. - * 'reduce': group repeated symbols e.g. - ``['Ba', 'Ti', 'O', 'O', 'O', 'Ba', 'Ti', 'O', 'O', 'O', - 'Ba', 'Ti', 'Ti', 'O', 'O', 'O']`` will return ``'BaTiO3BaTiO3BaTi2O3'`` + Zero-dimensional structures are assigned "volume" 0. - * 'group': will try to group as much as possible parts of the formula - e.g. - ``['Ba', 'Ti', 'O', 'O', 'O', 'Ba', 'Ti', 'O', 'O', 'O', - 'Ba', 'Ti', 'Ti', 'O', 'O', 'O']`` will return ``'(BaTiO3)2BaTi2O3'`` + :return: returns a dictionary with keys "dim" (dimensionality integer), "label" (dimensionality label) + and "value" (numerical length/surface/volume). + """ + return _get_dimensionality(self.pbc, self.cell) - * 'count': same as hill (i.e. 
one just counts the number - of atoms of each species) without the re-ordering (take the - order of the atomic sites), e.g. - ``['Ba', 'Ti', 'O', 'O', 'O','Ba', 'Ti', 'O', 'O', 'O']`` - will return ``'Ba2Ti2O6'`` + def set_ase(self, aseatoms): + """Load the structure from a ASE object""" + if is_ase_atoms(aseatoms): + # Read the ase structure + self.cell = aseatoms.cell + self.pbc = aseatoms.pbc + self.clear_kinds() # This also calls clear_sites + for atom in aseatoms: + self.append_atom(ase=atom) + else: + raise TypeError('The value is not an ase.Atoms object') - * 'count_compact': same as count but the number of atoms - for each species is divided by the greatest common divisor of - all of them, e.g. - ``['Ba', 'Ti', 'O', 'O', 'O','Ba', 'Ti', 'O', 'O', 'O']`` - will return ``'BaTiO3'`` + def set_pymatgen(self, obj, **kwargs): + """Load the structure from a pymatgen object. - :param separator: a string used to concatenate symbols. Default empty. + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). + """ + typestr = type(obj).__name__ + try: + func = getattr(self, f'set_pymatgen_{typestr.lower()}') + except AttributeError: + raise AttributeError(f"Converter for '{typestr}' to AiiDA structure does not exist") + func(obj, **kwargs) - :return: a string with the formula + def set_pymatgen_molecule(self, mol, margin=5): + """Load the structure from a pymatgen Molecule object. - .. note:: in modes reduce, group, count and count_compact, the - initial order in which the atoms were appended by the user is - used to group and/or order the symbols in the formula - """ - symbol_list = [self.get_kind(s.kind_name).get_symbols_string() for s in self.sites] + :param margin: the margin to be added in all directions of the + bounding box of the molecule. - return get_formula(symbol_list, mode=mode, separator=separator) + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). 
+ """ + box = [ + max(x.coords.tolist()[0] for x in mol.sites) - min(x.coords.tolist()[0] for x in mol.sites) + 2 * margin, + max(x.coords.tolist()[1] for x in mol.sites) - min(x.coords.tolist()[1] for x in mol.sites) + 2 * margin, + max(x.coords.tolist()[2] for x in mol.sites) - min(x.coords.tolist()[2] for x in mol.sites) + 2 * margin, + ] + self.set_pymatgen_structure(mol.get_boxed_structure(*box)) + self.pbc = [False, False, False] - def get_site_kindnames(self): - """Return a list with length equal to the number of sites of this structure, - where each element of the list is the kind name of the corresponding site. + def set_pymatgen_structure(self, struct): + """Load the structure from a pymatgen Structure object. - .. note:: This is NOT necessarily a list of chemical symbols! Use - ``[ self.get_kind(s.kind_name).get_symbols_string() for s in self.sites]`` - for chemical symbols + .. note:: periodic boundary conditions are set to True in all + three directions. + .. note:: Requires the pymatgen module (version >= 3.3.5, usage + of earlier versions may cause errors). - :return: a list of strings + :raise ValueError: if there are partial occupancies together with spins. """ - return [this_site.kind_name for this_site in self.sites] - def get_composition(self, mode='full'): - """Returns the chemical composition of this structure as a dictionary, - where each key is the kind symbol (e.g. H, Li, Ba), - and each value is the number of occurences of that element in this - structure. + def build_kind_name(species_and_occu): + """Build a kind name from a pymatgen Composition, including an additional ordinal if spin is included, + e.g. it returns '1' for an atom with spin < 0 and '2' for an atom with spin > 0, + otherwise (no spin) it returns None - :param mode: Specify the mode of the composition to return. Choose from ``full``, ``reduced`` or ``fractional``. - For example, given the structure with formula Ba2Zr2O6, the various modes operate as follows. 
- ``full``: The default, the counts are left unnnormalized. - ``reduced``: The counts are renormalized to the greatest common denominator. - ``fractional``: The counts are renormalized such that the sum equals 1. + :param species_and_occu: a pymatgen species and occupations dictionary + :return: a string representing the kind name or None + """ + species = list(species_and_occu.keys()) + occupations = list(species_and_occu.values()) - :returns: a dictionary with the composition - """ - import numpy as np + # As of v2023.9.2, the ``properties`` argument is removed and the ``spin`` argument should be used. + # See: https://github.com/materialsproject/pymatgen/commit/118c245d6082fe0b13e19d348fc1db9c0d512019 + # The ``spin`` argument was introduced in v2023.6.28. + # See: https://github.com/materialsproject/pymatgen/commit/9f2b3939af45d5129e0778d371d814811924aeb6 + has_spin_attribute = hasattr(species[0], '_spin') - symbols_list = [self.get_kind(s.kind_name).get_symbols_string() for s in self.sites] - symbols_set = set(symbols_list) + if has_spin_attribute: + has_spin = any(specie.spin != 0 for specie in species) + else: + has_spin = any(specie.as_dict().get('properties', {}).get('spin', 0) != 0 for specie in species) - if mode == 'full': - return {symbol: symbols_list.count(symbol) for symbol in symbols_set} + has_partial_occupancies = len(occupations) != 1 or occupations[0] != 1.0 - if mode == 'reduced': - gcd = np.gcd.reduce([symbols_list.count(symbol) for symbol in symbols_set]) - return {symbol: (symbols_list.count(symbol) / gcd) for symbol in symbols_set} + if has_partial_occupancies and has_spin: + raise ValueError('Cannot set partial occupancies and spins at the same time') - if mode == 'fractional': - sum_comp = sum(symbols_list.count(symbol) for symbol in symbols_set) - return {symbol: symbols_list.count(symbol) / sum_comp for symbol in symbols_set} + if has_spin: + symbols = [specie.symbol for specie in species] + kind_name = 
create_automatic_kind_name(symbols, occupations) - raise ValueError(f'mode `{mode}` is invalid, choose from `full`, `reduced` or `fractional`.') + # If there is spin, we can only have a single specie, otherwise we would have raised above + specie = species[0] + if has_spin_attribute: + spin = specie.spin + else: + spin = specie.as_dict().get('properties', {}).get('spin', 0) - def get_ase(self): - """Get the ASE object. - Requires to be able to import ase. + if spin < 0: + kind_name += '1' + else: + kind_name += '2' - :return: an ASE object corresponding to this - :py:class:`StructureData ` - object. + return kind_name - .. note:: If any site is an alloy or has vacancies, a ValueError - is raised (from the site.get_ase() routine). - """ - return self._get_object_ase() + return None - def get_pymatgen(self, **kwargs): - """Get pymatgen object. Returns pymatgen Structure for structures with periodic boundary conditions - (in 1D, 2D, 3D) and Molecule otherwise. - :param add_spin: True to add the spins to the pymatgen structure. - Default is False (no spin added). + self.cell = struct.lattice.matrix.tolist() + self.pbc = [True, True, True] + self.clear_kinds() - .. note:: The spins are set according to the following rule: + for site in struct.sites: + species_and_occu = site.species - * if the kind name ends with 1 -> spin=+1 + if 'kind_name' in site.properties: + kind_name = site.properties['kind_name'] + else: + kind_name = build_kind_name(species_and_occu) - * if the kind name ends with 2 -> spin=-1 + inputs = { + 'symbols': [x.symbol for x in species_and_occu.keys()], + 'weights': list(species_and_occu.values()), + 'position': site.coords.tolist(), + } - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). 
- """ - return self._get_object_pymatgen(**kwargs) + if kind_name is not None: + inputs['name'] = kind_name - def get_pymatgen_structure(self, **kwargs): - """Get the pymatgen Structure object with any PBC, provided the cell is not singular. - :param add_spin: True to add the spins to the pymatgen structure. - Default is False (no spin added). + self.append_atom(**inputs) - .. note:: The spins are set according to the following rule: + def _validate(self): + """Performs some standard validation tests.""" + from aiida.common.exceptions import ValidationError - * if the kind name ends with 1 -> spin=+1 + super()._validate() - * if the kind name ends with 2 -> spin=-1 + try: + _get_valid_cell(self.cell) + except ValueError as exc: + raise ValidationError(f'Invalid cell: {exc}') - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). + try: + get_valid_pbc(self.pbc) + except ValueError as exc: + raise ValidationError(f'Invalid periodic boundary conditions: {exc}') - :return: a pymatgen Structure object corresponding to this - :py:class:`StructureData ` - object. - :raise ValueError: if the cell is singular, e.g. when it has not been set. - Use `get_pymatgen_molecule` instead, or set a proper cell. - """ - return self._get_object_pymatgen_structure(**kwargs) + _validate_dimensionality(self.pbc, self.cell) - def get_pymatgen_molecule(self): - """Get the pymatgen Molecule object. + try: + # This will try to create the kinds objects + kinds = self.kinds + except ValueError as exc: + raise ValidationError(f'Unable to validate the kinds: {exc}') - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). + from collections import Counter - :return: a pymatgen Molecule object corresponding to this - :py:class:`StructureData ` - object. 
- """ - return self._get_object_pymatgen_molecule() + counts = Counter([k.name for k in kinds]) + for count in counts: + if counts[count] != 1: + raise ValidationError(f"Kind with name '{count}' appears {counts[count]} times instead of only one") - def append_kind(self, kind): - """Append a kind to the - :py:class:`StructureData `. - It makes a copy of the kind. + try: + # This will try to create the sites objects + sites = self.sites + except ValueError as exc: + raise ValidationError(f'Unable to validate the sites: {exc}') - :param kind: the site to append, must be a Kind object. - """ - from aiida.common.exceptions import ModificationNotAllowed + for site in sites: + if site.kind_name not in [k.name for k in kinds]: + raise ValidationError(f'A site has kind {site.kind_name}, but no specie with that name exists') - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + kinds_without_sites = set(k.name for k in kinds) - set(s.kind_name for s in sites) + if kinds_without_sites: + raise ValidationError( + f'The following kinds are defined, but there are no sites with that kind: {list(kinds_without_sites)}' + ) - new_kind = Kind(kind=kind) # So we make a copy + def _prepare_xsf(self, main_file_name=''): + """Write the given structure to a string of format XSF (for XCrySDen).""" + if self.is_alloy or self.has_vacancies: + raise NotImplementedError('XSF for alloys or systems with vacancies not implemented.') - if kind.name in [k.name for k in self.kinds]: - raise ValueError(f'A kind with the same name ({kind.name}) already exists.') + sites = self.sites - # If here, no exceptions have been raised, so I add the site. - self.base.attributes.all.setdefault('kinds', []).append(new_kind.get_raw()) - # Note, this is a dict (with integer keys) so it allows for empty spots! 
- if self._internal_kind_tags is None: - self._internal_kind_tags = {} + return_string = 'CRYSTAL\nPRIMVEC 1\n' + for cell_vector in self.cell: + return_string += ' '.join([f'{i:18.10f}' for i in cell_vector]) + return_string += '\n' + return_string += 'PRIMCOORD 1\n' + return_string += f'{int(len(sites))} 1\n' + for site in sites: + # I checked above that it is not an alloy, therefore I take the + # first symbol + return_string += f'{_atomic_numbers[self.get_kind(site.kind_name).symbols[0]]} ' + return_string += '%18.10f %18.10f %18.10f\n' % tuple(site.position) + return return_string.encode('utf-8'), {} - self._internal_kind_tags[len(self.base.attributes.get('kinds')) - 1] = kind._internal_tag + def _prepare_cif(self, main_file_name=''): + """Write the given structure to a string of format CIF.""" + from aiida.orm import CifData - def append_site(self, site): - """Append a site to the - :py:class:`StructureData `. - It makes a copy of the site. + cif = CifData(ase=self.get_ase()) + return cif._prepare_cif() - :param site: the site to append. It must be a Site object. - """ - from aiida.common.exceptions import ModificationNotAllowed + def _prepare_chemdoodle(self, main_file_name=''): + """Write the given structure to a string of format required by ChemDoodle.""" + from itertools import product - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + import numpy as np - new_site = Site(site=site) # So we make a copy + supercell_factors = [1, 1, 1] - if site.kind_name not in [kind.name for kind in self.kinds]: - raise ValueError( - f"No kind with name '{site.kind_name}', available kinds are: {[kind.name for kind in self.kinds]}" - ) + # Get cell vectors and atomic position + lattice_vectors = np.array(self.base.attributes.get('cell')) + base_sites = self.base.attributes.get('sites') - # If here, no exceptions have been raised, so I add the site. 
- self.base.attributes.all.setdefault('sites', []).append(new_site.get_raw()) + start1 = -int(supercell_factors[0] / 2) + start2 = -int(supercell_factors[1] / 2) + start3 = -int(supercell_factors[2] / 2) - def append_atom(self, **kwargs): - """Append an atom to the Structure, taking care of creating the - corresponding kind. + stop1 = start1 + supercell_factors[0] + stop2 = start2 + supercell_factors[1] + stop3 = start3 + supercell_factors[2] + + grid1 = range(start1, stop1) + grid2 = range(start2, stop2) + grid3 = range(start3, stop3) - :param ase: the ase Atom object from which we want to create a new atom - (if present, this must be the only parameter) - :param position: the position of the atom (three numbers in angstrom) - :param symbols: passed to the constructor of the Kind object. - :param weights: passed to the constructor of the Kind object. - :param name: passed to the constructor of the Kind object. See also the note below. + atoms_json = [] - .. note :: Note on the 'name' parameter (that is, the name of the kind): + # Manual recenter of the structure + center = (lattice_vectors[0] + lattice_vectors[1] + lattice_vectors[2]) / 2.0 - * if specified, no checks are done on existing species. Simply, - a new kind with that name is created. If there is a name - clash, a check is done: if the kinds are identical, no error - is issued; otherwise, an error is issued because you are trying - to store two different kinds with the same name. + for ix, iy, iz in product(grid1, grid2, grid3): + for base_site in base_sites: + shift = (ix * lattice_vectors[0] + iy * lattice_vectors[1] + iz * lattice_vectors[2] - center).tolist() - * if not specified, the name is automatically generated. Before - adding the kind, a check is done. If other species with the - same properties already exist, no new kinds are created, but - the site is added to the existing (identical) kind. - (Actually, the first kind that is encountered). 
- Otherwise, the name is made unique first, by adding to the string - containing the list of chemical symbols a number starting from 1, - until an unique name is found + kind_name = base_site['kind_name'] + kind_string = self.get_kind(kind_name).get_symbols_string() - .. note :: checks of equality of species are done using - the :py:meth:`~aiida.orm.nodes.data.structure.Kind.compare_with` method. - """ - aseatom = kwargs.pop('ase', None) - if aseatom is not None: - if kwargs: - raise ValueError( - "If you pass 'ase' as a parameter to " 'append_atom, you cannot pass any further' 'parameter' + atoms_json.append( + { + 'l': kind_string, + 'x': base_site['position'][0] + shift[0], + 'y': base_site['position'][1] + shift[1], + 'z': base_site['position'][2] + shift[2], + 'atomic_elements_html': atom_kinds_to_html(kind_string), + } ) - position = aseatom.position - kind = Kind(ase=aseatom) - else: - position = kwargs.pop('position', None) - if position is None: - raise ValueError('You have to specify the position of the new atom') - # all remaining parameters - kind = Kind(**kwargs) - # I look for identical species only if the name is not specified - _kinds = self.kinds + cell_json = { + 't': 'UnitCell', + 'i': 's0', + 'o': (-center).tolist(), + 'x': (lattice_vectors[0] - center).tolist(), + 'y': (lattice_vectors[1] - center).tolist(), + 'z': (lattice_vectors[2] - center).tolist(), + 'xy': (lattice_vectors[0] + lattice_vectors[1] - center).tolist(), + 'xz': (lattice_vectors[0] + lattice_vectors[2] - center).tolist(), + 'yz': (lattice_vectors[1] + lattice_vectors[2] - center).tolist(), + 'xyz': (lattice_vectors[0] + lattice_vectors[1] + lattice_vectors[2] - center).tolist(), + } - if 'name' not in kwargs: - # If the kind is identical to an existing one, I use the existing - # one, otherwise I replace it - exists_already = False - for idx, existing_kind in enumerate(_kinds): - try: - existing_kind._internal_tag = self._internal_kind_tags[idx] - except KeyError: - # 
self._internal_kind_tags does not contain any info for - # the kind in position idx: I don't have to add anything - # then, and I continue - pass - if kind.compare_with(existing_kind)[0]: - kind = existing_kind - exists_already = True - break - if not exists_already: - # There is not an identical kind. - # By default, the name of 'kind' just contains the elements. - # I then check that the name of 'kind' does not already exist, - # and if it exists I add a number (starting from 1) until I - # find a non-used name. - existing_names = [k.name for k in _kinds] - simplename = kind.name - counter = 1 - while kind.name in existing_names: - kind.name = f'{simplename}{counter}' - counter += 1 - self.append_kind(kind) - else: # 'name' was specified - old_kind = None - for existing_kind in _kinds: - if existing_kind.name == kwargs['name']: - old_kind = existing_kind - break - if old_kind is None: - self.append_kind(kind) - else: - is_the_same, firstdiff = kind.compare_with(old_kind) - if is_the_same: - kind = old_kind - else: - raise ValueError( - 'You are explicitly setting the name ' - "of the kind to '{}', that already " - 'exists, but the two kinds are different!' - ' (first difference: {})'.format(kind.name, firstdiff) - ) + return_dict = {'s': [cell_json], 'm': [{'a': atoms_json}], 'units': 'Å'} - site = Site(kind_name=kind.name, position=position) - self.append_site(site) + return json.dumps(return_dict).encode('utf-8'), {} - def clear_kinds(self): - """Removes all kinds for the StructureData object. + def _prepare_xyz(self, main_file_name=''): + """Write the given structure to a string of format XYZ.""" + if self.is_alloy or self.has_vacancies: + raise NotImplementedError('XYZ for alloys or systems with vacancies not implemented.') - .. note:: Also clear all sites! 
- """ - from aiida.common.exceptions import ModificationNotAllowed + sites = self.sites + cell = self.cell - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + return_list = [f'{len(sites)}'] + return_list.append( + 'Lattice="{} {} {} {} {} {} {} {} {}" pbc="{} {} {}"'.format( + cell[0][0], + cell[0][1], + cell[0][2], + cell[1][0], + cell[1][1], + cell[1][2], + cell[2][0], + cell[2][1], + cell[2][2], + self.pbc[0], + self.pbc[1], + self.pbc[2], + ) + ) + for site in sites: + # I checked above that it is not an alloy, therefore I take the + # first symbol + return_list.append( + '{:6s} {:18.10f} {:18.10f} {:18.10f}'.format( + self.get_kind(site.kind_name).symbols[0], site.position[0], site.position[1], site.position[2] + ) + ) - self.base.attributes.set('kinds', []) - self._internal_kind_tags = {} - self.clear_sites() + return_string = '\n'.join(return_list) + return return_string.encode('utf-8'), {} - def clear_sites(self): - """Removes all sites for the StructureData object.""" - from aiida.common.exceptions import ModificationNotAllowed + def _parse_xyz(self, inputstring): + """Read the structure from a string of format XYZ.""" + from aiida.tools.data.structure import xyz_parser_iterator - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + # idiom to get to the last block + atoms = None + for _, _, atoms in xyz_parser_iterator(inputstring): + pass - self.base.attributes.set('sites', []) + if atoms is None: + raise TypeError('The data does not contain any XYZ data') - @property - def sites(self): - """Returns a list of sites.""" - try: - raw_sites = self.base.attributes.get('sites') - except AttributeError: - raw_sites = [] - return [Site(raw=i) for i in raw_sites] + self.clear_kinds() + self.pbc = (False, False, False) - @property - def kinds(self): - """Returns a list of kinds.""" - try: - raw_kinds = 
self.base.attributes.get('kinds') - except AttributeError: - raw_kinds = [] - return [Kind(raw=i) for i in raw_kinds] + for sym, position in atoms: + self.append_atom(symbols=sym, position=position) - def get_kind(self, kind_name): - """Return the kind object associated with the given kind name. + def _adjust_default_cell(self, vacuum_factor=1.0, vacuum_addition=10.0, pbc=(False, False, False)): + """If the structure was imported from an xyz file, it lacks a cell. + This method will adjust the cell + """ + import numpy as np - :param kind_name: String, the name of the kind you want to get + def get_extremas_from_positions(positions): + """Returns the minimum and maximum value for each dimension in the positions given""" + return list(zip(*[(min(values), max(values)) for values in zip(*positions)])) - :return: The Kind object associated with the given kind_name, if - a Kind with the given name is present in the structure. + # Calculating the minimal cell: + positions = np.array([site.position for site in self.sites]) + position_min, _ = get_extremas_from_positions(positions) - :raise: ValueError if the kind_name is not present. 
- """ - # Cache the kinds, if stored, for efficiency - if self.is_stored: - try: - kinds_dict = self._kinds_cache - except AttributeError: - self._kinds_cache = {_.name: _ for _ in self.kinds} - kinds_dict = self._kinds_cache - else: - kinds_dict = {_.name: _ for _ in self.kinds} + # Translate the structure to the origin, such that the minimal values in each dimension + # amount to (0,0,0) + positions -= position_min + for index, site in enumerate(self.base.attributes.get('sites')): + site['position'] = list(positions[index]) - # Will raise ValueError if the kind is not present - try: - return kinds_dict[kind_name] - except KeyError: - raise ValueError(f"Kind name '{kind_name}' unknown") + # The orthorhombic cell that (just) accommodates the whole structure is now given by the + # extremas of position in each dimension: + minimal_orthorhombic_cell_dimensions = np.array(get_extremas_from_positions(positions)[1]) + minimal_orthorhombic_cell_dimensions = np.dot(vacuum_factor, minimal_orthorhombic_cell_dimensions) + minimal_orthorhombic_cell_dimensions += vacuum_addition - def get_kind_names(self): - """Return a list of kind names (in the same order of the ``self.kinds`` - property, but return the names rather than Kind objects) + # Transform the vector (a, b, c ) to [[a,0,0], [0,b,0], [0,0,c]] + newcell = np.diag(minimal_orthorhombic_cell_dimensions) + self.set_cell(newcell.tolist()) - .. note:: This is NOT necessarily a list of chemical symbols! Use - get_symbols_set for chemical symbols + # Now set PBC (checks are done in set_pbc, no need to check anything here) + self.set_pbc(pbc) - :return: a list of strings.
+ return self + + def get_description(self): + """Returns a string with info retrieved from StructureData node's properties + + :param self: the StructureData node + :return: the description string """ - return [k.name for k in self.kinds] + return self.get_formula(mode='hill_compact') - @property - def cell(self) -> t.List[t.List[float]]: - """Returns the cell shape. + def get_symbols_set(self): + """Return a set containing the names of all elements involved in + this structure (i.e., for it joins the list of symbols for each + kind k in the structure). - :return: a 3x3 list of lists. + :returns: a set of strings of element names. """ - return copy.deepcopy(self.base.attributes.get('cell')) + return set(itertools.chain.from_iterable(kind.symbols for kind in self.kinds)) - @cell.setter - def cell(self, value): - """Set the cell.""" - self.set_cell(value) + def get_formula(self, mode='hill', separator=''): + """Return a string with the chemical formula. - def set_cell(self, value): - """Set the cell.""" - from aiida.common.exceptions import ModificationNotAllowed + :param mode: a string to specify how to generate the formula, can + assume one of the following values: - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + * 'hill' (default): count the number of atoms of each species, + then use Hill notation, i.e. alphabetical order with C and H + first if one or several C atom(s) is (are) present, e.g. + ``['C','H','H','H','O','C','H','H','H']`` will return ``'C2H6O'`` + ``['S','O','O','H','O','H','O']`` will return ``'H2O4S'`` + From E. A. Hill, J. Am. Chem. Soc., 22 (8), pp 478-494 (1900) - the_cell = _get_valid_cell(value) - self.base.attributes.set('cell', the_cell) + * 'hill_compact': same as hill but the number of atoms for each + species is divided by the greatest common divisor of all of them, e.g.
+ ``['C','H','H','H','O','C','H','H','H','O','O','O']`` + will return ``'CH3O2'`` - def reset_cell(self, new_cell): - """Reset the cell of a structure not yet stored to a new value. + * 'reduce': group repeated symbols e.g. + ``['Ba', 'Ti', 'O', 'O', 'O', 'Ba', 'Ti', 'O', 'O', 'O', + 'Ba', 'Ti', 'Ti', 'O', 'O', 'O']`` will return ``'BaTiO3BaTiO3BaTi2O3'`` - :param new_cell: list specifying the cell vectors + * 'group': will try to group as much as possible parts of the formula + e.g. + ``['Ba', 'Ti', 'O', 'O', 'O', 'Ba', 'Ti', 'O', 'O', 'O', + 'Ba', 'Ti', 'Ti', 'O', 'O', 'O']`` will return ``'(BaTiO3)2BaTi2O3'`` - :raises: - ModificationNotAllowed: if object is already stored - """ - from aiida.common.exceptions import ModificationNotAllowed + * 'count': same as hill (i.e. one just counts the number + of atoms of each species) without the re-ordering (take the + order of the atomic sites), e.g. + ``['Ba', 'Ti', 'O', 'O', 'O','Ba', 'Ti', 'O', 'O', 'O']`` + will return ``'Ba2Ti2O6'`` - if self.is_stored: - raise ModificationNotAllowed() + * 'count_compact': same as count but the number of atoms + for each species is divided by the greatest common divisor of + all of them, e.g. + ``['Ba', 'Ti', 'O', 'O', 'O','Ba', 'Ti', 'O', 'O', 'O']`` + will return ``'BaTiO3'`` - self.base.attributes.set('cell', new_cell) + :param separator: a string used to concatenate symbols. Default empty. - def reset_sites_positions(self, new_positions, conserve_particle=True): - """Replace all the Site positions attached to the Structure + :return: a string with the formula - :param new_positions: list of (3D) positions for every sites. + .. note:: in modes reduce, group, count and count_compact, the + initial order in which the atoms were appended by the user is + used to group and/or order the symbols in the formula + """ + symbol_list = [self.get_kind(s.kind_name).get_symbols_string() for s in self.sites] - :param conserve_particle: if True, allows the possibility of removing a site. 
- currently not implemented. + return get_formula(symbol_list, mode=mode, separator=separator) - :raises aiida.common.ModificationNotAllowed: if object is stored already - :raises ValueError: if positions are invalid + def get_site_kindnames(self): + """Return a list with length equal to the number of sites of this structure, + where each element of the list is the kind name of the corresponding site. - .. note:: it is assumed that the order of the new_positions is - given in the same order of the one it's substituting, i.e. the - kind of the site will not be checked. + .. note:: This is NOT necessarily a list of chemical symbols! Use + ``[ self.get_kind(s.kind_name).get_symbols_string() for s in self.sites]`` + for chemical symbols + + :return: a list of strings """ - from aiida.common.exceptions import ModificationNotAllowed + return [this_site.kind_name for this_site in self.sites] - if self.is_stored: - raise ModificationNotAllowed() + def get_composition(self, mode='full'): + """Returns the chemical composition of this structure as a dictionary, + where each key is the kind symbol (e.g. H, Li, Ba), + and each value is the number of occurences of that element in this + structure. - if not conserve_particle: - raise NotImplementedError - else: - # test consistency of th enew input - n_sites = len(self.sites) - if n_sites != len(new_positions) and conserve_particle: - raise ValueError('the new positions should be as many as the previous structure.') + :param mode: Specify the mode of the composition to return. Choose from ``full``, ``reduced`` or ``fractional``. + For example, given the structure with formula Ba2Zr2O6, the various modes operate as follows. + ``full``: The default, the counts are left unnnormalized. + ``reduced``: The counts are renormalized to the greatest common denominator. + ``fractional``: The counts are renormalized such that the sum equals 1. 
- new_sites = [] - for i in range(n_sites): - try: - this_pos = [float(j) for j in new_positions[i]] - except ValueError: - raise ValueError(f'Expecting a list of floats. Found instead {new_positions[i]}') + :returns: a dictionary with the composition + """ + import numpy as np - if len(this_pos) != 3: - raise ValueError(f'Expecting a list of lists of length 3. found instead {len(this_pos)}') + symbols_list = [self.get_kind(s.kind_name).get_symbols_string() for s in self.sites] + symbols_set = set(symbols_list) - # now append this Site to the new_site list. - new_site = Site(site=self.sites[i]) # So we make a copy - new_site.position = copy.deepcopy(this_pos) - new_sites.append(new_site) + if mode == 'full': + return {symbol: symbols_list.count(symbol) for symbol in symbols_set} - # now clear the old sites, and substitute with the new ones - self.clear_sites() - for this_new_site in new_sites: - self.append_site(this_new_site) + if mode == 'reduced': + gcd = np.gcd.reduce([symbols_list.count(symbol) for symbol in symbols_set]) + return {symbol: (symbols_list.count(symbol) / gcd) for symbol in symbols_set} - @property - def pbc1(self): - return self.base.attributes.get('pbc1') + if mode == 'fractional': + sum_comp = sum(symbols_list.count(symbol) for symbol in symbols_set) + return {symbol: symbols_list.count(symbol) / sum_comp for symbol in symbols_set} - @property - def pbc2(self): - return self.base.attributes.get('pbc2') + raise ValueError(f'mode `{mode}` is invalid, choose from `full`, `reduced` or `fractional`.') - @property - def pbc3(self): - return self.base.attributes.get('pbc3') + def get_ase(self): + """Get the ASE object. + Requires to be able to import ase. - @property - def pbc(self): - """Get the periodic boundary conditions. + :return: an ASE object corresponding to this + :py:class:`StructureData ` + object. 
- :return: a tuple of three booleans, each one tells if there are periodic - boundary conditions for the i-th real-space direction (i=1,2,3) + .. note:: If any site is an alloy or has vacancies, a ValueError + is raised (from the site.get_ase() routine). """ - # return copy.deepcopy(self._pbc) - return (self.base.attributes.get('pbc1'), self.base.attributes.get('pbc2'), self.base.attributes.get('pbc3')) - - @pbc.setter - def pbc(self, value): - """Set the periodic boundary conditions.""" - self.set_pbc(value) - - def set_pbc(self, value): - """Set the periodic boundary conditions.""" - from aiida.common.exceptions import ModificationNotAllowed - - if self.is_stored: - raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - the_pbc = get_valid_pbc(value) + return self._get_object_ase() - # self._pbc = the_pbc - self.base.attributes.set('pbc1', the_pbc[0]) - self.base.attributes.set('pbc2', the_pbc[1]) - self.base.attributes.set('pbc3', the_pbc[2]) + def get_pymatgen(self, **kwargs): + """Get pymatgen object. Returns pymatgen Structure for structures with periodic boundary conditions + (in 1D, 2D, 3D) and Molecule otherwise. + :param add_spin: True to add the spins to the pymatgen structure. + Default is False (no spin added). - @property - def cell_lengths(self): - """Get the lengths of cell lattice vectors in angstroms.""" - import numpy + .. note:: The spins are set according to the following rule: - cell = self.cell - return [ - numpy.linalg.norm(cell[0]), - numpy.linalg.norm(cell[1]), - numpy.linalg.norm(cell[2]), - ] + * if the kind name ends with 1 -> spin=+1 - @cell_lengths.setter - def cell_lengths(self, value): - self.set_cell_lengths(value) + * if the kind name ends with 2 -> spin=-1 - def set_cell_lengths(self, value): - raise NotImplementedError('Modification is not implemented yet') + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). 
+ """ + return self._get_object_pymatgen(**kwargs) - @property - def cell_angles(self): - """Get the angles between the cell lattice vectors in degrees.""" - import numpy + def get_pymatgen_structure(self, **kwargs): + """Get the pymatgen Structure object with any PBC, provided the cell is not singular. + :param add_spin: True to add the spins to the pymatgen structure. + Default is False (no spin added). - cell = self.cell - lengths = self.cell_lengths - return [ - float(numpy.arccos(x) / numpy.pi * 180) - for x in [ - numpy.vdot(cell[1], cell[2]) / lengths[1] / lengths[2], - numpy.vdot(cell[0], cell[2]) / lengths[0] / lengths[2], - numpy.vdot(cell[0], cell[1]) / lengths[0] / lengths[1], - ] - ] + .. note:: The spins are set according to the following rule: - @cell_angles.setter - def cell_angles(self, value): - self.set_cell_angles(value) + * if the kind name ends with 1 -> spin=+1 - def set_cell_angles(self, value): - raise NotImplementedError('Modification is not implemented yet') + * if the kind name ends with 2 -> spin=-1 - @property - def is_alloy(self): - """Return whether the structure contains any alloy kinds. + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). - :return: a boolean, True if at least one kind is an alloy + :return: a pymatgen Structure object corresponding to this + :py:class:`StructureData ` + object. + :raise ValueError: if the cell is singular, e.g. when it has not been set. + Use `get_pymatgen_molecule` instead, or set a proper cell. """ - return any(kind.is_alloy for kind in self.kinds) + return self._get_object_pymatgen_structure(**kwargs) - @property - def has_vacancies(self): - """Return whether the structure has vacancies in the structure. + def get_pymatgen_molecule(self): + """Get the pymatgen Molecule object. + + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). 
- :return: a boolean, True if at least one kind has a vacancy + :return: a pymatgen Molecule object corresponding to this + :py:class:`StructureData ` + object. """ - return any(kind.has_vacancies for kind in self.kinds) - - def get_cell_volume(self): - """Returns the three-dimensional cell volume in Angstrom^3. + return self._get_object_pymatgen_molecule() - Use the `get_dimensionality` method in order to get the area/length of lower-dimensional cells. + def append_kind(self, kind): + """Append a kind to the + :py:class:`StructureData `. + It makes a copy of the kind. - :return: a float. + :param kind: the site to append, must be a Kind object. """ - return calc_cell_volume(self.cell) + from aiida.common.exceptions import ModificationNotAllowed - def get_cif(self, converter='ase', store=False, **kwargs): - """Creates :py:class:`aiida.orm.nodes.data.cif.CifData`. + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - :param converter: specify the converter. Default 'ase'. - :param store: If True, intermediate calculation gets stored in the - AiiDA database for record. Default False. - :return: :py:class:`aiida.orm.nodes.data.cif.CifData` node. - """ - from aiida.tools.data import structure as structure_tools + new_kind = Kind(kind=kind) # So we make a copy - from .dict import Dict + if kind.name in [k.name for k in self.kinds]: + raise ValueError(f'A kind with the same name ({kind.name}) already exists.') - param = Dict(kwargs) - try: - conv_f = getattr(structure_tools, f'_get_cif_{converter}_inline') - except AttributeError: - raise ValueError(f"No such converter '{converter}' available") - ret_dict = conv_f(struct=self, parameters=param, metadata={'store_provenance': store}) - return ret_dict['cif'] + # If here, no exceptions have been raised, so I add the site. 
+ self.base.attributes.all.setdefault('kinds', []).append(new_kind.get_raw()) + # Note, this is a dict (with integer keys) so it allows for empty spots! + if self._internal_kind_tags is None: + self._internal_kind_tags = {} - def _get_object_phonopyatoms(self): - """Converts StructureData to PhonopyAtoms + self._internal_kind_tags[len(self.base.attributes.get('kinds')) - 1] = kind._internal_tag - :return: a PhonopyAtoms object - """ - from phonopy.structure.atoms import PhonopyAtoms + def append_site(self, site): + """Append a site to the + :py:class:`StructureData `. + It makes a copy of the site. - atoms = PhonopyAtoms(symbols=[_.kind_name for _ in self.sites]) - # Phonopy internally uses scaled positions, so you must store cell first! - atoms.set_cell(self.cell) - atoms.set_positions([_.position for _ in self.sites]) + :param site: the site to append. It must be a Site object. + """ + from aiida.common.exceptions import ModificationNotAllowed - return atoms + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - def _get_object_ase(self): - """Converts - :py:class:`StructureData ` - to ase.Atoms + new_site = Site(site=site) # So we make a copy - :return: an ase.Atoms object - """ - import ase + if site.kind_name not in [kind.name for kind in self.kinds]: + raise ValueError( + f"No kind with name '{site.kind_name}', available kinds are: {[kind.name for kind in self.kinds]}" + ) - asecell = ase.Atoms(cell=self.cell, pbc=self.pbc) - _kinds = self.kinds + # If here, no exceptions have been raised, so I add the site. + self.base.attributes.all.setdefault('sites', []).append(new_site.get_raw()) - for site in self.sites: - asecell.append(site.get_ase(kinds=_kinds)) - return asecell + def append_atom(self, **kwargs): + """Append an atom to the Structure, taking care of creating the + corresponding kind. 
- def _get_object_pymatgen(self, **kwargs): - """Converts - :py:class:`StructureData ` - to pymatgen object + :param ase: the ase Atom object from which we want to create a new atom + (if present, this must be the only parameter) + :param position: the position of the atom (three numbers in angstrom) + :param symbols: passed to the constructor of the Kind object. + :param weights: passed to the constructor of the Kind object. + :param name: passed to the constructor of the Kind object. See also the note below. - :return: a pymatgen Structure for structures with periodic boundary - conditions (in three dimensions) and Molecule otherwise + .. note :: Note on the 'name' parameter (that is, the name of the kind): - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors). - """ - if any(self.pbc): - return self._get_object_pymatgen_structure(**kwargs) + * if specified, no checks are done on existing species. Simply, + a new kind with that name is created. If there is a name + clash, a check is done: if the kinds are identical, no error + is issued; otherwise, an error is issued because you are trying + to store two different kinds with the same name. - return self._get_object_pymatgen_molecule(**kwargs) + * if not specified, the name is automatically generated. Before + adding the kind, a check is done. If other species with the + same properties already exist, no new kinds are created, but + the site is added to the existing (identical) kind. + (Actually, the first kind that is encountered). + Otherwise, the name is made unique first, by adding to the string + containing the list of chemical symbols a number starting from 1, + until an unique name is found - def _get_object_pymatgen_structure(self, **kwargs): - """Converts - :py:class:`StructureData ` - to pymatgen Structure object - :param add_spin: True to add the spins to the pymatgen structure. - Default is False (no spin added). + .. 
note :: checks of equality of species are done using + the :py:meth:`~aiida.orm.nodes.data.structure.Kind.compare_with` method. + """ + aseatom = kwargs.pop('ase', None) + if aseatom is not None: + if kwargs: + raise ValueError( + "If you pass 'ase' as a parameter to " 'append_atom, you cannot pass any further' 'parameter' + ) + position = aseatom.position + kind = Kind(ase=aseatom) + else: + position = kwargs.pop('position', None) + if position is None: + raise ValueError('You have to specify the position of the new atom') + # all remaining parameters + kind = Kind(**kwargs) - .. note:: The spins are set according to the following rule: + # I look for identical species only if the name is not specified + _kinds = self.kinds - * if the kind name ends with 1 -> spin=+1 + if 'name' not in kwargs: + # If the kind is identical to an existing one, I use the existing + # one, otherwise I replace it + exists_already = False + for idx, existing_kind in enumerate(_kinds): + try: + existing_kind._internal_tag = self._internal_kind_tags[idx] + except KeyError: + # self._internal_kind_tags does not contain any info for + # the kind in position idx: I don't have to add anything + # then, and I continue + pass + if kind.compare_with(existing_kind)[0]: + kind = existing_kind + exists_already = True + break + if not exists_already: + # There is not an identical kind. + # By default, the name of 'kind' just contains the elements. + # I then check that the name of 'kind' does not already exist, + # and if it exists I add a number (starting from 1) until I + # find a non-used name. 
+ existing_names = [k.name for k in _kinds] + simplename = kind.name + counter = 1 + while kind.name in existing_names: + kind.name = f'{simplename}{counter}' + counter += 1 + self.append_kind(kind) + else: # 'name' was specified + old_kind = None + for existing_kind in _kinds: + if existing_kind.name == kwargs['name']: + old_kind = existing_kind + break + if old_kind is None: + self.append_kind(kind) + else: + is_the_same, firstdiff = kind.compare_with(old_kind) + if is_the_same: + kind = old_kind + else: + raise ValueError( + 'You are explicitly setting the name ' + "of the kind to '{}', that already " + 'exists, but the two kinds are different!' + ' (first difference: {})'.format(kind.name, firstdiff) + ) - * if the kind name ends with 2 -> spin=-1 + site = Site(kind_name=kind.name, position=position) + self.append_site(site) - :return: a pymatgen Structure object corresponding to this - :py:class:`StructureData ` - object - :raise ValueError: if the cell is not set (i.e. is the default one); - if there are partial occupancies together with spins - (defined by kind names ending with '1' or '2'). + def clear_kinds(self): + """Removes all kinds for the StructureData object. - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors) + .. note:: Also clear all sites! 
""" - from pymatgen.core.lattice import Lattice - from pymatgen.core.structure import Structure - - species = [] - additional_kwargs = {} + from aiida.common.exceptions import ModificationNotAllowed - lattice = Lattice(matrix=self.cell, pbc=self.pbc) + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - if kwargs.pop('add_spin', False) and any(n.endswith('1') or n.endswith('2') for n in self.get_kind_names()): - # case when spins are defined -> no partial occupancy allowed - from pymatgen.core.periodic_table import Specie + self.base.attributes.set('kinds', []) + self._internal_kind_tags = {} + self.clear_sites() - oxidation_state = 0 # now I always set the oxidation_state to zero - for site in self.sites: - kind = self.get_kind(site.kind_name) - if len(kind.symbols) != 1 or (len(kind.weights) != 1 or sum(kind.weights) < 1.0): - raise ValueError('Cannot set partial occupancies and spins at the same time') - spin = -1 if kind.name.endswith('1') else 1 if kind.name.endswith('2') else 0 - try: - specie = Specie(kind.symbols[0], oxidation_state, properties={'spin': spin}) - except TypeError: - # As of v2023.9.2, the ``properties`` argument is removed and the ``spin`` argument should be used. - # See: https://github.com/materialsproject/pymatgen/commit/118c245d6082fe0b13e19d348fc1db9c0d512019 - # The ``spin`` argument was introduced in v2023.6.28. 
- # See: https://github.com/materialsproject/pymatgen/commit/9f2b3939af45d5129e0778d371d814811924aeb6 - specie = Specie(kind.symbols[0], oxidation_state, spin=spin) - species.append(specie) - else: - # case when no spin are defined - for site in self.sites: - kind = self.get_kind(site.kind_name) - species.append(dict(zip(kind.symbols, kind.weights))) - if any( - create_automatic_kind_name(self.get_kind(name).symbols, self.get_kind(name).weights) != name - for name in self.get_site_kindnames() - ): - # add "kind_name" as a properties to each site, whenever - # the kind_name cannot be automatically obtained from the symbols - additional_kwargs['site_properties'] = {'kind_name': self.get_site_kindnames()} + def clear_sites(self): + """Removes all sites for the StructureData object.""" + from aiida.common.exceptions import ModificationNotAllowed - if kwargs: - raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - positions = [list(x.position) for x in self.sites] + self.base.attributes.set('sites', []) + @property + def sites(self): + """Returns a list of sites.""" try: - return Structure(lattice, species, positions, coords_are_cartesian=True, **additional_kwargs) - except ValueError as err: - raise ValueError('Singular cell detected. 
Probably the cell was not set?') from err + raw_sites = self.base.attributes.get('sites') + except AttributeError: + raw_sites = [] + return [Site(raw=i) for i in raw_sites] - def _get_object_pymatgen_molecule(self, **kwargs): - """Converts - :py:class:`StructureData ` - to pymatgen Molecule object + @property + def kinds(self): + """Returns a list of kinds.""" + try: + raw_kinds = self.base.attributes.get('kinds') + except AttributeError: + raw_kinds = [] + return [Kind(raw=i) for i in raw_kinds] - :return: a pymatgen Molecule object corresponding to this - :py:class:`StructureData ` - object. + def get_kind(self, kind_name): + """Return the kind object associated with the given kind name. - .. note:: Requires the pymatgen module (version >= 3.0.13, usage - of earlier versions may cause errors) - """ - from pymatgen.core.structure import Molecule + :param kind_name: String, the name of the kind you want to get - if kwargs: - raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') + :return: The Kind object associated with the given kind_name, if + a Kind with the given name is present in the structure. - species = [] - for site in self.sites: - kind = self.get_kind(site.kind_name) - species.append(dict(zip(kind.symbols, kind.weights))) + :raise: ValueError if the kind_name is not present. 
+ """ + # Cache the kinds, if stored, for efficiency + if self.is_stored: + try: + kinds_dict = self._kinds_cache + except AttributeError: + self._kinds_cache = {_.name: _ for _ in self.kinds} + kinds_dict = self._kinds_cache + else: + kinds_dict = {_.name: _ for _ in self.kinds} - positions = [list(site.position) for site in self.sites] - return Molecule(species, positions) + # Will raise ValueError if the kind is not present + try: + return kinds_dict[kind_name] + except KeyError: + raise ValueError(f"Kind name '{kind_name}' unknown") + def get_kind_names(self): + """Return a list of kind names (in the same order of the ``self.kinds`` + property, but return the names rather than Kind objects) -class Kind: - """This class contains the information about the species (kinds) of the system. + .. note:: This is NOT necessarily a list of chemical symbols! Use + get_symbols_set for chemical symbols - It can be a single atom, or an alloy, or even contain vacancies. - """ + :return: a list of strings. + """ + return [k.name for k in self.kinds] - def __init__(self, **kwargs): - """Create a site. - One can either pass: + @property + def cell(self) -> list[list[float]]: + """Returns the cell shape. - :param raw: the raw python dictionary that will be converted to a - Kind object. - :param ase: an ase Atom object - :param kind: a Kind object (to get a copy) + :return: a 3x3 list of lists. + """ + return copy.deepcopy(self.base.attributes.get('cell')) - Or alternatively the following parameters: + @cell.setter + def cell(self, value): + """Set the cell.""" + self.set_cell(value) - :param symbols: a single string for the symbol of this site, or a list - of symbol strings - :param weights: (optional) the weights for each atomic species of - this site. - If only a single symbol is provided, then this value is - optional and the weight is set to 1. - :param mass: (optional) the mass for this site in atomic mass units. 
- If not provided, the mass is set by the - self.reset_mass() function. - :param name: a string that uniquely identifies the kind, and that - is used to identify the sites. - """ - # Internal variables - self._mass = None - self._symbols = None - self._weights = None - self._name = None + def set_cell(self, value): + """Set the cell.""" + from aiida.common.exceptions import ModificationNotAllowed - # It will be remain to None in general; it is used to further - # identify this species. At the moment, it is used only when importing - # from ASE, if the species had a tag (different from zero). - ## NOTE! This is not persisted on DB but only used while the class - # is loaded in memory (i.e., it is not output with the get_raw() method) - self._internal_tag = None + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') - # Logic to create the site from the raw format - if 'raw' in kwargs: - if len(kwargs) != 1: - raise ValueError("If you pass 'raw', then you cannot pass any other parameter.") + the_cell = _get_valid_cell(value) + self.base.attributes.set('cell', the_cell) - raw = kwargs['raw'] + def reset_cell(self, new_cell): + """Reset the cell of a structure not yet stored to a new value. 
- try: - self.set_symbols_and_weights(raw['symbols'], raw['weights']) - except KeyError: - raise ValueError("You didn't specify either 'symbols' or 'weights' in the raw site data.") - try: - self.mass = raw['mass'] - except KeyError: - raise ValueError("You didn't specify the site mass in the raw site data.") + :param new_cell: list specifying the cell vectors - try: - self.name = raw['name'] - except KeyError: - raise ValueError("You didn't specify the name in the raw site data.") + :raises: + ModificationNotAllowed: if object is already stored + """ + from aiida.common.exceptions import ModificationNotAllowed - elif 'kind' in kwargs: - if len(kwargs) != 1: - raise ValueError("If you pass 'kind', then you cannot pass any other parameter.") - oldkind = kwargs['kind'] + if self.is_stored: + raise ModificationNotAllowed() - try: - self.set_symbols_and_weights(oldkind.symbols, oldkind.weights) - self.mass = oldkind.mass - self.name = oldkind.name - self._internal_tag = oldkind._internal_tag - except AttributeError: - raise ValueError( - 'Error using the Kind object. Are you sure ' - 'it is a Kind object? [Introspection says it is ' - '{}]'.format(str(type(oldkind))) - ) + self.base.attributes.set('cell', new_cell) - elif 'ase' in kwargs: - aseatom = kwargs['ase'] - if len(kwargs) != 1: - raise ValueError("If you pass 'ase', then you cannot pass any other parameter.") + def reset_sites_positions(self, new_positions, conserve_particle=True): + """Replace all the Site positions attached to the Structure - try: - import numpy + :param new_positions: list of (3D) positions for every sites. - self.set_symbols_and_weights([aseatom.symbol], [1.0]) - # ASE sets mass to numpy.nan for unstable species - if not numpy.isnan(aseatom.mass): - self.mass = aseatom.mass - else: - self.reset_mass() - except AttributeError: - raise ValueError( - 'Error using the aseatom object. Are you sure ' - 'it is a ase.atom.Atom object? 
[Introspection says it is ' - '{}]'.format(str(type(aseatom))) - ) - if aseatom.tag != 0: - self.set_automatic_kind_name(tag=aseatom.tag) - self._internal_tag = aseatom.tag - else: - self.set_automatic_kind_name() - else: - if 'symbols' not in kwargs: - raise ValueError( - "'symbols' need to be " - 'specified (at least) to create a Site object. Otherwise, ' - "pass a raw site using the 'raw' parameter." - ) - weights = kwargs.pop('weights', None) - self.set_symbols_and_weights(kwargs.pop('symbols'), weights) - try: - self.mass = kwargs.pop('mass') - except KeyError: - self.reset_mass() - try: - self.name = kwargs.pop('name') - except KeyError: - self.set_automatic_kind_name() - if kwargs: - raise ValueError(f'Unrecognized parameters passed to Kind constructor: {kwargs.keys()}') + :param conserve_particle: if True, allows the possibility of removing a site. + currently not implemented. - def get_raw(self): - """Return the raw version of the site, mapped to a suitable dictionary. - This is the format that is actually used to store each kind of the - structure in the DB. + :raises aiida.common.ModificationNotAllowed: if object is stored already + :raises ValueError: if positions are invalid - :return: a python dictionary with the kind. + .. note:: it is assumed that the order of the new_positions is + given in the same order of the one it's substituting, i.e. the + kind of the site will not be checked. """ - return { - 'symbols': self.symbols, - 'weights': self.weights, - 'mass': self.mass, - 'name': self.name, - } + from aiida.common.exceptions import ModificationNotAllowed - def reset_mass(self): - """Reset the mass to the automatic calculated value. + if self.is_stored: + raise ModificationNotAllowed() - The mass can be set manually; by default, if not provided, - it is the mass of the constituent atoms, weighted with their - weight (after the weight has been normalized to one to take - correctly into account vacancies). 
+ if not conserve_particle: + raise NotImplementedError + else: + # test consistency of th enew input + n_sites = len(self.sites) + if n_sites != len(new_positions) and conserve_particle: + raise ValueError('the new positions should be as many as the previous structure.') - This function uses the internal _symbols and _weights values and - thus assumes that the values are validated. + new_sites = [] + for i in range(n_sites): + try: + this_pos = [float(j) for j in new_positions[i]] + except ValueError: + raise ValueError(f'Expecting a list of floats. Found instead {new_positions[i]}') - It sets the mass to None if the sum of weights is zero. - """ - w_sum = sum(self._weights) + if len(this_pos) != 3: + raise ValueError(f'Expecting a list of lists of length 3. found instead {len(this_pos)}') + + # now append this Site to the new_site list. + new_site = Site(site=self.sites[i]) # So we make a copy + new_site.position = copy.deepcopy(this_pos) + new_sites.append(new_site) - if abs(w_sum) < _SUM_THRESHOLD: - self._mass = None - return + # now clear the old sites, and substitute with the new ones + self.clear_sites() + for this_new_site in new_sites: + self.append_site(this_new_site) - normalized_weights = (i / w_sum for i in self._weights) - element_masses = (_atomic_masses[sym] for sym in self._symbols) - # Weighted mass - self._mass = sum(i * j for i, j in zip(normalized_weights, element_masses)) + @property + def pbc1(self): + return self.base.attributes.get('pbc1') @property - def name(self): - """Return the name of this kind. - The name of a kind is used to identify the species of a site. + def pbc2(self): + return self.base.attributes.get('pbc2') - :return: a string - """ - return self._name + @property + def pbc3(self): + return self.base.attributes.get('pbc3') - @name.setter - def name(self, value): - """Set the name of this site (a string).""" - self._name = str(value) + @property + def pbc(self): + """Get the periodic boundary conditions. 
- def set_automatic_kind_name(self, tag=None): - """Set the type to a string obtained with the symbols appended one - after the other, without spaces, in alphabetical order; - if the site has a vacancy, a X is appended at the end too. + :return: a tuple of three booleans, each one tells if there are periodic + boundary conditions for the i-th real-space direction (i=1,2,3) """ - name_string = create_automatic_kind_name(self.symbols, self.weights) - if tag is None: - self.name = name_string - else: - self.name = f'{name_string}{tag}' - - def compare_with(self, other_kind): - """Compare with another Kind object to check if they are different. - - .. note:: This does NOT check the 'type' attribute. Instead, it compares - (with reasonable thresholds, where applicable): the mass, and the list - of symbols and of weights. Moreover, it compares the - ``_internal_tag``, if defined (at the moment, defined automatically - only when importing the Kind from ASE, if the atom has a non-zero tag). - Note that the _internal_tag is only used while the class is loaded, - but is not persisted on the database. + # return copy.deepcopy(self._pbc) + return (self.base.attributes.get('pbc1'), self.base.attributes.get('pbc2'), self.base.attributes.get('pbc3')) - :return: A tuple with two elements. The first one is True if the two sites - are 'equivalent' (same mass, symbols and weights), False otherwise. - The second element of the tuple is a string, - which is either None (if the first element was True), or contains - a 'human-readable' description of the first difference encountered - between the two sites. 
- """ - # Check length of symbols - if len(self.symbols) != len(other_kind.symbols): - return (False, 'Different length of symbols list') + @pbc.setter + def pbc(self, value): + """Set the periodic boundary conditions.""" + self.set_pbc(value) - # Check list of symbols - for i, symbol in enumerate(self.symbols): - if symbol != other_kind.symbols[i]: - return (False, f'Symbol at position {i + 1:d} are different ({symbol} vs. {other_kind.symbols[i]})') - # Check weights (assuming length of weights and of symbols have same - # length, which should be always true - for i, weight in enumerate(self.weights): - if weight != other_kind.weights[i]: - return (False, f'Weight at position {i + 1:d} are different ({weight} vs. {other_kind.weights[i]})') - # Check masses - if abs(self.mass - other_kind.mass) > _MASS_THRESHOLD: - return (False, f'Masses are different ({self.mass} vs. {other_kind.mass})') + def set_pbc(self, value): + """Set the periodic boundary conditions.""" + from aiida.common.exceptions import ModificationNotAllowed - if self._internal_tag != other_kind._internal_tag: - return (False, f'Internal tags are different ({self._internal_tag} vs. {other_kind._internal_tag})') + if self.is_stored: + raise ModificationNotAllowed('The StructureData object cannot be modified, it has already been stored') + the_pbc = get_valid_pbc(value) - # If we got here, the two Site objects are similar enough - # to be considered of the same kind - return (True, '') + # self._pbc = the_pbc + self.base.attributes.set('pbc1', the_pbc[0]) + self.base.attributes.set('pbc2', the_pbc[1]) + self.base.attributes.set('pbc3', the_pbc[2]) @property - def mass(self): - """The mass of this species kind. 
+ def cell_lengths(self): + """Get the lengths of cell lattice vectors in angstroms.""" + import numpy - :return: a float - """ - return self._mass + cell = self.cell + return [ + numpy.linalg.norm(cell[0]), + numpy.linalg.norm(cell[1]), + numpy.linalg.norm(cell[2]), + ] - @mass.setter - def mass(self, value): - the_mass = float(value) - if the_mass <= 0: - raise ValueError('The mass must be positive.') - self._mass = the_mass + @cell_lengths.setter + def cell_lengths(self, value): + self.set_cell_lengths(value) - @property - def weights(self): - """Weights for this species kind. Refer also to - :func:validate_symbols_tuple for the validation rules on the weights. - """ - return copy.deepcopy(self._weights) + def set_cell_lengths(self, value): + raise NotImplementedError('Modification is not implemented yet') - @weights.setter - def weights(self, value): - """If value is a number, a single weight is used. Otherwise, a list or - tuple of numbers is expected. - None is also accepted, corresponding to the list [1.]. - """ - weights_tuple = _create_weights_tuple(value) + @property + def cell_angles(self): + """Get the angles between the cell lattice vectors in degrees.""" + import numpy - if len(weights_tuple) != len(self._symbols): - raise ValueError( - 'Cannot change the number of weights. Use the ' 'set_symbols_and_weights function instead.' - ) - validate_weights_tuple(weights_tuple, _SUM_THRESHOLD) + cell = self.cell + lengths = self.cell_lengths + return [ + float(numpy.arccos(x) / numpy.pi * 180) + for x in [ + numpy.vdot(cell[1], cell[2]) / lengths[1] / lengths[2], + numpy.vdot(cell[0], cell[2]) / lengths[0] / lengths[2], + numpy.vdot(cell[0], cell[1]) / lengths[0] / lengths[1], + ] + ] - self._weights = weights_tuple + @cell_angles.setter + def cell_angles(self, value): + self.set_cell_angles(value) - def get_symbols_string(self): - """Return a string that tries to match as good as possible the symbols - of this kind. 
If there is only one symbol (no alloy) with 100% - occupancy, just returns the symbol name. Otherwise, groups the full - string in curly brackets, and try to write also the composition - (with 2 precision only). + def set_cell_angles(self, value): + raise NotImplementedError('Modification is not implemented yet') - .. note:: If there is a vacancy (sum of weights<1), we indicate it - with the X symbol followed by 1-sum(weights) (still with 2 - digits precision, so it can be 0.00) + @property + def is_alloy(self): + """Return whether the structure contains any alloy kinds. - .. note:: Note the difference with respect to the symbols and the - symbol properties! + :return: a boolean, True if at least one kind is an alloy """ - return get_symbols_string(self._symbols, self._weights) + return any(kind.is_alloy for kind in self.kinds) @property - def symbol(self): - """If the kind has only one symbol, return it; otherwise, raise a - ValueError. + def has_vacancies(self): + """Return whether the structure has vacancies in the structure. + + :return: a boolean, True if at least one kind has a vacancy """ - if len(self._symbols) == 1: - return self._symbols[0] + return any(kind.has_vacancies for kind in self.kinds) - raise ValueError(f'This kind has more than one symbol (it is an alloy): {self._symbols}') + def get_cell_volume(self): + """Returns the three-dimensional cell volume in Angstrom^3. - @property - def symbols(self): - """List of symbols for this site. If the site is a single atom, - pass a list of one element only, or simply the string for that atom. - For alloys, a list of elements. + Use the `get_dimensionality` method in order to get the area/length of lower-dimensional cells. - .. note:: Note that if you change the list of symbols, the kind - name remains unchanged. + :return: a float. """ - return copy.deepcopy(self._symbols) + return calc_cell_volume(self.cell) - @symbols.setter - def symbols(self, value): - """If value is a string, a single symbol is used. 
Otherwise, a list or - tuple of strings is expected. + def get_cif(self, converter='ase', store=False, **kwargs): + """Creates :py:class:`aiida.orm.nodes.data.cif.CifData`. - I set a copy of the list, so to avoid that the content changes - after the value is set. + :param converter: specify the converter. Default 'ase'. + :param store: If True, intermediate calculation gets stored in the + AiiDA database for record. Default False. + :return: :py:class:`aiida.orm.nodes.data.cif.CifData` node. """ - symbols_tuple = _create_symbols_tuple(value) + from aiida.tools.data import structure as structure_tools - if len(symbols_tuple) != len(self._weights): - raise ValueError( - 'Cannot change the number of symbols. Use the ' 'set_symbols_and_weights function instead.' - ) - validate_symbols_tuple(symbols_tuple) + from .dict import Dict - self._symbols = symbols_tuple + param = Dict(kwargs) + try: + conv_f = getattr(structure_tools, f'_get_cif_{converter}_inline') + except AttributeError: + raise ValueError(f"No such converter '{converter}' available") + ret_dict = conv_f(struct=self, parameters=param, metadata={'store_provenance': store}) + return ret_dict['cif'] - def set_symbols_and_weights(self, symbols, weights): - """Set the chemical symbols and the weights for the site. + def _get_object_phonopyatoms(self): + """Converts StructureData to PhonopyAtoms - .. note:: Note that the kind name remains unchanged. + :return: a PhonopyAtoms object """ - symbols_tuple = _create_symbols_tuple(symbols) - weights_tuple = _create_weights_tuple(weights) - if len(symbols_tuple) != len(weights_tuple): - raise ValueError('The number of symbols and weights must coincide.') - validate_symbols_tuple(symbols_tuple) - validate_weights_tuple(weights_tuple, _SUM_THRESHOLD) - self._symbols = symbols_tuple - self._weights = weights_tuple - - @property - def is_alloy(self): - """Return whether the Kind is an alloy, i.e. 
contains more than one element + from phonopy.structure.atoms import PhonopyAtoms - :return: boolean, True if the kind has more than one element, False otherwise. - """ - return len(self._symbols) != 1 + atoms = PhonopyAtoms(symbols=[_.kind_name for _ in self.sites]) + # Phonopy internally uses scaled positions, so you must store cell first! + atoms.set_cell(self.cell) + atoms.set_positions([_.position for _ in self.sites]) - @property - def has_vacancies(self): - """Return whether the Kind contains vacancies, i.e. when the sum of the weights is less than one. + return atoms - .. note:: the property uses the internal variable `_SUM_THRESHOLD` as a threshold. + def _get_object_ase(self): + """Converts + :py:class:`StructureData ` + to ase.Atoms - :return: boolean, True if the sum of the weights is less than one, False otherwise + :return: an ase.Atoms object """ - return has_vacancies(self._weights) - - def __repr__(self): - return f'<{self.__class__.__name__}: {self!s}>' - - def __str__(self): - symbol = self.get_symbols_string() - return f"name '{self.name}', symbol '{symbol}'" + import ase + asecell = ase.Atoms(cell=self.cell, pbc=self.pbc) + _kinds = self.kinds -class Site: - """This class contains the information about a given site of the system. + for site in self.sites: + asecell.append(site.get_ase(kinds=_kinds)) + return asecell - It can be a single atom, or an alloy, or even contain vacancies. - """ + def _get_object_pymatgen(self, **kwargs): + """Converts + :py:class:`StructureData ` + to pymatgen object - def __init__(self, **kwargs): - """Create a site. + :return: a pymatgen Structure for structures with periodic boundary + conditions (in three dimensions) and Molecule otherwise - :param kind_name: a string that identifies the kind (species) of this site. - This has to be found in the list of kinds of the StructureData - object. - Validation will be done at the StructureData level. - :param position: the absolute position (three floats) in angstrom + .. 
note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors). """ - self._kind_name = None - self._position = None + if any(self.pbc): + return self._get_object_pymatgen_structure(**kwargs) - if 'site' in kwargs: - site = kwargs.pop('site') - if kwargs: - raise ValueError("If you pass 'site', you cannot pass any further parameter to the Site constructor") - if not isinstance(site, Site): - raise ValueError("'site' must be of type Site") - self.kind_name = site.kind_name - self.position = site.position - elif 'raw' in kwargs: - raw = kwargs.pop('raw') - if kwargs: - raise ValueError("If you pass 'raw', you cannot pass any further parameter to the Site constructor") - try: - self.kind_name = raw['kind_name'] - self.position = raw['position'] - except KeyError as exc: - raise ValueError(f'Invalid raw object, it does not contain any key {exc.args[0]}') - except TypeError: - raise ValueError('Invalid raw object, it is not a dictionary') + return self._get_object_pymatgen_molecule(**kwargs) - else: - try: - self.kind_name = kwargs.pop('kind_name') - self.position = kwargs.pop('position') - except KeyError as exc: - raise ValueError(f'You need to specify {exc.args[0]}') - if kwargs: - raise ValueError(f'Unrecognized parameters: {kwargs.keys}') + def _get_object_pymatgen_structure(self, **kwargs): + """Converts + :py:class:`StructureData ` + to pymatgen Structure object + :param add_spin: True to add the spins to the pymatgen structure. + Default is False (no spin added). - def get_raw(self): - """Return the raw version of the site, mapped to a suitable dictionary. - This is the format that is actually used to store each site of the - structure in the DB. + .. note:: The spins are set according to the following rule: - :return: a python dictionary with the site. 
- """ - return { - 'position': self.position, - 'kind_name': self.kind_name, - } + * if the kind name ends with 1 -> spin=+1 - def get_ase(self, kinds): - """Return a ase.Atom object for this site. + * if the kind name ends with 2 -> spin=-1 - :param kinds: the list of kinds from the StructureData object. + :return: a pymatgen Structure object corresponding to this + :py:class:`StructureData ` + object + :raise ValueError: if the cell is not set (i.e. is the default one); + if there are partial occupancies together with spins + (defined by kind names ending with '1' or '2'). - .. note:: If any site is an alloy or has vacancies, a ValueError - is raised (from the site.get_ase() routine). + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors) """ - from collections import defaultdict + from pymatgen.core.lattice import Lattice + from pymatgen.core.structure import Structure - import ase + species = [] + additional_kwargs = {} - # I create the list of tags - tag_list = [] - used_tags = defaultdict(list) - for k in kinds: - # Skip alloys and vacancies - if k.is_alloy or k.has_vacancies: - tag_list.append(None) - # If the kind name is equal to the specie name, - # then no tag should be set - elif str(k.name) == str(k.symbols[0]): - tag_list.append(None) - else: - # Name is not the specie name - if k.name.startswith(k.symbols[0]): - try: - new_tag = int(k.name[len(k.symbols[0])]) - tag_list.append(new_tag) - used_tags[k.symbols[0]].append(new_tag) - continue - except ValueError: - pass - tag_list.append(k.symbols[0]) # I use a string as a placeholder + lattice = Lattice(matrix=self.cell, pbc=self.pbc) - for i, _ in enumerate(tag_list): - # If it is a string, it is the name of the element, - # and I have to generate a new integer for this element - # and replace tag_list[i] with this new integer - if isinstance(tag_list[i], str): - # I get a list of used tags for this element - existing_tags = used_tags[tag_list[i]] - if 
existing_tags: - new_tag = max(existing_tags) + 1 - else: # empty list - new_tag = 1 - # I store it also as a used tag! - used_tags[tag_list[i]].append(new_tag) - # I update the tag - tag_list[i] = new_tag + if kwargs.pop('add_spin', False) and any(n.endswith('1') or n.endswith('2') for n in self.get_kind_names()): + # case when spins are defined -> no partial occupancy allowed + from pymatgen.core.periodic_table import Specie - found = False - for kind_candidate, tag_candidate in zip(kinds, tag_list): - if kind_candidate.name == self.kind_name: - kind = kind_candidate - tag = tag_candidate - found = True - break - if not found: - raise ValueError(f"No kind '{self.kind_name}' has been found in the list of kinds") + oxidation_state = 0 # now I always set the oxidation_state to zero + for site in self.sites: + kind = self.get_kind(site.kind_name) + if len(kind.symbols) != 1 or (len(kind.weights) != 1 or sum(kind.weights) < 1.0): + raise ValueError('Cannot set partial occupancies and spins at the same time') + spin = -1 if kind.name.endswith('1') else 1 if kind.name.endswith('2') else 0 + try: + specie = Specie(kind.symbols[0], oxidation_state, properties={'spin': spin}) + except TypeError: + # As of v2023.9.2, the ``properties`` argument is removed and the ``spin`` argument should be used. + # See: https://github.com/materialsproject/pymatgen/commit/118c245d6082fe0b13e19d348fc1db9c0d512019 + # The ``spin`` argument was introduced in v2023.6.28. 
+ # See: https://github.com/materialsproject/pymatgen/commit/9f2b3939af45d5129e0778d371d814811924aeb6 + specie = Specie(kind.symbols[0], oxidation_state, spin=spin) + species.append(specie) + else: + # case when no spin are defined + for site in self.sites: + kind = self.get_kind(site.kind_name) + species.append(dict(zip(kind.symbols, kind.weights))) + if any( + create_automatic_kind_name(self.get_kind(name).symbols, self.get_kind(name).weights) != name + for name in self.get_site_kindnames() + ): + # add "kind_name" as a properties to each site, whenever + # the kind_name cannot be automatically obtained from the symbols + additional_kwargs['site_properties'] = {'kind_name': self.get_site_kindnames()} - if kind.is_alloy or kind.has_vacancies: - raise ValueError('Cannot convert to ASE if the kind represents an alloy or it has vacancies.') - aseatom = ase.Atom(position=self.position, symbol=str(kind.symbols[0]), mass=kind.mass) - if tag is not None: - aseatom.tag = tag - return aseatom + if kwargs: + raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') - @property - def kind_name(self): - """Return the kind name of this site (a string). + positions = [list(x.position) for x in self.sites] - The type of a site is used to decide whether two sites are identical - (same mass, symbols, weights, ...) or not. - """ - return self._kind_name + try: + return Structure(lattice, species, positions, coords_are_cartesian=True, **additional_kwargs) + except ValueError as err: + raise ValueError('Singular cell detected. Probably the cell was not set?') from err - @kind_name.setter - def kind_name(self, value): - """Set the type of this site (a string).""" - self._kind_name = str(value) + def _get_object_pymatgen_molecule(self, **kwargs): + """Converts + :py:class:`StructureData ` + to pymatgen Molecule object - @property - def position(self): - """Return the position of this site in absolute coordinates, - in angstrom. 
- """ - return copy.deepcopy(self._position) + :return: a pymatgen Molecule object corresponding to this + :py:class:`StructureData ` + object. - @position.setter - def position(self, value): - """Set the position of this site in absolute coordinates, - in angstrom. + .. note:: Requires the pymatgen module (version >= 3.0.13, usage + of earlier versions may cause errors) """ - try: - internal_pos = tuple(float(i) for i in value) - if len(internal_pos) != 3: - raise ValueError - # value is not iterable or elements are not floats or len != 3 - except (ValueError, TypeError): - raise ValueError('Wrong format for position, must be a list of three float numbers.') - self._position = internal_pos + from pymatgen.core.structure import Molecule - def __repr__(self): - return f'<{self.__class__.__name__}: {self!s}>' + if kwargs: + raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') - def __str__(self): - return f"kind name '{self.kind_name}' @ {self.position[0]},{self.position[1]},{self.position[2]}" + species = [] + for site in self.sites: + kind = self.get_kind(site.kind_name) + species.append(dict(zip(kind.symbols, kind.weights))) + + positions = [list(site.position) for site in self.sites] + return Molecule(species, positions) def _get_dimensionality(pbc, cell): diff --git a/src/aiida/orm/nodes/links.py b/src/aiida/orm/nodes/links.py index 2297792d53..3e92a3947e 100644 --- a/src/aiida/orm/nodes/links.py +++ b/src/aiida/orm/nodes/links.py @@ -2,8 +2,7 @@ from __future__ import annotations -import typing as t -from typing import Optional, cast +from typing import TYPE_CHECKING, Any, Sequence, cast from aiida.common import exceptions from aiida.common.escaping import sql_string_match @@ -13,7 +12,7 @@ from ..querybuilder import QueryBuilder from ..utils.links import LinkManager, LinkTriple -if t.TYPE_CHECKING: +if TYPE_CHECKING: from .node import Node @@ -113,9 +112,9 @@ def validate_outgoing(self, target: 'Node', link_type: LinkType, 
link_label: str def get_stored_link_triples( self, - node_class: Optional[t.Type['Node']] = None, - link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), - link_label_filter: t.Optional[str] = None, + node_class: type['Node'] | None = None, + link_type: LinkType | Sequence[LinkType] = (), + link_label_filter: str | None = None, link_direction: str = 'incoming', only_uuid: bool = False, ) -> list[LinkTriple]: @@ -133,14 +132,14 @@ def get_stored_link_triples( from .node import Node if not isinstance(link_type, (tuple, list)): - link_type = cast(t.Sequence[LinkType], (link_type,)) + link_type = cast(Sequence[LinkType], (link_type,)) if link_type and not all(isinstance(t, LinkType) for t in link_type): raise TypeError(f'link_type should be a LinkType or tuple of LinkType: got {link_type}') node_class = node_class or Node - node_filters: dict[str, t.Any] = {'id': {'==': self._node.pk}} - edge_filters: dict[str, t.Any] = {} + node_filters: dict[str, Any] = {'id': {'==': self._node.pk}} + edge_filters: dict[str, Any] = {} if link_type: edge_filters['type'] = {'in': [t.value for t in link_type]} @@ -173,9 +172,9 @@ def get_stored_link_triples( def get_incoming( self, - node_class: Optional[t.Type['Node']] = None, - link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), - link_label_filter: t.Optional[str] = None, + node_class: type['Node'] | None = None, + link_type: LinkType | Sequence[LinkType] = (), + link_label_filter: str | None = None, only_uuid: bool = False, ) -> LinkManager: """Return a list of link triples that are (directly) incoming into this node. 
@@ -189,7 +188,7 @@ def get_incoming( :param only_uuid: project only the node UUID instead of the instance onto the `NodeTriple.node` entries """ if not isinstance(link_type, (tuple, list)): - link_type = cast(t.Sequence[LinkType], (link_type,)) + link_type = cast(Sequence[LinkType], (link_type,)) if self._node.is_stored: link_triples = self.get_stored_link_triples( @@ -223,9 +222,9 @@ def get_incoming( def get_outgoing( self, - node_class: Optional[t.Type['Node']] = None, - link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), - link_label_filter: t.Optional[str] = None, + node_class: type['Node'] | None = None, + link_type: LinkType | Sequence[LinkType] = (), + link_label_filter: str | None = None, only_uuid: bool = False, ) -> LinkManager: """Return a list of link triples that are (directly) outgoing of this node. diff --git a/src/aiida/orm/nodes/node.py b/src/aiida/orm/nodes/node.py index b9681111ce..68f12ec6ed 100644 --- a/src/aiida/orm/nodes/node.py +++ b/src/aiida/orm/nodes/node.py @@ -13,7 +13,7 @@ import base64 import datetime from functools import cached_property -from typing import TYPE_CHECKING, Any, ClassVar, Dict, Generic, Iterator, List, Optional, Tuple, Type, TypeVar +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeVar from uuid import UUID from aiida.common import exceptions @@ -21,6 +21,7 @@ from aiida.common.links import LinkType from aiida.common.log import AIIDA_LOGGER from aiida.common.pydantic import MetadataField +from aiida.common.typing import Self from aiida.common.warnings import warn_deprecation from aiida.manage import get_manager from aiida.orm.utils.node import ( @@ -30,8 +31,7 @@ ) from ..computers import Computer -from ..entities import Collection as EntityCollection -from ..entities import Entity, from_backend_entity +from ..entities import Collection, Entity, from_backend_entity from ..extras import EntityExtras from ..querybuilder import QueryBuilder from ..users import User @@ -54,11 +54,11 
@@ NodeType = TypeVar('NodeType', bound='Node') -class NodeCollection(EntityCollection[NodeType], Generic[NodeType]): +class NodeCollection(Collection[NodeType], Generic[NodeType]): """The collection of nodes.""" @staticmethod - def _entity_base_cls() -> Type['Node']: # type: ignore[override] + def _entity_base_cls() -> type['Node']: # type: ignore[override] return Node def delete(self, pk: int) -> None: @@ -80,7 +80,10 @@ def delete(self, pk: int) -> None: self._backend.nodes.delete(pk) def iter_repo_keys( - self, filters: Optional[dict] = None, subclassing: bool = True, batch_size: int = 100 + self, + filters: dict | None = None, + subclassing: bool = True, + batch_size: int = 100, ) -> Iterator[str]: """Iterate over all repository object keys for this ``Node`` class @@ -140,7 +143,7 @@ def links(self) -> 'NodeLinks': return self._node._CLS_NODE_LINKS(self._node) -class Node(Entity['BackendNode', NodeCollection], metaclass=AbstractNodeMeta): +class Node(Entity['BackendNode'], metaclass=AbstractNodeMeta): """Base class for all nodes in AiiDA. Stores attributes starting with an underscore. @@ -181,10 +184,10 @@ def _query_type_string(cls) -> str: # noqa: N805 # A tuple of attribute names that can be updated even after node is stored # Requires Sealable mixin, but needs empty tuple for base class - _updatable_attributes: Tuple[str, ...] = tuple() + _updatable_attributes: tuple[str, ...] = tuple() # A tuple of attribute names that will be ignored when creating the hash. - _hash_ignored_attributes: Tuple[str, ...] = tuple() + _hash_ignored_attributes: tuple[str, ...] = tuple() # Flag that determines whether the class can be cached. 
_cachable = False @@ -207,7 +210,7 @@ class Model(Entity.Model): exclude_to_orm=True, exclude_from_cli=True, ) - repository_metadata: Optional[Dict[str, Any]] = MetadataField( + repository_metadata: Optional[dict[str, Any]] = MetadataField( None, description='Virtual hierarchy of the file repository.', is_attribute=False, @@ -235,7 +238,7 @@ class Model(Entity.Model): description: Optional[str] = MetadataField( None, description='The node description', is_attribute=False, exclude_from_cli=True ) - attributes: Optional[Dict[str, Any]] = MetadataField( + attributes: Optional[dict[str, Any]] = MetadataField( None, description='The node attributes', is_attribute=False, @@ -244,7 +247,7 @@ class Model(Entity.Model): exclude_from_cli=True, exclude_to_orm=True, ) - extras: Optional[Dict[str, Any]] = MetadataField( + extras: Optional[dict[str, Any]] = MetadataField( None, description='The node extras', is_attribute=False, @@ -284,10 +287,10 @@ class Model(Entity.Model): def __init__( self, - backend: Optional['StorageBackend'] = None, - user: Optional[User] = None, - computer: Optional[Computer] = None, - extras: Optional[Dict[str, Any]] = None, + backend: 'StorageBackend' | None = None, + user: User | None = None, + computer: Computer | None = None, + extras: dict[str, Any] | None = None, **kwargs: Any, ) -> None: backend = backend or get_manager().get_profile_storage() @@ -309,11 +312,13 @@ def __init__( self.base.extras.set_many(extras) @classmethod - def _from_model(cls, model: Model) -> 'Node': # type: ignore[override] + def _from_model( # type: ignore[override] + cls, model: Model + ) -> Self: """Return an entity instance from an instance of its model.""" fields = cls.model_to_orm_field_values(model) - repository_content = fields.pop('repository_content', {}) + repository_content: dict[str, bytes] = fields.pop('repository_content', {}) node = cls(**fields) for filepath, encoded in repository_content.items(): @@ -326,7 +331,7 @@ def base(self) -> NodeBase: """Return 
the node base namespace.""" return NodeBase(self) - def _check_mutability_attributes(self, keys: Optional[List[str]] = None) -> None: + def _check_mutability_attributes(self, keys: list[str] | None = None) -> None: """Check if the entity is mutable and raise an exception if not. This is called from `NodeAttributes` methods that modify the attributes. @@ -399,7 +404,7 @@ def class_node_type(cls) -> str: # noqa: N805 return cls._plugin_type_string @classproperty - def entry_point(cls) -> Optional['EntryPoint']: # noqa: N805 + def entry_point(cls) -> 'EntryPoint' | None: # noqa: N805 """Return the entry point associated this node class. :return: the associated entry point or ``None`` if it isn't known. @@ -409,7 +414,7 @@ def entry_point(cls) -> Optional['EntryPoint']: # noqa: N805 return get_entry_point_from_class(cls.__module__, cls.__name__)[1] @property - def logger(self) -> Optional[AiidaLoggerType]: + def logger(self) -> AiidaLoggerType | None: """Return the logger configured for this Node. :return: Logger object @@ -433,7 +438,7 @@ def node_type(self) -> str: return self.backend_entity.node_type @property - def process_type(self) -> Optional[str]: + def process_type(self) -> str | None: """Return the node process type. :return: the process type @@ -481,7 +486,7 @@ def description(self, value: str) -> None: self.backend_entity.description = value @property - def computer(self) -> Optional[Computer]: + def computer(self) -> Computer | None: """Return the computer of this node.""" if self.backend_entity.computer: return from_backend_entity(Computer, self.backend_entity.computer) @@ -489,7 +494,7 @@ def computer(self) -> Optional[Computer]: return None @computer.setter - def computer(self, computer: Optional[Computer]) -> None: + def computer(self, computer: Computer | None) -> None: """Set the computer of this node. 
:param computer: a `Computer` diff --git a/src/aiida/orm/nodes/process/calculation/calcjob.py b/src/aiida/orm/nodes/process/calculation/calcjob.py index a526fc3b9c..c8730eb6e1 100644 --- a/src/aiida/orm/nodes/process/calculation/calcjob.py +++ b/src/aiida/orm/nodes/process/calculation/calcjob.py @@ -8,6 +8,8 @@ ########################################################################### """Module with `Node` sub class for calculation job processes.""" +from __future__ import annotations + import datetime from typing import TYPE_CHECKING, Any, AnyStr, Dict, List, Optional, Sequence, Tuple, Type, Union @@ -90,11 +92,11 @@ class Model(CalculationNode.Model): description='The detailed job info returned by the scheduler', orm_to_model=lambda node, _: node.get_detailed_job_info(), ) - retrieve_list: Optional[List[str]] = MetadataField( + retrieve_list: Optional[list[str]] = MetadataField( description='The list of files to retrieve from the remote cluster', orm_to_model=lambda node, _: node.get_retrieve_list(), ) - retrieve_temporary_list: Optional[List[str]] = MetadataField( + retrieve_temporary_list: Optional[list[str]] = MetadataField( description='The list of temporary files to retrieve from the remote cluster', orm_to_model=lambda node, _: node.get_retrieve_temporary_list(), ) @@ -286,7 +288,7 @@ def _validate_retrieval_directive(directives: Sequence[Union[str, Tuple[str, str if not isinstance(directive[2], (int, type(None))): raise ValueError('invalid directive, third element has to be an integer representing the depth') - def set_retrieve_list(self, retrieve_list: Sequence[Union[str, Tuple[str, str, str]]]) -> None: + def set_retrieve_list(self, retrieve_list: Sequence[str | tuple[str, str, str]]) -> None: """Set the retrieve list. This list of directives will instruct the daemon what files to retrieve after the calculation has completed. 
@@ -297,14 +299,14 @@ def set_retrieve_list(self, retrieve_list: Sequence[Union[str, Tuple[str, str, s self._validate_retrieval_directive(retrieve_list) self.base.attributes.set(self.RETRIEVE_LIST_KEY, retrieve_list) - def get_retrieve_list(self) -> Optional[Sequence[Union[str, Tuple[str, str, str]]]]: + def get_retrieve_list(self) -> Optional[Sequence[str | tuple[str, str, str]]]: """Return the list of files/directories to be retrieved on the cluster after the calculation has completed. :return: a list of file directives """ return self.base.attributes.get(self.RETRIEVE_LIST_KEY, None) - def set_retrieve_temporary_list(self, retrieve_temporary_list: Sequence[Union[str, Tuple[str, str, str]]]) -> None: + def set_retrieve_temporary_list(self, retrieve_temporary_list: Sequence[str | tuple[str, str, str]]) -> None: """Set the retrieve temporary list. The retrieve temporary list stores files that are retrieved after completion and made available during parsing @@ -315,7 +317,7 @@ def set_retrieve_temporary_list(self, retrieve_temporary_list: Sequence[Union[st self._validate_retrieval_directive(retrieve_temporary_list) self.base.attributes.set(self.RETRIEVE_TEMPORARY_LIST_KEY, retrieve_temporary_list) - def get_retrieve_temporary_list(self) -> Optional[Sequence[Union[str, Tuple[str, str, str]]]]: + def get_retrieve_temporary_list(self) -> Optional[Sequence[str | tuple[str, str, str]]]: """Return list of files to be retrieved from the cluster which will be available during parsing. 
:return: a list of file directives diff --git a/src/aiida/orm/nodes/process/process.py b/src/aiida/orm/nodes/process/process.py index 37369d14f6..586fe4574c 100644 --- a/src/aiida/orm/nodes/process/process.py +++ b/src/aiida/orm/nodes/process/process.py @@ -8,9 +8,11 @@ ########################################################################### """Module with `Node` sub class for processes.""" +from __future__ import annotations + import enum from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Optional from plumpy.process_states import ProcessState @@ -82,7 +84,7 @@ def is_valid_cache(self, valid: bool) -> None: """ super(ProcessNodeCaching, self.__class__).is_valid_cache.fset(self, valid) - def get_objects_to_hash(self) -> List[Any]: + def get_objects_to_hash(self) -> list[Any]: """Return a list of objects which should be included in the hash.""" res = super().get_objects_to_hash() res.update( @@ -172,11 +174,11 @@ def __str__(self) -> str: return f'{base}' @classproperty - def _hash_ignored_attributes(cls) -> Tuple[str, ...]: # noqa: N805 + def _hash_ignored_attributes(cls) -> tuple[str, ...]: # noqa: N805 return super()._hash_ignored_attributes + ('metadata_inputs',) @classproperty - def _updatable_attributes(cls) -> Tuple[str, ...]: # noqa: N805 + def _updatable_attributes(cls) -> tuple[str, ...]: # noqa: N805 return super()._updatable_attributes + ( cls.PROCESS_PAUSED_KEY, cls.CHECKPOINT_KEY, @@ -197,11 +199,11 @@ class Model(Node.Model, Sealable.Model): exception: Optional[str] = MetadataField(description='The process exception message') paused: bool = MetadataField(description='Whether the process is paused') - def set_metadata_inputs(self, value: Dict[str, Any]) -> None: + def set_metadata_inputs(self, value: dict[str, Any]) -> None: """Set the mapping of inputs corresponding to ``metadata`` ports that were passed to the process.""" return 
self.base.attributes.set(self.METADATA_INPUTS_KEY, value) - def get_metadata_inputs(self) -> Optional[Dict[str, Any]]: + def get_metadata_inputs(self) -> dict[str, Any] | None: """Return the mapping of inputs corresponding to ``metadata`` ports that were passed to the process.""" return self.base.attributes.get(self.METADATA_INPUTS_KEY, None) @@ -253,7 +255,7 @@ def get_builder_restart(self) -> 'ProcessBuilder': return builder @property - def process_class(self) -> Type['Process']: + def process_class(self) -> type['Process']: """Return the process class that was used to create this node. :return: `Process` class @@ -303,7 +305,7 @@ def set_process_type(self, process_type_string: str) -> None: self.process_type = process_type_string @property - def process_label(self) -> Optional[str]: + def process_label(self) -> str | None: """Return the process label :returns: the process label @@ -318,7 +320,7 @@ def set_process_label(self, label: str) -> None: self.base.attributes.set(self.PROCESS_LABEL_KEY, label) @property - def process_state(self) -> Optional[ProcessState]: + def process_state(self) -> ProcessState | None: """Return the process state :returns: the process state instance of ProcessState enum @@ -330,7 +332,7 @@ def process_state(self) -> Optional[ProcessState]: return ProcessState(state) - def set_process_state(self, state: Union[str, ProcessState, None]): + def set_process_state(self, state: str | ProcessState | None): """Set the process state :param state: value or instance of ProcessState enum @@ -340,7 +342,7 @@ def set_process_state(self, state: Union[str, ProcessState, None]): return self.base.attributes.set(self.PROCESS_STATE_KEY, state) @property - def process_status(self) -> Optional[str]: + def process_status(self) -> str | None: """Return the process status The process status is a generic status message e.g. 
the reason it might be paused or when it is being killed @@ -349,7 +351,7 @@ def process_status(self) -> Optional[str]: """ return self.base.attributes.get(self.PROCESS_STATUS_KEY, None) - def set_process_status(self, status: Optional[str]) -> None: + def set_process_status(self, status: str | None) -> None: """Set the process status The process status is a generic status message e.g. the reason it might be paused or when it is being killed. @@ -437,7 +439,7 @@ def is_failed(self) -> bool: return self.is_finished and self.exit_status != 0 @property - def exit_code(self) -> Optional['ExitCode']: + def exit_code(self) -> 'ExitCode' | None: """Return the exit code of the process. It is reconstituted from the ``exit_status`` and ``exit_message`` attributes if both of those are defined. @@ -455,17 +457,17 @@ def exit_code(self) -> Optional['ExitCode']: return ExitCode(exit_status, exit_message) @property - def exit_status(self) -> Optional[int]: + def exit_status(self) -> int | None: """Return the exit status of the process :returns: the exit status, an integer exit code or None """ return self.base.attributes.get(self.EXIT_STATUS_KEY, None) - def set_exit_status(self, status: Union[None, enum.Enum, int]) -> None: + def set_exit_status(self, status: enum.Enum | int | None) -> None: """Set the exit status of the process - :param state: an integer exit code or None, which will be interpreted as zero + :param status: the exit status, an integer exit code, or None """ if status is None: status = 0 @@ -479,14 +481,14 @@ def set_exit_status(self, status: Union[None, enum.Enum, int]) -> None: return self.base.attributes.set(self.EXIT_STATUS_KEY, status) @property - def exit_message(self) -> Optional[str]: + def exit_message(self) -> str | None: """Return the exit message of the process :returns: the exit message """ return self.base.attributes.get(self.EXIT_MESSAGE_KEY, None) - def set_exit_message(self, message: Optional[str]) -> None: + def set_exit_message(self, message: 
str | None) -> None: """Set the exit message of the process, if None nothing will be done :param message: a string message @@ -500,7 +502,7 @@ def set_exit_message(self, message: Optional[str]) -> None: return self.base.attributes.set(self.EXIT_MESSAGE_KEY, message) @property - def exception(self) -> Optional[str]: + def exception(self) -> str | None: """Return the exception of the process or None if the process is not excepted. If the process is marked as excepted yet there is no exception attribute, an empty string will be returned. @@ -523,7 +525,7 @@ def set_exception(self, exception: str) -> None: return self.base.attributes.set(self.EXCEPTION_KEY, exception) @property - def checkpoint(self) -> Optional[str]: + def checkpoint(self) -> str | None: """Return the checkpoint bundle set for the process :returns: checkpoint bundle if it exists, None otherwise @@ -572,7 +574,7 @@ def unpause(self) -> None: pass @property - def called(self) -> List['ProcessNode']: + def called(self) -> list['ProcessNode']: """Return a list of nodes that the process called :returns: list of process nodes called by this process @@ -580,7 +582,7 @@ def called(self) -> List['ProcessNode']: return self.base.links.get_outgoing(link_type=(LinkType.CALL_CALC, LinkType.CALL_WORK)).all_nodes() @property - def called_descendants(self) -> List['ProcessNode']: + def called_descendants(self) -> list['ProcessNode']: """Return a list of all nodes that have been called downstream of this process This will recursively find all the called processes for this process and its children. 
@@ -594,7 +596,7 @@ def called_descendants(self) -> List['ProcessNode']: return descendants @property - def caller(self) -> Optional['ProcessNode']: + def caller(self) -> 'ProcessNode' | None: """Return the process node that called this process node, or None if it does not have a caller :returns: process node that called this process node instance or None @@ -607,7 +609,7 @@ def caller(self) -> Optional['ProcessNode']: def dump( self, - output_path: Optional[Union[str, Path]] = None, + output_path: str | Path | None = None, # Dump mode options dry_run: bool = False, overwrite: bool = False, diff --git a/src/aiida/orm/nodes/repository.py b/src/aiida/orm/nodes/repository.py index 701ddf34d5..a7f8ca8f1f 100644 --- a/src/aiida/orm/nodes/repository.py +++ b/src/aiida/orm/nodes/repository.py @@ -8,12 +8,12 @@ import pathlib import shutil import tempfile -import typing as t +from typing import TYPE_CHECKING, Any, BinaryIO, Iterable, Iterator, Literal, TextIO, overload from aiida.common import exceptions from aiida.manage import get_config_option -if t.TYPE_CHECKING: +if TYPE_CHECKING: from aiida.common.typing import FilePath from aiida.repository import File, Repository @@ -46,7 +46,7 @@ def __init__(self, node: 'Node') -> None: self._repository_instance: Repository | None = None @property - def metadata(self) -> dict[str, t.Any]: + def metadata(self) -> dict[str, Any]: """Return the repository metadata, representing the virtual file hierarchy. Note, this is only accurate if the node is stored. @@ -183,16 +183,16 @@ def list_object_names(self, path: str | None = None) -> list[str]: """ return self._repository.list_object_names(path) - @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['r']) -> t.Iterator[t.TextIO]: ... + def open(self, path: FilePath, mode: Literal['r']) -> Iterator[TextIO]: ... 
- @t.overload + @overload @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['rb']) -> t.Iterator[t.BinaryIO]: ... + def open(self, path: FilePath, mode: Literal['rb']) -> Iterator[BinaryIO]: ... @contextlib.contextmanager - def open(self, path: FilePath, mode: t.Literal['r', 'rb'] = 'r') -> t.Iterator[t.BinaryIO] | t.Iterator[t.TextIO]: + def open(self, path: FilePath, mode: Literal['r', 'rb'] = 'r') -> Iterator[BinaryIO] | Iterator[TextIO]: """Open a file handle to an object stored under the given key. .. note:: this should only be used to open a handle to read an existing file. To write a new file use the method @@ -215,7 +215,7 @@ def open(self, path: FilePath, mode: t.Literal['r', 'rb'] = 'r') -> t.Iterator[t yield handle @contextlib.contextmanager - def as_path(self, path: FilePath | None = None) -> t.Iterator[pathlib.Path]: + def as_path(self, path: FilePath | None = None) -> Iterator[pathlib.Path]: """Make the contents of the repository available as a normal filepath on the local file system. :param path: optional relative path of the object within the repository. @@ -249,13 +249,13 @@ def get_object(self, path: FilePath | None = None) -> File: """ return self._repository.get_object(path) - @t.overload - def get_object_content(self, path: str, mode: t.Literal['r']) -> str: ... + @overload + def get_object_content(self, path: str, mode: Literal['r']) -> str: ... - @t.overload - def get_object_content(self, path: str, mode: t.Literal['rb']) -> bytes: ... + @overload + def get_object_content(self, path: str, mode: Literal['rb']) -> bytes: ... - def get_object_content(self, path: str, mode: t.Literal['r', 'rb'] = 'r') -> str | bytes: + def get_object_content(self, path: str, mode: Literal['r', 'rb'] = 'r') -> str | bytes: """Return the content of a object identified by key. :param path: the relative path of the object within the repository. 
@@ -330,7 +330,7 @@ def put_object_from_tree(self, filepath: str, path: str | None = None): self._repository.put_object_from_tree(filepath, path) self._update_repository_metadata() - def walk(self, path: FilePath | None = None) -> t.Iterable[tuple[pathlib.PurePath, list[str], list[str]]]: + def walk(self, path: FilePath | None = None) -> Iterable[tuple[pathlib.PurePath, list[str], list[str]]]: """Walk over the directories and files contained within this repository. .. note:: the order of the dirname and filename lists that are returned is not necessarily sorted. This is in @@ -343,7 +343,7 @@ def walk(self, path: FilePath | None = None) -> t.Iterable[tuple[pathlib.PurePat """ yield from self._repository.walk(path) - def glob(self) -> t.Iterable[pathlib.PurePath]: + def glob(self) -> Iterable[pathlib.PurePath]: """Yield a recursive list of all paths (files and directories).""" for dirpath, dirnames, filenames in self.walk(): for dirname in dirnames: diff --git a/src/aiida/orm/querybuilder.py b/src/aiida/orm/querybuilder.py index 2587636bb2..49932367b5 100644 --- a/src/aiida/orm/querybuilder.py +++ b/src/aiida/orm/querybuilder.py @@ -22,23 +22,7 @@ import warnings from copy import deepcopy from inspect import isclass as inspect_isclass -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - List, - Literal, - NamedTuple, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Iterable, Literal, NamedTuple, Sequence, Union, cast, overload from aiida.common.log import AIIDA_LOGGER from aiida.common.warnings import warn_deprecation @@ -61,10 +45,10 @@ __all__ = ('QueryBuilder',) # re-usable type annotations -EntityClsType = Type[Union[entities.Entity, 'Process']] +EntityClsType = type[Union[entities.Entity, 'Process']] ProjectType = Union[str, dict, Sequence[Union[str, dict]]] -FilterType = Union[Dict[str, Any], fields.QbFieldFilters] -OrderByType = Union[dict, List[dict], Tuple[dict, ...]] 
+FilterType = Union[dict[str, Any], fields.QbFieldFilters] +OrderByType = Union[dict, list[dict], tuple[dict, ...]] LOGGER = AIIDA_LOGGER.getChild('querybuilder') @@ -73,7 +57,7 @@ class Classifier(NamedTuple): """A classifier for an entity.""" ormclass_type_string: str - process_type_string: Optional[str] = None + process_type_string: str | None = None class QueryBuilder: @@ -97,17 +81,17 @@ class QueryBuilder: def __init__( self, - backend: Optional['StorageBackend'] = None, + backend: 'StorageBackend' | None = None, *, debug: bool | None = None, - path: Optional[Sequence[Union[str, Dict[str, Any], EntityClsType]]] = (), - filters: Optional[Dict[str, FilterType]] = None, - project: Optional[Dict[str, ProjectType]] = None, - limit: Optional[int] = None, - offset: Optional[int] = None, - order_by: Optional[OrderByType] = None, + path: Sequence[str | dict[str, Any] | EntityClsType] | None = (), + filters: dict[str, FilterType] | None = None, + project: dict[str, ProjectType] | None = None, + limit: int | None = None, + offset: int | None = None, + order_by: OrderByType | None = None, distinct: bool = False, - project_map: Optional[Dict[str, Dict[str, str]]] = None, + project_map: dict[str, dict[str, str]] | None = None, ) -> None: """Instantiates a QueryBuilder instance. 
@@ -145,17 +129,17 @@ def __init__( # SERIALISABLE ATTRIBUTES # A list storing the path being traversed by the query - self._path: List[PathItemType] = [] + self._path: list[PathItemType] = [] # map tags to filters - self._filters: Dict[str, Dict[str, Any]] = {} + self._filters: dict[str, dict[str, Any]] = {} # map tags to projections: tag -> list(fields) -> func | cast -> value - self._projections: Dict[str, List[Dict[str, Dict[str, Any]]]] = {} + self._projections: dict[str, list[dict[str, dict[str, Any]]]] = {} # mapping: tag -> field -> return key for iterdict/dict methods - self._project_map: Dict[str, Dict[str, str]] = {} + self._project_map: dict[str, dict[str, str]] = {} # list of mappings: tag -> list(fields) -> 'order' | 'cast' -> value (str('asc' | 'desc'), str(cast_key)) - self._order_by: List[Dict[str, List[Dict[str, Dict[str, str]]]]] = [] - self._limit: Optional[int] = None - self._offset: Optional[int] = None + self._order_by: list[dict[str, list[dict[str, dict[str, str]]]]] = [] + self._limit: int | None = None + self._offset: int | None = None self._distinct: bool = distinct # cache of tag mappings, populated during appends @@ -233,7 +217,7 @@ def queryhelp(self) -> 'QueryDictType': return self.as_dict() @classmethod - def from_dict(cls, dct: Dict[str, Any]) -> 'QueryBuilder': + def from_dict(cls, dct: dict[str, Any]) -> 'QueryBuilder': """Create an instance from a dictionary representation of the query.""" return cls(**dct) @@ -250,7 +234,7 @@ def __deepcopy__(self, memo) -> 'QueryBuilder': """Create deep copy of the instance.""" return type(self)(backend=self.backend, **self.as_dict()) # type: ignore[arg-type] - def get_used_tags(self, vertices: bool = True, edges: bool = True) -> List[str]: + def get_used_tags(self, vertices: bool = True, edges: bool = True) -> list[str]: """Returns a list of all the vertices that are being used. 
:param vertices: If True, adds the tags of vertices to the returned list @@ -266,7 +250,7 @@ def get_used_tags(self, vertices: bool = True, edges: bool = True) -> List[str]: given_tags.append(path['edge_tag']) return given_tags - def _get_unique_tag(self, classifiers: List[Classifier]) -> str: + def _get_unique_tag(self, classifiers: list[Classifier]) -> str: """Using the function get_tag_from_type, I get a tag. I increment an index that is appended to that tag until I have an unused tag. This function is called in :func:`QueryBuilder.append` when no tag is given. @@ -290,19 +274,19 @@ def _get_unique_tag(self, classifiers: List[Classifier]) -> str: def append( self, - cls: Optional[Union[EntityClsType, Sequence[EntityClsType]]] = None, - entity_type: Optional[Union[str, Sequence[str]]] = None, - tag: Optional[str] = None, - filters: Optional[FilterType] = None, - project: Optional[ProjectType] = None, + cls: EntityClsType | Sequence[EntityClsType] | None = None, + entity_type: str | Sequence[str] | None = None, + tag: str | None = None, + filters: FilterType | None = None, + project: ProjectType | None = None, subclassing: bool = True, - edge_tag: Optional[str] = None, - edge_filters: Optional[FilterType] = None, - edge_project: Optional[ProjectType] = None, + edge_tag: str | None = None, + edge_filters: FilterType | None = None, + edge_project: ProjectType | None = None, outerjoin: bool = False, - joining_keyword: Optional[str] = None, - joining_value: Optional[Any] = None, - orm_base: Optional[str] = None, + joining_keyword: str | None = None, + joining_value: Any | None = None, + orm_base: str | None = None, **kwargs: Any, ) -> 'QueryBuilder': """Any iterative procedure to build the path for a graph query @@ -471,9 +455,9 @@ def append( ) if joining_keyword: raise ValueError( - 'You already specified joining specification {}\n' - 'But you now also want to specify {}' - ''.format(joining_keyword, key) + 'You already specified joining specification {}\nBut you 
now also want to specify {}'.format( + joining_keyword, key + ) ) joining_keyword = key @@ -551,7 +535,7 @@ def append( # EXTENDING THE PATH ################################# # Note: 'type' being a list is a relict of an earlier implementation # Could simply pass all classifiers here. - path_type: Union[List[str], str] + path_type: list[str] | str if len(classifiers) > 1: path_type = [c.ormclass_type_string for c in classifiers] else: @@ -574,7 +558,7 @@ def append( return self - def _init_project_map(self, project_map: Dict[str, Dict[str, str]]) -> None: + def _init_project_map(self, project_map: dict[str, dict[str, str]]) -> None: """Set the project map. Note, this is a private method, @@ -664,17 +648,16 @@ def order_by(self, order_by: OrderByType) -> 'QueryBuilder': this_order_spec = orderspec else: raise TypeError( - 'I was expecting a string or a dictionary\n' 'You provided {} {}\n' ''.format( + 'I was expecting a string or a dictionary\nYou provided {} {}\n'.format( type(orderspec), orderspec ) ) for key in this_order_spec: if key not in allowed_keys: raise ValueError( - 'The allowed keys for an order specification\n' - 'are {}\n' - '{} is not valid\n' - ''.format(', '.join(allowed_keys), key) + 'The allowed keys for an order specification\nare {}\n{} is not valid\n'.format( + ', '.join(allowed_keys), key + ) ) this_order_spec['order'] = this_order_spec.get('order', 'asc') if this_order_spec['order'] not in possible_orders: @@ -691,7 +674,7 @@ def order_by(self, order_by: OrderByType) -> 'QueryBuilder': self._order_by.append(_order_spec) return self - def add_filter(self, tagspec: Union[str, EntityClsType], filter_spec: FilterType) -> 'QueryBuilder': + def add_filter(self, tagspec: str | EntityClsType, filter_spec: FilterType) -> 'QueryBuilder': """Adding a filter to my filters. 
:param tagspec: A tag string or an ORM class which maps to an existing tag @@ -714,7 +697,7 @@ def add_filter(self, tagspec: Union[str, EntityClsType], filter_spec: FilterType return self @staticmethod - def _process_filters(filters: FilterType) -> Dict[str, Any]: + def _process_filters(filters: FilterType) -> dict[str, Any]: """Process filters.""" if not isinstance(filters, (dict, fields.QbFieldFilters)): raise TypeError('Filters must be either a dictionary or QbFieldFilters') @@ -733,7 +716,7 @@ def _process_filters(filters: FilterType) -> Dict[str, Any]: return processed_filters - def _add_node_type_filter(self, tagspec: str, classifiers: List[Classifier], subclassing: bool): + def _add_node_type_filter(self, tagspec: str, classifiers: list[Classifier], subclassing: bool): """Add a filter based on node type. :param tagspec: The tag, which has to exist already as a key in self._filters @@ -750,7 +733,7 @@ def _add_node_type_filter(self, tagspec: str, classifiers: List[Classifier], sub self.add_filter(tagspec, {'node_type': entity_type_filter}) - def _add_process_type_filter(self, tagspec: str, classifiers: List[Classifier], subclassing: bool) -> None: + def _add_process_type_filter(self, tagspec: str, classifiers: list[Classifier], subclassing: bool) -> None: """Add a filter based on process type. :param tagspec: The tag, which has to exist already as a key in self._filters @@ -773,7 +756,7 @@ def _add_process_type_filter(self, tagspec: str, classifiers: List[Classifier], process_type_filter = _get_process_type_filter(classifiers[0], subclassing) self.add_filter(tagspec, {'process_type': process_type_filter}) - def _add_group_type_filter(self, tagspec: str, classifiers: List[Classifier], subclassing: bool) -> None: + def _add_group_type_filter(self, tagspec: str, classifiers: list[Classifier], subclassing: bool) -> None: """Add a filter based on group type. 
:param tagspec: The tag, which has to exist already as a key in self._filters @@ -790,7 +773,7 @@ def _add_group_type_filter(self, tagspec: str, classifiers: List[Classifier], su self.add_filter(tagspec, {'type_string': type_string_filter}) - def add_projection(self, tag_spec: Union[str, EntityClsType], projection_spec: ProjectType) -> None: + def add_projection(self, tag_spec: str | EntityClsType, projection_spec: ProjectType) -> None: r"""Adds a projection :param tag_spec: A tag string or an ORM class which maps to an existing tag @@ -902,7 +885,7 @@ def debug(self, msg: str, *objects: Any) -> None: if self._debug: print(f'DEBUG: {msg}' % objects) - def limit(self, limit: Optional[int]) -> 'QueryBuilder': + def limit(self, limit: int | None) -> 'QueryBuilder': """Set the limit (nr of rows to return) :param limit: integers of number of rows of rows to return @@ -912,7 +895,7 @@ def limit(self, limit: Optional[int]) -> 'QueryBuilder': self._limit = limit return self - def offset(self, offset: Optional[int]) -> 'QueryBuilder': + def offset(self, offset: int | None) -> 'QueryBuilder': """Set the offset. If offset is set, that many rows are skipped before returning. *offset* = 0 is the same as omitting setting the offset. If both offset and limit appear, @@ -1068,7 +1051,7 @@ def count(self) -> int: """ return self._impl.count(self.as_dict()) - def iterall(self, batch_size: Optional[int] = 100) -> Iterable[List[Any]]: + def iterall(self, batch_size: int | None = 100) -> Iterable[list[Any]]: """Same as :meth:`.all`, but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. 
You might also want to read the SQLAlchemy documentation on @@ -1087,7 +1070,7 @@ def iterall(self, batch_size: Optional[int] = 100) -> Iterable[List[Any]]: yield item - def iterdict(self, batch_size: Optional[int] = 100) -> Iterable[Dict[str, Dict[str, Any]]]: + def iterdict(self, batch_size: int | None = 100) -> Iterable[dict[str, dict[str, Any]]]: """Same as :meth:`.dict`, but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. You might also want to read the SQLAlchemy documentation on @@ -1130,7 +1113,7 @@ def all(self, batch_size: int | None = None, flat: bool = False) -> list[list[An return [projection for entry in matches for projection in entry] - def one(self) -> List[Any]: + def one(self) -> list[Any]: """Executes the query asking for exactly one results. Will raise an exception if this is not the case: @@ -1152,7 +1135,7 @@ def one(self) -> List[Any]: raise NotExistent('No result was found') return res[0] - def dict(self, batch_size: Optional[int] = None) -> List[Dict[str, Dict[str, Any]]]: + def dict(self, batch_size: int | None = None) -> list[dict[str, dict[str, Any]]]: """Executes the full query with the order of the rows as returned by the backend. the order inside each row is given by the order of the vertices in the path and the order of the projections for each vertice in the path. @@ -1201,8 +1184,9 @@ def dict(self, batch_size: Optional[int] = None) -> List[Dict[str, Dict[str, Any def _get_ormclass( - cls: Union[None, EntityClsType, Sequence[EntityClsType]], entity_type: Union[None, str, Sequence[str]] -) -> Tuple[EntityTypes, List[Classifier]]: + cls: EntityClsType | Sequence[EntityClsType] | None, + entity_type: str | Sequence[str] | None, +) -> tuple[EntityTypes, list[Classifier]]: """Get ORM classifiers from either class(es) or ormclass_type_string(s). :param cls: a class or tuple/set/list of classes that are either AiiDA ORM classes or backend ORM classes. 
@@ -1241,7 +1225,7 @@ def _get_ormclass( return ormclass, classifiers -def _get_ormclass_from_cls(cls: EntityClsType) -> Tuple[EntityTypes, Classifier]: +def _get_ormclass_from_cls(cls: EntityClsType) -> tuple[EntityTypes, Classifier]: """Return the correct classifiers for the QueryBuilder from an ORM class. :param cls: an AiiDA ORM class or backend ORM class. @@ -1297,7 +1281,7 @@ def _get_ormclass_from_cls(cls: EntityClsType) -> Tuple[EntityTypes, Classifier] return ormclass, classifiers -def _get_ormclass_from_str(type_string: str) -> Tuple[EntityTypes, Classifier]: +def _get_ormclass_from_str(type_string: str) -> tuple[EntityTypes, Classifier]: """Return the correct classifiers for the QueryBuilder from an ORM type string. :param type_string: type string for ORM class @@ -1379,7 +1363,7 @@ def _get_process_type_filter(classifiers: Classifier, subclassing: bool) -> dict value = classifiers.process_type_string assert value is not None - filters: Dict[str, Any] + filters: dict[str, Any] if not subclassing: filters = {'==': value} @@ -1425,7 +1409,7 @@ class _QueryTagMap: def __init__(self): """Construct a new instance.""" - self._tag_to_type: Dict[str, Union[None, EntityTypes]] = {} + self._tag_to_type: dict[str, EntityTypes | None] = {} # A dictionary for classes passed to the tag given to them # Everything is specified with unique tags, which are strings. 
# But somebody might not care about giving tags, so to do @@ -1438,7 +1422,7 @@ def __init__(self): # The cls_to_tag_map in this case would be: # {PwCalculation: {'pwcalc'}, StructureData: {'structure'}} - self._cls_to_tag_map: Dict[Any, Set[str]] = {} + self._cls_to_tag_map: dict[Any, set[str]] = {} def __repr__(self) -> str: return repr(list(self._tag_to_type)) @@ -1452,8 +1436,8 @@ def __iter__(self): def add( self, tag: str, - etype: Union[None, EntityTypes] = None, - klasses: Union[None, EntityClsType, Sequence[EntityClsType]] = None, + etype: EntityTypes | None = None, + klasses: EntityClsType | Sequence[EntityClsType] | None = None, ) -> None: """Add a tag.""" self._tag_to_type[tag] = etype @@ -1468,7 +1452,7 @@ def remove(self, tag: str) -> None: for tags in self._cls_to_tag_map.values(): tags.discard(tag) - def get(self, tag_or_cls: Union[str, EntityClsType]) -> str: + def get(self, tag_or_cls: str | EntityClsType) -> str: """Return the tag or, given a class(es), map to a tag. :raises ValueError: if the tag is not found, or the class(es) does not map to a single tag diff --git a/src/aiida/orm/users.py b/src/aiida/orm/users.py index bb091b9fa0..87830e85ae 100644 --- a/src/aiida/orm/users.py +++ b/src/aiida/orm/users.py @@ -8,7 +8,9 @@ ########################################################################### """Module for the ORM user class.""" -from typing import TYPE_CHECKING, Optional, Tuple, Type +from __future__ import annotations + +from typing import TYPE_CHECKING from aiida.common import exceptions from aiida.common.pydantic import MetadataField @@ -27,10 +29,10 @@ class UserCollection(entities.Collection['User']): """The collection of users stored in a backend.""" @staticmethod - def _entity_base_cls() -> Type['User']: + def _entity_base_cls() -> type['User']: return User - def get_or_create(self, email: str, **kwargs) -> Tuple[bool, 'User']: + def get_or_create(self, email: str, **kwargs) -> tuple[bool, 'User']: """Get the existing user with a 
given email address or create an unstored one :param kwargs: The properties of the user to get or create @@ -43,12 +45,12 @@ def get_or_create(self, email: str, **kwargs) -> Tuple[bool, 'User']: except exceptions.NotExistent: return True, User(backend=self.backend, email=email, **kwargs) - def get_default(self) -> Optional['User']: + def get_default(self) -> 'User' | None: """Get the current default user""" return self.backend.default_user -class User(entities.Entity['BackendUser', UserCollection]): +class User(entities.Entity['BackendUser']): """AiiDA User""" _CLS_COLLECTION = UserCollection @@ -65,7 +67,7 @@ def __init__( first_name: str = '', last_name: str = '', institution: str = '', - backend: Optional['StorageBackend'] = None, + backend: 'StorageBackend' | None = None, ): """Create a new `User`.""" backend = backend or get_manager().get_profile_storage() diff --git a/src/aiida/orm/utils/links.py b/src/aiida/orm/utils/links.py index 4129fd8774..94169dcd51 100644 --- a/src/aiida/orm/utils/links.py +++ b/src/aiida/orm/utils/links.py @@ -8,9 +8,11 @@ ########################################################################### """Utilities for dealing with links between nodes.""" +from __future__ import annotations + from collections import OrderedDict from collections.abc import Mapping -from typing import TYPE_CHECKING, Generator, Iterator, List, NamedTuple, Optional +from typing import TYPE_CHECKING, Generator, Iterator, NamedTuple from aiida.common import exceptions from aiida.common.lang import type_check @@ -42,7 +44,11 @@ class LinkQuadruple(NamedTuple): def link_triple_exists( - source: 'Node', target: 'Node', link_type: 'LinkType', link_label: str, backend: Optional['StorageBackend'] = None + source: 'Node', + target: 'Node', + link_type: 'LinkType', + link_label: str, + backend: 'StorageBackend' | None = None, ) -> bool: """Return whether a link with the given type and label exists between the given source and target node. 
@@ -75,7 +81,11 @@ def link_triple_exists( def validate_link( - source: 'Node', target: 'Node', link_type: 'LinkType', link_label: str, backend: Optional['StorageBackend'] = None + source: 'Node', + target: 'Node', + link_type: 'LinkType', + link_label: str, + backend: 'StorageBackend' | None = None, ) -> None: """Validate adding a link of the given type and label from a given node to ourself. @@ -237,7 +247,7 @@ class LinkManager: incoming nodes or link labels, respectively. """ - def __init__(self, link_triples: List[LinkTriple]): + def __init__(self, link_triples: list[LinkTriple]): """Initialise the collection.""" self.link_triples = link_triples @@ -280,7 +290,7 @@ def one(self) -> LinkTriple: raise ValueError('no entries found') - def first(self) -> Optional[LinkTriple]: + def first(self) -> LinkTriple | None: """Return the first entry from the iterator. :return: LinkTriple instance or None if no entries were matched @@ -290,28 +300,28 @@ def first(self) -> Optional[LinkTriple]: return None - def all(self) -> List[LinkTriple]: + def all(self) -> list[LinkTriple]: """Return all entries from the list. :return: list of LinkTriple instances """ return self.link_triples - def all_nodes(self) -> List['Node']: + def all_nodes(self) -> list['Node']: """Return a list of all nodes. :return: list of nodes """ return [entry.node for entry in self.all()] - def all_link_pairs(self) -> List[LinkPair]: + def all_link_pairs(self) -> list[LinkPair]: """Return a list of all link pairs. :return: list of LinkPair instances """ return [LinkPair(entry.link_type, entry.link_label) for entry in self.all()] - def all_link_labels(self) -> List[str]: + def all_link_labels(self) -> list[str]: """Return a list of all link labels. 
:return: list of link labels diff --git a/src/aiida/orm/utils/remote.py b/src/aiida/orm/utils/remote.py index 1e7727d68f..e29d5774f4 100644 --- a/src/aiida/orm/utils/remote.py +++ b/src/aiida/orm/utils/remote.py @@ -11,13 +11,13 @@ from __future__ import annotations import os -import typing as t +from typing import TYPE_CHECKING, Any from aiida import orm from aiida.cmdline.utils import echo from aiida.orm.nodes.data.remote.base import RemoteData -if t.TYPE_CHECKING: +if TYPE_CHECKING: from collections.abc import Sequence from aiida.orm.implementation import StorageBackend @@ -102,7 +102,7 @@ def get_calcjob_remote_paths( from aiida.common import timezone from aiida.orm import CalcJobNode - filters_calc: dict[str, t.Any] = {} + filters_calc: dict[str, Any] = {} filters_computer = {} filters_remote = {} From bf0fee49b1aad7dc8e3906233771ed7eec8f6b9b Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 17:14:29 +0200 Subject: [PATCH 2/7] Try fix for collection typing --- src/aiida/cmdline/utils/common.py | 14 +++++++------- src/aiida/orm/entities.py | 16 ++++++++-------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/aiida/cmdline/utils/common.py b/src/aiida/cmdline/utils/common.py index a14f1bd65d..4782d4a766 100644 --- a/src/aiida/cmdline/utils/common.py +++ b/src/aiida/cmdline/utils/common.py @@ -64,7 +64,7 @@ def get_env_with_venv_bin() -> MutableMapping: config = get_config() currenv = os.environ.copy() - currenv['PATH'] = f"{os.path.dirname(sys.executable)}:{currenv['PATH']}" + currenv['PATH'] = f'{os.path.dirname(sys.executable)}:{currenv["PATH"]}' currenv['AIIDA_PATH'] = config.dirpath currenv['PYTHONUNBUFFERED'] = 'True' @@ -180,18 +180,18 @@ def get_node_info(node: orm.Node, include_summary: bool = True) -> str: nodes_output = node.base.links.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)) if nodes_input: - result += f"\n{format_nested_links(nodes_input.nested(), headers=['Inputs', 'PK', 'Type'])}" + result += 
f'\n{format_nested_links(nodes_input.nested(), headers=["Inputs", "PK", "Type"])}' if nodes_output: - result += f"\n{format_nested_links(nodes_output.nested(), headers=['Outputs', 'PK', 'Type'])}" + result += f'\n{format_nested_links(nodes_output.nested(), headers=["Outputs", "PK", "Type"])}' if nodes_caller: links = sorted(nodes_caller.all(), key=lambda x: x.node.ctime) - result += f"\n{format_flat_links(links, headers=['Caller', 'PK', 'Type'])}" + result += f'\n{format_flat_links(links, headers=["Caller", "PK", "Type"])}' if nodes_called: links = sorted(nodes_called.all(), key=lambda x: x.node.ctime) - result += f"\n{format_flat_links(links, headers=['Called', 'PK', 'Type'])}" + result += f'\n{format_flat_links(links, headers=["Called", "PK", "Type"])}' log_messages = orm.Log.collection.get_logs_for(node) @@ -253,7 +253,7 @@ def format_recursive(links, depth=0): table = [] for depth, label, pk, class_name in format_recursive(links): - table.append([f"{' ' * (depth * indent_size)}{label}", pk, class_name]) + table.append([f'{" " * (depth * indent_size)}{label}', pk, class_name]) result = f'\n{tabulate(table, headers=headers)}' tb.PRESERVE_WHITESPACE = False @@ -279,7 +279,7 @@ def get_calcjob_report(calcjob: orm.CalcJobNode) -> str: report = [] if calcjob_state == CalcJobState.WITHSCHEDULER: - state_string = f"{calcjob_state}, scheduler state: {scheduler_state if scheduler_state else '(unknown)'}" + state_string = f'{calcjob_state}, scheduler state: {scheduler_state if scheduler_state else "(unknown)"}' else: state_string = f'{calcjob_state}' diff --git a/src/aiida/orm/entities.py b/src/aiida/orm/entities.py index 7b181b7ecb..4db6363d9b 100644 --- a/src/aiida/orm/entities.py +++ b/src/aiida/orm/entities.py @@ -177,7 +177,7 @@ def count(self, filters: 'FilterType' | None = None) -> int: class Entity(abc.ABC, Generic[BackendEntityType], metaclass=EntityFieldMeta): """An AiiDA entity""" - _CLS_COLLECTION: type[Collection[Self]] = Collection + _CLS_COLLECTION: 
type[Collection[EntityType]] = Collection _logger = log.AIIDA_LOGGER.getChild('orm.entities') class Model(BaseModel, defer_build=True): @@ -207,7 +207,7 @@ def model_to_orm_field_values(cls, model: Model) -> dict[str, Any]: if field_value is None: continue - orm_class: type[Entity] | str | None = None + orm_class: type['Entity'] | str | None = None if orm_class := get_metadata(field, 'orm_class'): if isinstance(orm_class, str): try: @@ -240,7 +240,7 @@ def _to_model(self, repository_path: Path | None = None) -> Model: return self.Model(**fields) @classmethod - def _from_model(cls, model: Model) -> Self: + def _from_model(cls: type[EntityType], model: Model) -> EntityType: """Return an entity instance from an instance of its model.""" fields = cls.model_to_orm_field_values(model) return cls(**fields) @@ -267,7 +267,7 @@ def serialize(self, repository_path: Path | None = None) -> dict[str, Any]: return self._to_model(repository_path).model_dump() @classmethod - def from_serialized(cls, **kwargs: dict[str, Any]) -> EntityType: + def from_serialized(cls: type[EntityType], **kwargs: dict[str, Any]) -> EntityType: """Construct an entity instance from JSON serialized data.""" cls._logger.warning( 'Serialization through pydantic is still an experimental feature and might break in future releases.' @@ -275,7 +275,7 @@ def from_serialized(cls, **kwargs: dict[str, Any]) -> EntityType: return cls._from_model(cls.Model(**kwargs)) # type: ignore[arg-type] @classproperty - def objects(cls: EntityType) -> Collection[Self]: # noqa: N805 + def objects(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 """Get a collection for objects of this type, with the default backend. .. deprecated:: This will be removed in v3, use ``collection`` instead. 
@@ -286,7 +286,7 @@ def objects(cls: EntityType) -> Collection[Self]: # noqa: N805 return cls.collection @classproperty - def collection(cls) -> Collection[Self]: # noqa: N805 + def collection(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 """Get a collection for objects of this type, with the default backend. :return: an object that can be used to access entities of this type @@ -294,7 +294,7 @@ def collection(cls) -> Collection[Self]: # noqa: N805 return cls._CLS_COLLECTION.get_cached(cls, get_manager().get_profile_storage()) @classmethod - def get_collection(cls, backend: 'StorageBackend'): + def get_collection(cls: type[EntityType], backend: 'StorageBackend') -> Collection[EntityType]: """Get a collection for objects of this type for a given backend. .. note:: Use the ``collection`` class property instead if the currently loaded backend or backend of the @@ -306,7 +306,7 @@ def get_collection(cls, backend: 'StorageBackend'): return cls._CLS_COLLECTION.get_cached(cls, backend) @classmethod - def get(cls, **kwargs): + def get(cls: type[EntityType], **kwargs) -> EntityType | None: """Get an entity of the collection matching the given filters. .. deprecated: Will be removed in v3, use `Entity.collection.get` instead. 
From 6105cef759fe8e264ce0447bd20c97acaceb83c3 Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 17:37:00 +0200 Subject: [PATCH 3/7] Restore `CollectionType` generic --- src/aiida/orm/authinfos.py | 2 +- src/aiida/orm/comments.py | 2 +- src/aiida/orm/computers.py | 2 +- src/aiida/orm/entities.py | 11 ++++++----- src/aiida/orm/groups.py | 2 +- src/aiida/orm/logs.py | 2 +- src/aiida/orm/nodes/node.py | 2 +- src/aiida/orm/users.py | 2 +- 8 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/aiida/orm/authinfos.py b/src/aiida/orm/authinfos.py index 5c8adb9451..eeb3c6ae50 100644 --- a/src/aiida/orm/authinfos.py +++ b/src/aiida/orm/authinfos.py @@ -44,7 +44,7 @@ def delete(self, pk: int) -> None: self._backend.authinfos.delete(pk) -class AuthInfo(entities.Entity['BackendAuthInfo']): +class AuthInfo(entities.Entity['BackendAuthInfo', AuthInfoCollection]): """ORM class that models the authorization information that allows a `User` to connect to a `Computer`.""" _CLS_COLLECTION = AuthInfoCollection diff --git a/src/aiida/orm/comments.py b/src/aiida/orm/comments.py index 964101327a..02891baef4 100644 --- a/src/aiida/orm/comments.py +++ b/src/aiida/orm/comments.py @@ -64,7 +64,7 @@ def delete_many(self, filters: dict) -> list[int]: return self._backend.comments.delete_many(filters) -class Comment(entities.Entity['BackendComment']): +class Comment(entities.Entity['BackendComment', CommentCollection]): """Base class to map a DbComment that represents a comment attached to a certain Node.""" _CLS_COLLECTION = CommentCollection diff --git a/src/aiida/orm/computers.py b/src/aiida/orm/computers.py index 9b5463ce92..82e54e76f7 100644 --- a/src/aiida/orm/computers.py +++ b/src/aiida/orm/computers.py @@ -63,7 +63,7 @@ def delete(self, pk: int) -> None: return self._backend.computers.delete(pk) -class Computer(entities.Entity['BackendComputer']): +class Computer(entities.Entity['BackendComputer', ComputerCollection]): """Computer entity.""" _logger = 
logging.getLogger(__name__) diff --git a/src/aiida/orm/entities.py b/src/aiida/orm/entities.py index 4db6363d9b..0910c29316 100644 --- a/src/aiida/orm/entities.py +++ b/src/aiida/orm/entities.py @@ -35,6 +35,7 @@ __all__ = ('Collection', 'Entity', 'EntityTypes') +CollectionType = TypeVar('CollectionType', bound='Collection') EntityType = TypeVar('EntityType', bound='Entity') BackendEntityType = TypeVar('BackendEntityType', bound='BackendEntity') @@ -174,10 +175,10 @@ def count(self, filters: 'FilterType' | None = None) -> int: return self.query(filters=filters).count() -class Entity(abc.ABC, Generic[BackendEntityType], metaclass=EntityFieldMeta): +class Entity(abc.ABC, Generic[BackendEntityType, CollectionType], metaclass=EntityFieldMeta): """An AiiDA entity""" - _CLS_COLLECTION: type[Collection[EntityType]] = Collection + _CLS_COLLECTION: type[CollectionType] = Collection # type: ignore[assignment] _logger = log.AIIDA_LOGGER.getChild('orm.entities') class Model(BaseModel, defer_build=True): @@ -275,7 +276,7 @@ def from_serialized(cls: type[EntityType], **kwargs: dict[str, Any]) -> EntityTy return cls._from_model(cls.Model(**kwargs)) # type: ignore[arg-type] @classproperty - def objects(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 + def objects(cls: type[EntityType]) -> CollectionType: # noqa: N805 """Get a collection for objects of this type, with the default backend. .. deprecated:: This will be removed in v3, use ``collection`` instead. @@ -286,7 +287,7 @@ def objects(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 return cls.collection @classproperty - def collection(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 + def collection(cls: type[EntityType]) -> CollectionType: # noqa: N805 """Get a collection for objects of this type, with the default backend. 
:return: an object that can be used to access entities of this type @@ -294,7 +295,7 @@ def collection(cls: type[EntityType]) -> Collection[EntityType]: # noqa: N805 return cls._CLS_COLLECTION.get_cached(cls, get_manager().get_profile_storage()) @classmethod - def get_collection(cls: type[EntityType], backend: 'StorageBackend') -> Collection[EntityType]: + def get_collection(cls: type[EntityType], backend: 'StorageBackend') -> CollectionType: """Get a collection for objects of this type for a given backend. .. note:: Use the ``collection`` class property instead if the currently loaded backend or backend of the diff --git a/src/aiida/orm/groups.py b/src/aiida/orm/groups.py index 23807cf7e9..8e0b6ba5d4 100644 --- a/src/aiida/orm/groups.py +++ b/src/aiida/orm/groups.py @@ -106,7 +106,7 @@ def extras(self) -> extras.EntityExtras: return extras.EntityExtras(self._group) -class Group(entities.Entity['BackendGroup']): +class Group(entities.Entity['BackendGroup', GroupCollection]): """An AiiDA ORM implementation of group of nodes.""" __type_string: ClassVar[str | None] diff --git a/src/aiida/orm/logs.py b/src/aiida/orm/logs.py index 3a56e4baa3..e03d93c115 100644 --- a/src/aiida/orm/logs.py +++ b/src/aiida/orm/logs.py @@ -126,7 +126,7 @@ def delete_many(self, filters: 'FilterType') -> list[int]: return self._backend.logs.delete_many(filters) -class Log(entities.Entity['BackendLog']): +class Log(entities.Entity['BackendLog', LogCollection]): """An AiiDA Log entity. 
Corresponds to a logged message against a particular AiiDA node.""" _CLS_COLLECTION = LogCollection diff --git a/src/aiida/orm/nodes/node.py b/src/aiida/orm/nodes/node.py index 68f12ec6ed..3453c41c24 100644 --- a/src/aiida/orm/nodes/node.py +++ b/src/aiida/orm/nodes/node.py @@ -143,7 +143,7 @@ def links(self) -> 'NodeLinks': return self._node._CLS_NODE_LINKS(self._node) -class Node(Entity['BackendNode'], metaclass=AbstractNodeMeta): +class Node(Entity['BackendNode', NodeCollection], metaclass=AbstractNodeMeta): """Base class for all nodes in AiiDA. Stores attributes starting with an underscore. diff --git a/src/aiida/orm/users.py b/src/aiida/orm/users.py index 87830e85ae..a7148c6d7f 100644 --- a/src/aiida/orm/users.py +++ b/src/aiida/orm/users.py @@ -50,7 +50,7 @@ def get_default(self) -> 'User' | None: return self.backend.default_user -class User(entities.Entity['BackendUser']): +class User(entities.Entity['BackendUser', UserCollection]): """AiiDA User""" _CLS_COLLECTION = UserCollection From abbe59f428e96135f10dca241ce38f37501565cf Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 17:44:37 +0200 Subject: [PATCH 4/7] Another fix --- src/aiida/orm/entities.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/aiida/orm/entities.py b/src/aiida/orm/entities.py index 0910c29316..c7ffb66d67 100644 --- a/src/aiida/orm/entities.py +++ b/src/aiida/orm/entities.py @@ -91,7 +91,7 @@ def __call__(self, backend: 'StorageBackend') -> Self: """Get or create a cached collection using a new backend.""" if backend is self._backend: return self - return self.get_cached(self.entity_type, backend=backend) # type: ignore[arg-type] + return self.get_cached(self.entity_type, backend=backend) @property def entity_type(self) -> type[EntityType]: @@ -208,7 +208,6 @@ def model_to_orm_field_values(cls, model: Model) -> dict[str, Any]: if field_value is None: continue - orm_class: type['Entity'] | str | None = None if orm_class := get_metadata(field, 
'orm_class'): if isinstance(orm_class, str): try: From 8fc8ffa44e48a6ab03fc012d6850912b9de057a7 Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 18:25:38 +0200 Subject: [PATCH 5/7] Fix tests --- src/aiida/orm/groups.py | 2 ++ tests/orm/test_fields/fields_AuthInfo.yml | 4 ++-- tests/orm/test_fields/fields_Computer.yml | 2 +- tests/orm/test_fields/fields_Group.yml | 6 ++---- tests/orm/test_fields/fields_Log.yml | 2 +- .../fields_aiida.data.core.array.ArrayData.yml | 8 ++++---- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/aiida/orm/groups.py b/src/aiida/orm/groups.py index 8e0b6ba5d4..81ee4199c4 100644 --- a/src/aiida/orm/groups.py +++ b/src/aiida/orm/groups.py @@ -123,6 +123,8 @@ class Model(entities.Entity.Model): time: Optional[datetime.datetime] = MetadataField( description='The creation time of the node', is_attribute=False ) + label: str = MetadataField(description='The group label', is_attribute=False) + description: Optional[str] = MetadataField(description='The group description', is_attribute=False) extras: Optional[dict[str, Any]] = MetadataField( description='The group extras', is_attribute=False, diff --git a/tests/orm/test_fields/fields_AuthInfo.yml b/tests/orm/test_fields/fields_AuthInfo.yml index 505c96da91..8aedb8100b 100644 --- a/tests/orm/test_fields/fields_AuthInfo.yml +++ b/tests/orm/test_fields/fields_AuthInfo.yml @@ -1,6 +1,6 @@ -auth_params: QbDictField('auth_params', dtype=typing.Dict[str, typing.Any], is_attribute=False) +auth_params: QbDictField('auth_params', dtype=dict[str, typing.Any], is_attribute=False) computer: QbNumericField('computer', dtype=, is_attribute=False) enabled: QbField('enabled', dtype=, is_attribute=False) -metadata: QbDictField('metadata', dtype=typing.Dict[str, typing.Any], is_attribute=False) +metadata: QbDictField('metadata', dtype=dict[str, typing.Any], is_attribute=False) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) user: QbNumericField('user', 
dtype=, is_attribute=False) diff --git a/tests/orm/test_fields/fields_Computer.yml b/tests/orm/test_fields/fields_Computer.yml index 7d4e37168b..11f175cc0c 100644 --- a/tests/orm/test_fields/fields_Computer.yml +++ b/tests/orm/test_fields/fields_Computer.yml @@ -1,7 +1,7 @@ description: QbStrField('description', dtype=, is_attribute=False) hostname: QbStrField('hostname', dtype=, is_attribute=False) label: QbStrField('label', dtype=, is_attribute=False) -metadata: QbDictField('metadata', dtype=typing.Dict[str, typing.Any], is_attribute=False) +metadata: QbDictField('metadata', dtype=dict[str, typing.Any], is_attribute=False) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) scheduler_type: QbStrField('scheduler_type', dtype=, is_attribute=False) transport_type: QbStrField('transport_type', dtype=, is_attribute=False) diff --git a/tests/orm/test_fields/fields_Group.yml b/tests/orm/test_fields/fields_Group.yml index 537c76a11d..824ecedec7 100644 --- a/tests/orm/test_fields/fields_Group.yml +++ b/tests/orm/test_fields/fields_Group.yml @@ -1,7 +1,5 @@ -description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) -label: QbStrField('label', dtype=, is_attribute=False) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) time: QbNumericField('time', dtype=typing.Optional[datetime.datetime], is_attribute=False) type_string: QbStrField('type_string', dtype=, is_attribute=False) diff --git a/tests/orm/test_fields/fields_Log.yml b/tests/orm/test_fields/fields_Log.yml index 90bf0a5b0b..faba9b2868 100644 --- a/tests/orm/test_fields/fields_Log.yml +++ b/tests/orm/test_fields/fields_Log.yml @@ -2,7 +2,7 @@ dbnode_id: QbNumericField('dbnode_id', dtype=, 
is_attribute=False) levelname: QbStrField('levelname', dtype=, is_attribute=False) loggername: QbStrField('loggername', dtype=, is_attribute=False) message: QbStrField('message', dtype=, is_attribute=False) -metadata: QbDictField('metadata', dtype=typing.Dict[str, typing.Any], is_attribute=False) +metadata: QbDictField('metadata', dtype=dict[str, typing.Any], is_attribute=False) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) time: QbNumericField('time', dtype=, is_attribute=False) uuid: QbStrField('uuid', dtype=, is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.ArrayData.yml b/tests/orm/test_fields/fields_aiida.data.core.array.ArrayData.yml index c31304e6f5..3b58b3bdfd 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.ArrayData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.ArrayData.yml @@ -1,11 +1,11 @@ arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,7 +13,7 
@@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) From e16d1323cbce68c28bc05485b51d79c98ce9314f Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 18:58:35 +0200 Subject: [PATCH 6/7] Fix enum --- src/aiida/orm/nodes/data/enum.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/aiida/orm/nodes/data/enum.py b/src/aiida/orm/nodes/data/enum.py index 45adb44d62..11252b62ea 100644 --- a/src/aiida/orm/nodes/data/enum.py +++ b/src/aiida/orm/nodes/data/enum.py @@ -17,7 +17,7 @@ class Color(Enum): from __future__ import annotations -from enum import Enum +import enum from typing import Any, TypeVar from plumpy.loaders import get_object_loader @@ -30,10 +30,10 @@ class Color(Enum): __all__ = ('EnumData',) -EnumType = TypeVar('EnumType', bound=Enum) +EnumType = TypeVar('EnumType', bound=enum.Enum) -@to_aiida_type.register(Enum) +@to_aiida_type.register(enum.Enum) def _(value): return EnumData(member=value) @@ -53,14 +53,14 @@ class of the enumeration) in the ``KEY_NAME``, ``KEY_VALUE`` and ``KEY_IDENTIFIE KEY_IDENTIFIER = 'identifier' class Model(Data.Model): - member: Enum = MetadataField( + member: enum.Enum = MetadataField( description='The member name.', orm_to_model=lambda node, _: node.get_member(), # type: ignore[attr-defined] ) - def __init__(self, member: Enum, *args, **kwargs): + def __init__(self, 
member: enum.Enum, *args, **kwargs): """Construct the node for the to enum member that is to be wrapped.""" - type_check(member, Enum) + type_check(member, enum.Enum) super().__init__(*args, **kwargs) data = { @@ -115,7 +115,7 @@ def get_member(self) -> EnumType: # type: ignore[misc, type-var] def __eq__(self, other: Any) -> bool: """Return whether the other object is equivalent to ourselves.""" - if isinstance(other, Enum): + if isinstance(other, enum.Enum): try: return self.get_member() == other except (ImportError, ValueError): From bbd8a1377c4c135ed6325f55457168b65307ecc2 Mon Sep 17 00:00:00 2001 From: Edan Bainglass Date: Sun, 28 Sep 2025 20:00:41 +0200 Subject: [PATCH 7/7] Update tests --- tests/orm/test_fields/fields_Group.yml | 2 ++ ..._aiida.data.core.array.bands.BandsData.yml | 21 +++++++++---------- ...da.data.core.array.kpoints.KpointsData.yml | 18 ++++++++-------- ...a.core.array.projection.ProjectionData.yml | 8 +++---- ...a.core.array.trajectory.TrajectoryData.yml | 10 ++++----- ...fields_aiida.data.core.array.xy.XyData.yml | 8 +++---- .../fields_aiida.data.core.base.BaseType.yml | 8 +++---- .../fields_aiida.data.core.bool.Bool.yml | 8 +++---- .../fields_aiida.data.core.cif.CifData.yml | 12 +++++------ .../fields_aiida.data.core.code.Code.yml | 8 +++---- ...a.data.core.code.abstract.AbstractCode.yml | 8 +++---- ...e.code.containerized.ContainerizedCode.yml | 8 +++---- ...data.core.code.installed.InstalledCode.yml | 8 +++---- ...a.data.core.code.portable.PortableCode.yml | 8 +++---- .../fields_aiida.data.core.dict.Dict.yml | 11 +++++----- .../fields_aiida.data.core.enum.EnumData.yml | 8 +++---- .../fields_aiida.data.core.float.Float.yml | 8 +++---- ...elds_aiida.data.core.folder.FolderData.yml | 8 +++---- .../fields_aiida.data.core.int.Int.yml | 8 +++---- ..._aiida.data.core.jsonable.JsonableData.yml | 8 +++---- .../fields_aiida.data.core.list.List.yml | 10 ++++----- ...ds_aiida.data.core.numeric.NumericType.yml | 8 +++---- 
...ds_aiida.data.core.orbital.OrbitalData.yml | 8 +++---- ...elds_aiida.data.core.remote.RemoteData.yml | 8 +++---- ...data.core.remote.stash.RemoteStashData.yml | 8 +++---- ...ash.compress.RemoteStashCompressedData.yml | 10 ++++----- ...ote.stash.custom.RemoteStashCustomData.yml | 10 ++++----- ...ote.stash.folder.RemoteStashFolderData.yml | 10 ++++----- ...da.data.core.singlefile.SinglefileData.yml | 8 +++---- .../fields_aiida.data.core.str.Str.yml | 8 +++---- ...iida.data.core.structure.StructureData.yml | 14 ++++++------- .../fields_aiida.data.core.upf.UpfData.yml | 8 +++---- .../fields_aiida.node.data.Data.yml | 11 ++++++---- .../fields_aiida.node.process.ProcessNode.yml | 11 ++++++---- ...de.process.calculation.CalculationNode.yml | 11 ++++++---- ...culation.calcfunction.CalcFunctionNode.yml | 11 ++++++---- ...rocess.calculation.calcjob.CalcJobNode.yml | 19 ++++++++++------- ...ida.node.process.workflow.WorkflowNode.yml | 11 ++++++---- ...ocess.workflow.workchain.WorkChainNode.yml | 11 ++++++---- ...workflow.workfunction.WorkFunctionNode.yml | 11 ++++++---- 40 files changed, 209 insertions(+), 183 deletions(-) diff --git a/tests/orm/test_fields/fields_Group.yml b/tests/orm/test_fields/fields_Group.yml index 824ecedec7..4e9f50a689 100644 --- a/tests/orm/test_fields/fields_Group.yml +++ b/tests/orm/test_fields/fields_Group.yml @@ -1,5 +1,7 @@ +description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) +label: QbStrField('label', dtype=, is_attribute=False) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) time: QbNumericField('time', dtype=typing.Optional[datetime.datetime], is_attribute=False) type_string: QbStrField('type_string', dtype=, is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.bands.BandsData.yml 
b/tests/orm/test_fields/fields_aiida.data.core.array.bands.BandsData.yml index f0bca48c81..b7b1d8e48f 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.bands.BandsData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.bands.BandsData.yml @@ -1,21 +1,20 @@ -array_labels: QbArrayField('array_labels', dtype=typing.Optional[typing.List[str]], - is_attribute=True) +array_labels: QbArrayField('array_labels', dtype=typing.Optional[list[str]], is_attribute=True) arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) -cell: QbArrayField('cell', dtype=typing.List[typing.List[float]], is_attribute=True) +cell: QbArrayField('cell', dtype=list[list[float]], is_attribute=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) -label_numbers: QbArrayField('label_numbers', dtype=typing.List[int], is_attribute=True) -labels: QbArrayField('labels', dtype=typing.List[str], is_attribute=True) -mesh: QbArrayField('mesh', dtype=typing.List[int], is_attribute=True) +label_numbers: QbArrayField('label_numbers', dtype=list[int], is_attribute=True) +labels: QbArrayField('labels', dtype=list[str], is_attribute=True) +mesh: QbArrayField('mesh', dtype=list[int], is_attribute=True) mtime: 
QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) -offset: QbArrayField('offset', dtype=typing.List[float], is_attribute=True) +offset: QbArrayField('offset', dtype=list[float], is_attribute=True) pbc1: QbField('pbc1', dtype=, is_attribute=True) pbc2: QbField('pbc2', dtype=, is_attribute=True) pbc3: QbField('pbc3', dtype=, is_attribute=True) @@ -23,7 +22,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) units: QbStrField('units', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.kpoints.KpointsData.yml b/tests/orm/test_fields/fields_aiida.data.core.array.kpoints.KpointsData.yml index 6d0aaa2b6f..b0c35d3da0 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.kpoints.KpointsData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.kpoints.KpointsData.yml @@ -1,19 +1,19 @@ arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) -cell: QbArrayField('cell', dtype=typing.List[typing.List[float]], is_attribute=True) +cell: QbArrayField('cell', dtype=list[list[float]], is_attribute=True) computer: 
QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) -label_numbers: QbArrayField('label_numbers', dtype=typing.List[int], is_attribute=True) -labels: QbArrayField('labels', dtype=typing.List[str], is_attribute=True) -mesh: QbArrayField('mesh', dtype=typing.List[int], is_attribute=True) +label_numbers: QbArrayField('label_numbers', dtype=list[int], is_attribute=True) +labels: QbArrayField('labels', dtype=list[str], is_attribute=True) +mesh: QbArrayField('mesh', dtype=list[int], is_attribute=True) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) -offset: QbArrayField('offset', dtype=typing.List[float], is_attribute=True) +offset: QbArrayField('offset', dtype=list[float], is_attribute=True) pbc1: QbField('pbc1', dtype=, is_attribute=True) pbc2: QbField('pbc2', dtype=, is_attribute=True) pbc3: QbField('pbc3', dtype=, is_attribute=True) @@ -21,7 +21,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.projection.ProjectionData.yml b/tests/orm/test_fields/fields_aiida.data.core.array.projection.ProjectionData.yml index c31304e6f5..3b58b3bdfd 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.projection.ProjectionData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.projection.ProjectionData.yml @@ -1,11 +1,11 @@ arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,7 +13,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: 
QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.trajectory.TrajectoryData.yml b/tests/orm/test_fields/fields_aiida.data.core.array.trajectory.TrajectoryData.yml index 87aaa30148..dd721d6615 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.trajectory.TrajectoryData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.trajectory.TrajectoryData.yml @@ -1,11 +1,11 @@ arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,10 +13,10 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', 
dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) -symbols: QbArrayField('symbols', dtype=typing.List[str], is_attribute=True) +symbols: QbArrayField('symbols', dtype=list[str], is_attribute=True) units_positions: QbStrField('units_positions', dtype=, is_attribute=True) units_times: QbStrField('units_times', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.array.xy.XyData.yml b/tests/orm/test_fields/fields_aiida.data.core.array.xy.XyData.yml index c31304e6f5..3b58b3bdfd 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.array.xy.XyData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.array.xy.XyData.yml @@ -1,11 +1,11 @@ arrays: QbDictField('arrays', dtype=typing.Optional[dict[str, bytes]], is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + 
is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,7 +13,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.base.BaseType.yml b/tests/orm/test_fields/fields_aiida.data.core.base.BaseType.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.base.BaseType.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.base.BaseType.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.bool.Bool.yml b/tests/orm/test_fields/fields_aiida.data.core.bool.Bool.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.bool.Bool.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.bool.Bool.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.cif.CifData.yml b/tests/orm/test_fields/fields_aiida.data.core.cif.CifData.yml index da4e1d40d9..99b1486b86 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.cif.CifData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.cif.CifData.yml @@ -1,13 +1,13 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) content: QbField('content', dtype=, is_attribute=True) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: 
QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) filename: QbStrField('filename', dtype=typing.Optional[str], is_attribute=True) -formulae: QbArrayField('formulae', dtype=typing.Optional[typing.List[str]], is_attribute=True) +formulae: QbArrayField('formulae', dtype=typing.Optional[list[str]], is_attribute=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) md5: QbStrField('md5', dtype=typing.Optional[str], is_attribute=True) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -16,10 +16,10 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) -spacegroup_numbers: QbArrayField('spacegroup_numbers', dtype=typing.Optional[typing.List[str]], +spacegroup_numbers: QbArrayField('spacegroup_numbers', dtype=typing.Optional[list[str]], is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) uuid: QbStrField('uuid', dtype=typing.Optional[str], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.code.Code.yml b/tests/orm/test_fields/fields_aiida.data.core.code.Code.yml index 8ebaa0804d..235ebffda7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.code.Code.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.code.Code.yml @@ -1,13 +1,13 @@ append_text: QbStrField('append_text', dtype=, is_attribute=True) -attributes: 
QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) default_calc_job_plugin: QbStrField('default_calc_job_plugin', dtype=typing.Optional[str], is_attribute=True) description: QbStrField('description', dtype=, is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) input_plugin: QbStrField('input_plugin', dtype=typing.Optional[str], is_attribute=True) is_local: QbField('is_local', dtype=typing.Optional[bool], is_attribute=True) label: QbStrField('label', dtype=, is_attribute=True) @@ -20,7 +20,7 @@ process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribut remote_exec_path: QbStrField('remote_exec_path', dtype=typing.Optional[str], is_attribute=True) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) use_double_quotes: QbField('use_double_quotes', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.code.abstract.AbstractCode.yml b/tests/orm/test_fields/fields_aiida.data.core.code.abstract.AbstractCode.yml index 4dc178d9fc..33939a8907 100644 --- 
a/tests/orm/test_fields/fields_aiida.data.core.code.abstract.AbstractCode.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.code.abstract.AbstractCode.yml @@ -1,13 +1,13 @@ append_text: QbStrField('append_text', dtype=, is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) default_calc_job_plugin: QbStrField('default_calc_job_plugin', dtype=typing.Optional[str], is_attribute=True) description: QbStrField('description', dtype=, is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=, is_attribute=True) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -16,7 +16,7 @@ prepend_text: QbStrField('prepend_text', dtype=, is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) use_double_quotes: QbField('use_double_quotes', dtype=, is_attribute=True) diff --git 
a/tests/orm/test_fields/fields_aiida.data.core.code.containerized.ContainerizedCode.yml b/tests/orm/test_fields/fields_aiida.data.core.code.containerized.ContainerizedCode.yml index e7f12a1a94..562909bce9 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.code.containerized.ContainerizedCode.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.code.containerized.ContainerizedCode.yml @@ -1,5 +1,5 @@ append_text: QbStrField('append_text', dtype=, is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbStrField('computer', dtype=, is_attribute=True) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -7,8 +7,8 @@ default_calc_job_plugin: QbStrField('default_calc_job_plugin', dtype=typing.Opti is_attribute=True) description: QbStrField('description', dtype=, is_attribute=True) engine_command: QbStrField('engine_command', dtype=, is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) filepath_executable: QbStrField('filepath_executable', dtype=, is_attribute=True) image_name: QbStrField('image_name', dtype=, is_attribute=True) label: QbStrField('label', dtype=, is_attribute=True) @@ -19,7 +19,7 @@ prepend_text: QbStrField('prepend_text', dtype=, is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', 
dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) use_double_quotes: QbField('use_double_quotes', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.code.installed.InstalledCode.yml b/tests/orm/test_fields/fields_aiida.data.core.code.installed.InstalledCode.yml index 15089b4a3d..6d8cef99e7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.code.installed.InstalledCode.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.code.installed.InstalledCode.yml @@ -1,13 +1,13 @@ append_text: QbStrField('append_text', dtype=, is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbStrField('computer', dtype=, is_attribute=True) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) default_calc_job_plugin: QbStrField('default_calc_job_plugin', dtype=typing.Optional[str], is_attribute=True) description: QbStrField('description', dtype=, is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) filepath_executable: QbStrField('filepath_executable', dtype=, is_attribute=True) label: QbStrField('label', dtype=, is_attribute=True) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,7 +17,7 @@ prepend_text: QbStrField('prepend_text', dtype=, is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], 
is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) use_double_quotes: QbField('use_double_quotes', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.code.portable.PortableCode.yml b/tests/orm/test_fields/fields_aiida.data.core.code.portable.PortableCode.yml index b874b26466..46e0cd7902 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.code.portable.PortableCode.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.code.portable.PortableCode.yml @@ -1,13 +1,13 @@ append_text: QbStrField('append_text', dtype=, is_attribute=True) -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) default_calc_job_plugin: QbStrField('default_calc_job_plugin', dtype=typing.Optional[str], is_attribute=True) description: QbStrField('description', dtype=, is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) filepath_executable: QbStrField('filepath_executable', dtype=, is_attribute=True) filepath_files: QbStrField('filepath_files', dtype=, is_attribute=False) label: QbStrField('label', dtype=, is_attribute=True) @@ -18,7 +18,7 @@ prepend_text: QbStrField('prepend_text', dtype=, is_attribute=True) process_type: 
QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) use_double_quotes: QbField('use_double_quotes', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.dict.Dict.yml b/tests/orm/test_fields/fields_aiida.data.core.dict.Dict.yml index 710d253b4d..f50e6bf638 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.dict.Dict.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.dict.Dict.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,10 +12,9 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: 
QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) uuid: QbStrField('uuid', dtype=typing.Optional[str], is_attribute=False) -value: QbDictField('value', dtype=typing.Dict[str, typing.Any], is_attribute=False, - is_subscriptable=True) +value: QbDictField('value', dtype=dict[str, typing.Any], is_attribute=False, is_subscriptable=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.enum.EnumData.yml b/tests/orm/test_fields/fields_aiida.data.core.enum.EnumData.yml index cfc3976079..497da0eadd 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.enum.EnumData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.enum.EnumData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], 
is_attribute=False) member: QbField('member', dtype=, is_attribute=True) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -13,7 +13,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.float.Float.yml b/tests/orm/test_fields/fields_aiida.data.core.float.Float.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.float.Float.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.float.Float.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) 
mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.folder.FolderData.yml b/tests/orm/test_fields/fields_aiida.data.core.folder.FolderData.yml index 5bee2ef441..6b733a12a3 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.folder.FolderData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.folder.FolderData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', 
dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.int.Int.yml b/tests/orm/test_fields/fields_aiida.data.core.int.Int.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.int.Int.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.int.Int.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', 
dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.jsonable.JsonableData.yml b/tests/orm/test_fields/fields_aiida.data.core.jsonable.JsonableData.yml index 1166fbc570..6eb9ec67d2 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.jsonable.JsonableData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.jsonable.JsonableData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + 
is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -14,7 +14,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.list.List.yml b/tests/orm/test_fields/fields_aiida.data.core.list.List.yml index 4edd6d3380..ba2233902f 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.list.List.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.list.List.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + 
is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,9 +12,9 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) uuid: QbStrField('uuid', dtype=typing.Optional[str], is_attribute=False) -value: QbArrayField('value', dtype=typing.List[typing.Any], is_attribute=True) +value: QbArrayField('value', dtype=list[typing.Any], is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.numeric.NumericType.yml b/tests/orm/test_fields/fields_aiida.data.core.numeric.NumericType.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.numeric.NumericType.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.numeric.NumericType.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', 
dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.orbital.OrbitalData.yml b/tests/orm/test_fields/fields_aiida.data.core.orbital.OrbitalData.yml index 5bee2ef441..6b733a12a3 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.orbital.OrbitalData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.orbital.OrbitalData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) 
description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.remote.RemoteData.yml b/tests/orm/test_fields/fields_aiida.data.core.remote.RemoteData.yml index 5086780659..c8679ee6d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.remote.RemoteData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.remote.RemoteData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', 
dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,7 +13,7 @@ process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribut remote_path: QbStrField('remote_path', dtype=typing.Optional[str], is_attribute=True) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.RemoteStashData.yml b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.RemoteStashData.yml index 35e6e4188e..9892cb2c9c 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.RemoteStashData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.RemoteStashData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', 
dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) stash_mode: QbField('stash_mode', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.compress.RemoteStashCompressedData.yml b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.compress.RemoteStashCompressedData.yml index 40e941f933..253aacb721 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.compress.RemoteStashCompressedData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.compress.RemoteStashCompressedData.yml @@ -1,11 +1,11 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: 
QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) dereference: QbField('dereference', dtype=, is_attribute=True) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -13,10 +13,10 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) -source_list: QbArrayField('source_list', dtype=typing.List[str], is_attribute=True) +source_list: QbArrayField('source_list', dtype=list[str], is_attribute=True) stash_mode: QbField('stash_mode', dtype=, is_attribute=True) target_basepath: QbStrField('target_basepath', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git 
a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.custom.RemoteStashCustomData.yml b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.custom.RemoteStashCustomData.yml index 82e177e738..c24bc93014 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.custom.RemoteStashCustomData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.custom.RemoteStashCustomData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,10 +12,10 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], 
is_attribute=True, is_subscriptable=True) -source_list: QbArrayField('source_list', dtype=typing.List[str], is_attribute=True) +source_list: QbArrayField('source_list', dtype=list[str], is_attribute=True) stash_mode: QbField('stash_mode', dtype=, is_attribute=True) target_basepath: QbStrField('target_basepath', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.folder.RemoteStashFolderData.yml b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.folder.RemoteStashFolderData.yml index 82e177e738..c24bc93014 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.remote.stash.folder.RemoteStashFolderData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.remote.stash.folder.RemoteStashFolderData.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,10 +12,10 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', 
dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) -source_list: QbArrayField('source_list', dtype=typing.List[str], is_attribute=True) +source_list: QbArrayField('source_list', dtype=list[str], is_attribute=True) stash_mode: QbField('stash_mode', dtype=, is_attribute=True) target_basepath: QbStrField('target_basepath', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.singlefile.SinglefileData.yml b/tests/orm/test_fields/fields_aiida.data.core.singlefile.SinglefileData.yml index 71f35c46f7..aebf040bcb 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.singlefile.SinglefileData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.singlefile.SinglefileData.yml @@ -1,11 +1,11 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) content: QbField('content', dtype=, is_attribute=True) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False, + is_subscriptable=True) filename: QbStrField('filename', dtype=typing.Optional[str], is_attribute=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -14,7 +14,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.str.Str.yml b/tests/orm/test_fields/fields_aiida.data.core.str.Str.yml index 457621f596..f484e9a3d7 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.str.Str.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.str.Str.yml @@ -1,10 +1,10 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False, + is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -12,7 +12,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.structure.StructureData.yml b/tests/orm/test_fields/fields_aiida.data.core.structure.StructureData.yml index 8e94962d01..09c76ed966 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.structure.StructureData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.structure.StructureData.yml @@ -1,12 +1,12 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) -cell: QbArrayField('cell', dtype=typing.List[typing.List[float]], is_attribute=True) +cell: QbArrayField('cell', dtype=list[list[float]], is_attribute=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], 
is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) -kinds: QbArrayField('kinds', dtype=typing.Optional[typing.List[dict]], is_attribute=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) +kinds: QbArrayField('kinds', dtype=typing.Optional[list[dict]], is_attribute=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) @@ -17,9 +17,9 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) -sites: QbArrayField('sites', dtype=typing.Optional[typing.List[dict]], is_attribute=True) +sites: QbArrayField('sites', dtype=typing.Optional[list[dict]], is_attribute=True) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) uuid: QbStrField('uuid', dtype=typing.Optional[str], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.data.core.upf.UpfData.yml b/tests/orm/test_fields/fields_aiida.data.core.upf.UpfData.yml index 71f35c46f7..aebf040bcb 100644 --- a/tests/orm/test_fields/fields_aiida.data.core.upf.UpfData.yml +++ b/tests/orm/test_fields/fields_aiida.data.core.upf.UpfData.yml @@ -1,11 +1,11 @@ -attributes: 
QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) content: QbField('content', dtype=, is_attribute=True) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], - is_attribute=False, is_subscriptable=True) +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, + is_subscriptable=True) filename: QbStrField('filename', dtype=typing.Optional[str], is_attribute=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -14,7 +14,7 @@ pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.data.Data.yml b/tests/orm/test_fields/fields_aiida.node.data.Data.yml index 5bee2ef441..383564fbf5 100644 --- a/tests/orm/test_fields/fields_aiida.node.data.Data.yml +++ b/tests/orm/test_fields/fields_aiida.node.data.Data.yml @@ 
-1,18 +1,21 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) description: QbStrField('description', dtype=typing.Optional[str], is_attribute=False) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) node_type: QbStrField('node_type', dtype=typing.Optional[str], is_attribute=False) pk: QbNumericField('pk', dtype=typing.Optional[int], is_attribute=False) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) source: QbDictField('source', dtype=typing.Optional[dict], is_attribute=True, is_subscriptable=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.ProcessNode.yml b/tests/orm/test_fields/fields_aiida.node.process.ProcessNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.ProcessNode.yml +++ 
b/tests/orm/test_fields/fields_aiida.node.process.ProcessNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], 
is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.calculation.CalculationNode.yml b/tests/orm/test_fields/fields_aiida.node.process.calculation.CalculationNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.calculation.CalculationNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.calculation.CalculationNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: 
QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.calculation.calcfunction.CalcFunctionNode.yml b/tests/orm/test_fields/fields_aiida.node.process.calculation.calcfunction.CalcFunctionNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.calculation.calcfunction.CalcFunctionNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.calculation.calcfunction.CalcFunctionNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, 
typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.calculation.calcjob.CalcJobNode.yml b/tests/orm/test_fields/fields_aiida.node.process.calculation.calcjob.CalcJobNode.yml index 8da4b34cb8..2e94fd9af7 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.calculation.calcjob.CalcJobNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.calculation.calcjob.CalcJobNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -7,7 +8,7 @@ 
detailed_job_info: QbDictField('detailed_job_info', dtype=typing.Optional[dict], exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) imported: QbField('imported', dtype=typing.Optional[bool], is_attribute=True) job_id: QbStrField('job_id', dtype=typing.Optional[str], is_attribute=True) @@ -22,15 +23,19 @@ process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attrib process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) remote_workdir: QbStrField('remote_workdir', dtype=typing.Optional[str], is_attribute=True) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) -retrieve_list: QbArrayField('retrieve_list', dtype=typing.Optional[typing.List[str]], +retrieve_list: QbArrayField('retrieve_list', dtype=typing.Optional[list[str]], is_attribute=True) -retrieve_temporary_list: QbArrayField('retrieve_temporary_list', dtype=typing.Optional[typing.List[str]], +retrieve_temporary_list: + QbArrayField('retrieve_temporary_list', dtype=typing.Optional[list[str]], is_attribute=True) -scheduler_lastchecktime: QbStrField('scheduler_lastchecktime', 
dtype=typing.Optional[str], +scheduler_lastchecktime: + QbStrField('scheduler_lastchecktime', dtype=typing.Optional[str], is_attribute=True) scheduler_state: QbStrField('scheduler_state', dtype=typing.Optional[str], is_attribute=True) sealed: QbField('sealed', dtype=, is_attribute=True) diff --git a/tests/orm/test_fields/fields_aiida.node.process.workflow.WorkflowNode.yml b/tests/orm/test_fields/fields_aiida.node.process.workflow.WorkflowNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.workflow.WorkflowNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.workflow.WorkflowNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: 
QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.workflow.workchain.WorkChainNode.yml b/tests/orm/test_fields/fields_aiida.node.process.workflow.workchain.WorkChainNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.workflow.workchain.WorkChainNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.workflow.workchain.WorkChainNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: 
QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False) diff --git a/tests/orm/test_fields/fields_aiida.node.process.workflow.workfunction.WorkFunctionNode.yml b/tests/orm/test_fields/fields_aiida.node.process.workflow.workfunction.WorkFunctionNode.yml index d8928ee1a4..fad354bbbd 100644 --- a/tests/orm/test_fields/fields_aiida.node.process.workflow.workfunction.WorkFunctionNode.yml +++ b/tests/orm/test_fields/fields_aiida.node.process.workflow.workfunction.WorkFunctionNode.yml @@ -1,4 +1,5 @@ -attributes: QbDictField('attributes', dtype=typing.Optional[typing.Dict[str, typing.Any]], +attributes: + QbDictField('attributes', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) computer: QbNumericField('computer', dtype=typing.Optional[int], is_attribute=False) ctime: QbNumericField('ctime', 
dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -6,7 +7,7 @@ description: QbStrField('description', dtype=typing.Optional[str], is_attribute= exception: QbStrField('exception', dtype=typing.Optional[str], is_attribute=True) exit_message: QbStrField('exit_message', dtype=typing.Optional[str], is_attribute=True) exit_status: QbNumericField('exit_status', dtype=typing.Optional[int], is_attribute=True) -extras: QbDictField('extras', dtype=typing.Optional[typing.Dict[str, typing.Any]], +extras: QbDictField('extras', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False, is_subscriptable=True) label: QbStrField('label', dtype=typing.Optional[str], is_attribute=False) mtime: QbNumericField('mtime', dtype=typing.Optional[datetime.datetime], is_attribute=False) @@ -17,9 +18,11 @@ process_label: QbStrField('process_label', dtype=typing.Optional[str], is_attrib process_state: QbStrField('process_state', dtype=typing.Optional[str], is_attribute=True) process_status: QbStrField('process_status', dtype=typing.Optional[str], is_attribute=True) process_type: QbStrField('process_type', dtype=typing.Optional[str], is_attribute=False) -repository_content: QbDictField('repository_content', dtype=typing.Optional[dict[str, +repository_content: + QbDictField('repository_content', dtype=typing.Optional[dict[str, bytes]], is_attribute=False) -repository_metadata: QbDictField('repository_metadata', dtype=typing.Optional[typing.Dict[str, +repository_metadata: + QbDictField('repository_metadata', dtype=typing.Optional[dict[str, typing.Any]], is_attribute=False) sealed: QbField('sealed', dtype=, is_attribute=True) user: QbNumericField('user', dtype=typing.Optional[int], is_attribute=False)