diff --git a/.ci/test_daemon.py b/.ci/test_daemon.py index eba74ccee6..4e9dbff2a7 100644 --- a/.ci/test_daemon.py +++ b/.ci/test_daemon.py @@ -304,6 +304,14 @@ def run_base_restart_workchain(): assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_TOO_BIG.status, node.exit_status # pylint: disable=no-member assert len(node.called) == 1 + # Check that overriding default handler enabled status works + inputs['add']['y'] = Int(1) + inputs['handler_overrides'] = Dict(dict={'disabled_handler': True}) + results, node = run.get_node(ArithmeticAddBaseWorkChain, **inputs) + assert not node.is_finished_ok, node.process_state + assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_ENABLED_DOOM.status, node.exit_status # pylint: disable=no-member + assert len(node.called) == 1 + def main(): """Launch a bunch of calculation jobs and workchains.""" diff --git a/.ci/workchains.py b/.ci/workchains.py index c511006697..110334f0ae 100644 --- a/.ci/workchains.py +++ b/.ci/workchains.py @@ -9,7 +9,7 @@ ########################################################################### from aiida.common import AttributeDict from aiida.engine import calcfunction, workfunction, WorkChain, ToContext, append_, while_, ExitCode -from aiida.engine import BaseRestartWorkChain, register_process_handler, ProcessHandlerReport +from aiida.engine import BaseRestartWorkChain, process_handler, ProcessHandlerReport from aiida.engine.persistence import ObjectLoader from aiida.orm import Int, List, Str from aiida.plugins import CalculationFactory @@ -38,6 +38,7 @@ def define(cls, spec): cls.results, ) spec.exit_code(100, 'ERROR_TOO_BIG', message='The sum was too big.') + spec.exit_code(110, 'ERROR_ENABLED_DOOM', message='You should not have done that.') def setup(self): """Call the `setup` of the `BaseRestartWorkChain` and then create the inputs dictionary in `self.ctx.inputs`. 
@@ -48,26 +49,28 @@ def setup(self): super().setup() self.ctx.inputs = AttributeDict(self.exposed_inputs(ArithmeticAddCalculation, 'add')) - -@register_process_handler(ArithmeticAddBaseWorkChain, priority=500) -def sanity_check_not_too_big(self, node): - """My puny brain cannot deal with numbers that I cannot count on my hand.""" - if node.is_finished_ok and node.outputs.sum > 10: - return ProcessHandlerReport(True, self.exit_codes.ERROR_TOO_BIG) - - -@register_process_handler(ArithmeticAddBaseWorkChain, priority=450, exit_codes=ExitCode(1000, 'Unicorn encountered')) -def a_magic_unicorn_appeared(self, node): - """As we all know unicorns do not exist so we should never have to deal with it.""" - raise RuntimeError('this handler should never even have been called') - - -@register_process_handler(ArithmeticAddBaseWorkChain, priority=400, exit_codes=ArithmeticAddCalculation.exit_codes.ERROR_NEGATIVE_NUMBER) -def error_negative_sum(self, node): - """What even is a negative number, how can I have minus three melons?!.""" - self.ctx.inputs.x = Int(abs(node.inputs.x.value)) - self.ctx.inputs.y = Int(abs(node.inputs.y.value)) - return ProcessHandlerReport(True) + @process_handler(priority=500) + def sanity_check_not_too_big(self, node): + """My puny brain cannot deal with numbers that I cannot count on my hand.""" + if node.is_finished_ok and node.outputs.sum > 10: + return ProcessHandlerReport(True, self.exit_codes.ERROR_TOO_BIG) + + @process_handler(priority=460, enabled=False) + def disabled_handler(self, node): + """By default this is not enabled and so should never be called, irrespective of exit codes of sub process.""" + return ProcessHandlerReport(True, self.exit_codes.ERROR_ENABLED_DOOM) + + @process_handler(priority=450, exit_codes=ExitCode(1000, 'Unicorn encountered')) + def a_magic_unicorn_appeared(self, node): + """As we all know unicorns do not exist so we should never have to deal with it.""" + raise RuntimeError('this handler should never even have been 
called') + + @process_handler(priority=400, exit_codes=ArithmeticAddCalculation.exit_codes.ERROR_NEGATIVE_NUMBER) + def error_negative_sum(self, node): + """What even is a negative number, how can I have minus three melons?!.""" + self.ctx.inputs.x = Int(abs(node.inputs.x.value)) + self.ctx.inputs.y = Int(abs(node.inputs.y.value)) + return ProcessHandlerReport(True) class NestedWorkChain(WorkChain): diff --git a/.docker/opt/configure-aiida.sh b/.docker/opt/configure-aiida.sh index de69ea4a70..c728cc64d8 100755 --- a/.docker/opt/configure-aiida.sh +++ b/.docker/opt/configure-aiida.sh @@ -8,20 +8,7 @@ set -x # Environment. export SHELL=/bin/bash -# Activate conda. -__conda_setup="$('/opt/conda/bin/conda' 'shell.bash' 'hook' 2> /dev/null)" -if [ $? -eq 0 ]; then - eval "$__conda_setup" -else - if [ -f "/opt/conda/etc/profile.d/conda.sh" ]; then - . "/opt/conda/etc/profile.d/conda.sh" - else - export PATH="/opt/conda/bin:$PATH" - fi -fi -unset __conda_setup - -# Very important to run after conda activation, otherwise AiiDA won't work. +# Update the list of installed aiida plugins. reentry scan # Setup AiiDA autocompletion. 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c8f484f96b..4ffd8c5940 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,12 +96,13 @@ jobs: steps: - uses: actions/checkout@v1 - - uses: harmon758/postgresql-action@v1 + - uses: CasperWA/postgresql-action@v1.2 with: - postgresql version: '11' + postgresql version: '10' postgresql db: test_${{ matrix.backend }} - postgresql user: 'postgres' + postgresql user: postgres postgresql password: '' + postgresql auth: trust - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -115,7 +116,7 @@ jobs: echo 'deb https://dl.bintray.com/rabbitmq/debian bionic main' | sudo tee -a /etc/apt/sources.list.d/bintray.rabbitmq.list sudo rm -f /etc/apt/sources.list.d/dotnetdev.list /etc/apt/sources.list.d/microsoft-prod.list sudo apt update - sudo apt install postgresql postgresql-server-dev-all postgresql-client rabbitmq-server graphviz + sudo apt install postgresql-10 rabbitmq-server graphviz sudo systemctl status rabbitmq-server.service - name: Install python dependencies diff --git a/.github/workflows/conda.sh b/.github/workflows/conda.sh index 608fe81c02..5ad1b6628b 100755 --- a/.github/workflows/conda.sh +++ b/.github/workflows/conda.sh @@ -7,10 +7,6 @@ export PATH="$HOME/miniconda/bin:$PATH" hash -r conda config --set always_yes yes --set changeps1 no -# Workaround for https://github.com/conda/conda/issues/9337 -pip uninstall -y setuptools -conda install setuptools - conda update -q conda conda info -a -conda env create -f environment.yml -n test-environment python=$PYTHON_VERSION +conda env create -f environment.yml -n test-environment diff --git a/CHANGELOG.md b/CHANGELOG.md index 1880194fe1..b9ca0d93b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,30 @@ # Changelog +## v1.1.1 + +### Changes +- Emit a warning when input port specifies a node instance as default [[#3466]](https://github.com/aiidateam/aiida-core/pull/3466) +- 
`BaseRestartWorkChain`: require process handlers to be instance methods [[#3782]](https://github.com/aiidateam/aiida-core/pull/3782) +- `BaseRestartWorkChain`: add method to enable/disable process handlers [[#3786]](https://github.com/aiidateam/aiida-core/pull/3786) +- Docker container: remove conda activation from configure-aiida.sh script [[#3791]](https://github.com/aiidateam/aiida-core/pull/3791) +- Add fixtures to clear the database before or after tests [[#3783]](https://github.com/aiidateam/aiida-core/pull/3783) +- `verdi status`: add the configuration directory path to the output [[#3587]](https://github.com/aiidateam/aiida-core/pull/3587) +- `QueryBuilder`: add support for `datetime.date` objects in filters [[#3796]](https://github.com/aiidateam/aiida-core/pull/3796) + +### Bug fixes +- Fix bugs in `Node._store_from_cache` and `Node.repository.erase` that could result in calculations not being reused [[#3777]](https://github.com/aiidateam/aiida-core/pull/3777) +- Caching: fix configuration spec and validation [[#3785]](https://github.com/aiidateam/aiida-core/pull/3785) +- Write migrated config to disk in `Config.from_file` [[#3797]](https://github.com/aiidateam/aiida-core/pull/3797) +- Validate label string at code setup stage [[#3793]](https://github.com/aiidateam/aiida-core/pull/3793) +- Reuse `prepend_text` and `append_text` in `verdi computer/code duplicate` [[#3788]](https://github.com/aiidateam/aiida-core/pull/3788) +- Fix broken imports of `urllib` in various locations including `verdi import` [[#3767]](https://github.com/aiidateam/aiida-core/pull/3767) +- Match headers with actual output for `verdi data structure list` [[#3756]](https://github.com/aiidateam/aiida-core/pull/3756) +- Disable caching for the `Data` node subclass (this should not affect usual caching behavior) [[#3807]](https://github.com/aiidateam/aiida-core/pull/3807) + + ## v1.1.0 -**Note Bene:** although this is a minor version release, the support for python 2 is dropped 
[(#3566)](https://github.com/aiidateam/aiida-core/pull/3566) following the reasoning outlined in the corresponding [AEP001](https://github.com/aiidateam/AEP/tree/master/001_drop_python2). +**Nota Bene:** although this is a minor version release, the support for python 2 is dropped [(#3566)](https://github.com/aiidateam/aiida-core/pull/3566) following the reasoning outlined in the corresponding [AEP001](https://github.com/aiidateam/AEP/tree/master/001_drop_python2). Critical bug fixes for python 2 will be supported until July 1 2020 on the `v1.0.*` release series. With the addition of python 3.8 [(#3719)](https://github.com/aiidateam/aiida-core/pull/3719), this version is now compatible with all current python versions that are not end-of-life: * 3.5 diff --git a/aiida/__init__.py b/aiida/__init__.py index 060f0993fb..a6a582127c 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -32,7 +32,7 @@ 'For further information please visit http://www.aiida.net/. All rights reserved.' ) __license__ = 'MIT license, see LICENSE.txt file.' -__version__ = '1.1.0' +__version__ = '1.1.1' __authors__ = 'The AiiDA team.' __paper__ = ( 'G. Pizzi, A. Cepellotti, R. Sabatini, N. Marzari, and B. 
Kozinsky,' diff --git a/aiida/cmdline/commands/cmd_code.py b/aiida/cmdline/commands/cmd_code.py index 1c2d6eef73..4a77b61462 100644 --- a/aiida/cmdline/commands/cmd_code.py +++ b/aiida/cmdline/commands/cmd_code.py @@ -94,7 +94,11 @@ def setup_code(non_interactive, **kwargs): kwargs['code_type'] = CodeBuilder.CodeType.STORE_AND_UPLOAD code_builder = CodeBuilder(**kwargs) - code = code_builder.new() + + try: + code = code_builder.new() + except InputValidationError as exception: + echo.echo_critical('invalid inputs: {}'.format(exception)) try: code.store() @@ -115,8 +119,8 @@ def setup_code(non_interactive, **kwargs): @options_code.REMOTE_ABS_PATH(contextual_default=partial(get_default, 'remote_abs_path')) @options_code.FOLDER(contextual_default=partial(get_default, 'code_folder')) @options_code.REL_PATH(contextual_default=partial(get_default, 'code_rel_path')) -@options.PREPEND_TEXT() -@options.APPEND_TEXT() +@options.PREPEND_TEXT(cls=options.ContextualDefaultOption, contextual_default=partial(get_default, 'prepend_text')) +@options.APPEND_TEXT(cls=options.ContextualDefaultOption, contextual_default=partial(get_default, 'append_text')) @options.NON_INTERACTIVE() @click.option('--hide-original', is_flag=True, default=False, help='Hide the code being copied.') @click.pass_context diff --git a/aiida/cmdline/commands/cmd_computer.py b/aiida/cmdline/commands/cmd_computer.py index 397dc93659..ad5dc457b5 100644 --- a/aiida/cmdline/commands/cmd_computer.py +++ b/aiida/cmdline/commands/cmd_computer.py @@ -277,8 +277,12 @@ def computer_setup(ctx, non_interactive, **kwargs): @options_computer.WORKDIR(contextual_default=partial(get_parameter_default, 'work_dir')) @options_computer.MPI_RUN_COMMAND(contextual_default=partial(get_parameter_default, 'mpirun_command')) @options_computer.MPI_PROCS_PER_MACHINE(contextual_default=partial(get_parameter_default, 'mpiprocs_per_machine')) -@options.PREPEND_TEXT() -@options.APPEND_TEXT() +@options.PREPEND_TEXT( + 
cls=options.ContextualDefaultOption, contextual_default=partial(get_parameter_default, 'prepend_text') +) +@options.APPEND_TEXT( + cls=options.ContextualDefaultOption, contextual_default=partial(get_parameter_default, 'append_text') +) @options.NON_INTERACTIVE() @click.pass_context @with_dbenv() diff --git a/aiida/cmdline/commands/cmd_data/cmd_structure.py b/aiida/cmdline/commands/cmd_data/cmd_structure.py index 57217b303d..c0d205197e 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_structure.py +++ b/aiida/cmdline/commands/cmd_data/cmd_structure.py @@ -18,7 +18,7 @@ from aiida.cmdline.params import arguments, options, types from aiida.cmdline.utils import decorators, echo -LIST_PROJECT_HEADERS = ['Id', 'Label', 'Kinds', 'Sites'] +LIST_PROJECT_HEADERS = ['Id', 'Label', 'Formula'] EXPORT_FORMATS = ['cif', 'xsf', 'xyz'] VISUALIZATION_FORMATS = ['ase', 'jmol', 'vesta', 'vmd', 'xcrysden'] @@ -63,7 +63,8 @@ def structure_list(elements, raw, formula_mode, past_days, groups, all_users): elements_only = False lst = data_list( - StructureData, LIST_PROJECT_HEADERS, elements, elements_only, formula_mode, past_days, groups, all_users + StructureData, ['Id', 'Label', 'Kinds', 'Sites'], elements, elements_only, formula_mode, past_days, groups, + all_users ) entry_list = [] @@ -99,7 +100,7 @@ def structure_list(elements, raw, formula_mode, past_days, groups, all_users): # referenced by the site except KeyError: formula = '<>' - entry_list.append([str(pid), str(formula), label]) + entry_list.append([str(pid), label, str(formula)]) counter = 0 struct_list_data = list() diff --git a/aiida/cmdline/commands/cmd_import.py b/aiida/cmdline/commands/cmd_import.py index 46396a40d8..73054b182e 100644 --- a/aiida/cmdline/commands/cmd_import.py +++ b/aiida/cmdline/commands/cmd_import.py @@ -11,7 +11,7 @@ # pylint: disable=broad-except,too-many-arguments,too-many-locals,too-many-branches from enum import Enum import traceback -import urllib +import urllib.request import click from 
aiida.cmdline.commands.cmd_verdi import verdi diff --git a/aiida/cmdline/commands/cmd_profile.py b/aiida/cmdline/commands/cmd_profile.py index 8d5563139e..6ac1671237 100644 --- a/aiida/cmdline/commands/cmd_profile.py +++ b/aiida/cmdline/commands/cmd_profile.py @@ -52,10 +52,11 @@ def profile_list(): @arguments.PROFILE(default=defaults.get_default_profile) def profile_show(profile): """Show details for a profile.""" + if profile is None: echo.echo_critical('no profile to show') - echo.echo_info('Configuration for: {}'.format(profile.name)) + echo.echo_info('Profile: {}'.format(profile.name)) data = sorted([(k.lower(), v) for k, v in profile.dictionary.items()]) echo.echo(tabulate.tabulate(data)) diff --git a/aiida/cmdline/commands/cmd_status.py b/aiida/cmdline/commands/cmd_status.py index 312e8d2b9b..74a597f45e 100644 --- a/aiida/cmdline/commands/cmd_status.py +++ b/aiida/cmdline/commands/cmd_status.py @@ -49,12 +49,16 @@ def verdi_status(): from aiida.common.utils import Capturing from aiida.manage.external.rmq import get_rmq_url from aiida.manage.manager import get_manager + from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER exit_code = ExitCode.SUCCESS + + # path to configuration file + print_status(ServiceStatus.UP, 'config dir', AIIDA_CONFIG_FOLDER) + manager = get_manager() profile = manager.get_profile() - # getting the profile try: profile = manager.get_profile() print_status(ServiceStatus.UP, 'profile', 'On profile {}'.format(profile.name)) diff --git a/aiida/cmdline/params/options/__init__.py b/aiida/cmdline/params/options/__init__.py index c67e520bb8..c2352ac70a 100644 --- a/aiida/cmdline/params/options/__init__.py +++ b/aiida/cmdline/params/options/__init__.py @@ -16,6 +16,7 @@ from .. 
import types from .multivalue import MultipleValueOption from .overridable import OverridableOption +from .contextualdefault import ContextualDefaultOption from .config import ConfigFileOption __all__ = ( diff --git a/aiida/cmdline/params/options/contextualdefault.py b/aiida/cmdline/params/options/contextualdefault.py new file mode 100644 index 0000000000..1642b45127 --- /dev/null +++ b/aiida/cmdline/params/options/contextualdefault.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +""" +.. py:module:: contextualdefault +    :synopsis: Tools for options which allow for a default callable that needs +    also the context ctx +""" + +import click + + +class ContextualDefaultOption(click.Option): +    """A class that extends click.Option allowing to define a default callable +    that also gets the context ctx as a parameter. 
+ """ + + def __init__(self, *args, contextual_default=None, **kwargs): + self._contextual_default = contextual_default + super().__init__(*args, **kwargs) + + def get_default(self, ctx): + """If a contextual default is defined, use it, otherwise behave normally.""" + if self._contextual_default is None: + return super().get_default(ctx) + return self._contextual_default(ctx) diff --git a/aiida/cmdline/params/types/path.py b/aiida/cmdline/params/types/path.py index 3a597e816a..20a96cd436 100644 --- a/aiida/cmdline/params/types/path.py +++ b/aiida/cmdline/params/types/path.py @@ -9,7 +9,9 @@ ########################################################################### """Click parameter types for paths.""" import os -import urllib +# See https://stackoverflow.com/a/41217363/1069467 +import urllib.request +import urllib.error import click @@ -53,10 +55,11 @@ def __repr__(self): class ImportPath(click.Path): """AiiDA extension of Click's Path-type to include URLs An ImportPath can either be a `click.Path`-type or a URL. + :param timeout_seconds: Timeout time in seconds that a URL response is expected. :value timeout_seconds: Must be an int in the range [0;60], extrema included. - If an int outside the range [0;60] is given, the value will be set to the respective extremum value. - If any other type than int is given a TypeError will be raised. + If an int outside the range [0;60] is given, the value will be set to the respective extremum value. + If any other type than int is given a TypeError will be raised. 
""" # pylint: disable=protected-access diff --git a/aiida/cmdline/utils/multi_line_input.py b/aiida/cmdline/utils/multi_line_input.py index 131bf68682..32632fa616 100644 --- a/aiida/cmdline/utils/multi_line_input.py +++ b/aiida/cmdline/utils/multi_line_input.py @@ -76,7 +76,7 @@ def edit_pre_post(pre=None, post=None, summary=None): pre = re.sub(r'(^#=.*$\n)+', '', pre, flags=re.M).strip() post = re.sub(r'(^#=.*$\n)+', '', post, flags=re.M).strip() else: - pre, post = ('', '') + pre, post = (pre or '', post or '') return pre, post diff --git a/aiida/common/hashing.py b/aiida/common/hashing.py index 1f73f150f2..d43b603054 100644 --- a/aiida/common/hashing.py +++ b/aiida/common/hashing.py @@ -8,6 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Common password and hash generation functions.""" +import datetime import hashlib try: # Python > 3.5 from hashlib import blake2b @@ -18,7 +19,6 @@ import time import uuid from collections import abc, OrderedDict -from datetime import datetime from functools import singledispatch from itertools import chain from operator import itemgetter @@ -234,17 +234,23 @@ def _(val, **kwargs): return [_single_digest('none')] -@_make_hash.register(datetime) +@_make_hash.register(datetime.datetime) def _(val, **kwargs): """hashes the little-endian rep of the float .""" # see also https://stackoverflow.com/a/8778548 for an excellent elaboration if val.tzinfo is None or val.utcoffset() is None: val = val.replace(tzinfo=pytz.utc) - timestamp = val.timestamp() + timestamp = val.timestamp() return [_single_digest('datetime', float_to_text(timestamp, sig=AIIDA_FLOAT_PRECISION).encode('utf-8'))] +@_make_hash.register(datetime.date) +def _(val, **kwargs): + """Hashes the string representation in ISO format of the `datetime.date` object.""" + return [_single_digest('date', val.isoformat().encode('utf-8'))] + + @_make_hash.register(uuid.UUID) def _(val, 
**kwargs): return [_single_digest('uuid', val.bytes)] diff --git a/aiida/common/lang.py b/aiida/common/lang.py index d85ce10b35..defeea73dc 100644 --- a/aiida/common/lang.py +++ b/aiida/common/lang.py @@ -9,8 +9,8 @@ ########################################################################### """Utilities that extend the basic python language.""" import functools +import inspect import keyword -from inspect import stack, currentframe, getfullargspec def isidentifier(identifier): @@ -40,41 +40,6 @@ def type_check(what, of_type, msg=None, allow_none=False): raise TypeError(msg) -def protected_decorator(check=False): - """Decorator to ensure that the decorated method is not called from outside the class hierarchy.""" - - def wrap(func): # pylint: disable=missing-docstring - if isinstance(func, property): - raise RuntimeError('Protected must go after @property decorator') - - args = getfullargspec(func)[0] - if not args: - raise RuntimeError('Can only use the protected decorator on member functions') - - # We can only perform checks if the interpreter is capable of giving - # us the stack i.e. 
currentframe() produces a valid object - if check and currentframe() is not None: - - @functools.wraps(func) - def wrapped_fn(self, *args, **kwargs): # pylint: disable=missing-docstring - try: - calling_class = stack()[1][0].f_locals['self'] - assert self is calling_class - except (KeyError, AssertionError): - raise RuntimeError( - 'Cannot access protected function {} from outside' - ' class hierarchy'.format(func.__name__) - ) - - return func(self, *args, **kwargs) - else: - wrapped_fn = func - - return wrapped_fn - - return wrap - - def override_decorator(check=False): """Decorator to signal that a method from a base class is being overridden completely.""" @@ -82,7 +47,7 @@ def wrap(func): # pylint: disable=missing-docstring if isinstance(func, property): raise RuntimeError('Override must go after @property decorator') - args = getfullargspec(func)[0] + args = inspect.getfullargspec(func)[0] if not args: raise RuntimeError('Can only use the override decorator on member functions') @@ -104,7 +69,6 @@ def wrapped_fn(self, *args, **kwargs): # pylint: disable=missing-docstring return wrap -protected = protected_decorator(check=False) # pylint: disable=invalid-name override = override_decorator(check=False) # pylint: disable=invalid-name @@ -122,85 +86,3 @@ def __init__(self, getter): def __get__(self, instance, owner): return self.getter(owner) - - -class abstractclassmethod(classmethod): # pylint: disable=too-few-public-methods, invalid-name - """ - A decorator indicating abstract classmethods. - - Backported from python3. - """ - __isabstractmethod__ = True - - def __init__(self, callable): # pylint: disable=redefined-builtin - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): # pylint: disable=too-few-public-methods, invalid-name - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - Backported from python3. 
- """ - - __isabstractmethod__ = True - - def __init__(self, callable): # pylint: disable=redefined-builtin - callable.__isabstractmethod__ = True # pylint: disable=redefined-builtin - super().__init__(callable) - - -class combomethod: # pylint: disable=invalid-name,too-few-public-methods - """ - A decorator that wraps a function that can be both a classmethod or - instancemethod and behaves accordingly:: - - class A(): - - @combomethod - def do(self, **kwargs): - isclass = kwargs.get('isclass') - if isclass: - print("I am a class", self) - else: - print("I am an instance", self) - - A.do() - A().do() - - >>> I am a class __main__.A - >>> I am an instance <__main__.A instance at 0x7f2efb116e60> - - Attention: For ease of handling, pass keyword **isclass** - equal to True if this was called as a classmethod and False if this - was called as an instance. - The argument self is therefore ambiguous! - """ - - def __init__(self, method): - self.method = method - - def __get__(self, obj=None, objtype=None): # pylint: disable=missing-docstring - - @functools.wraps(self.method) - def _wrapper(*args, **kwargs): # pylint: disable=missing-docstring - kwargs.pop('isclass', None) - if obj is not None: - return self.method(obj, *args, isclass=False, **kwargs) - return self.method(objtype, *args, isclass=True, **kwargs) - - return _wrapper - - -class EmptyContextManager: # pylint: disable=too-few-public-methods - """ - A dummy/no-op context manager. 
- """ - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - pass diff --git a/aiida/engine/processes/calcjobs/calcjob.py b/aiida/engine/processes/calcjobs/calcjob.py index 7aa82ab9d7..0e6b234ef0 100644 --- a/aiida/engine/processes/calcjobs/calcjob.py +++ b/aiida/engine/processes/calcjobs/calcjob.py @@ -148,12 +148,12 @@ def define(cls, spec): help='Set the quality of service to use in for the queue on the remote computer') spec.input('metadata.options.withmpi', valid_type=bool, default=False, help='Set the calculation to use mpi',) - spec.input('metadata.options.mpirun_extra_params', valid_type=(list, tuple), default=[], + spec.input('metadata.options.mpirun_extra_params', valid_type=(list, tuple), default=lambda: [], help='Set the extra params to pass to the mpirun (or equivalent) command after the one provided in ' 'computer.mpirun_command. Example: mpirun -np 8 extra_params[0] extra_params[1] ... exec.x',) spec.input('metadata.options.import_sys_environment', valid_type=bool, default=True, help='If set to true, the submission script will load the system environment variables',) - spec.input('metadata.options.environment_variables', valid_type=dict, default={}, + spec.input('metadata.options.environment_variables', valid_type=dict, default=lambda: {}, help='Set a dictionary of custom environment variables for this calculation',) spec.input('metadata.options.priority', valid_type=str, required=False, help='Set the priority of the job to be queued') diff --git a/aiida/engine/processes/ports.py b/aiida/engine/processes/ports.py index 725da48032..1340ba9b0d 100644 --- a/aiida/engine/processes/ports.py +++ b/aiida/engine/processes/ports.py @@ -8,9 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### """AiiDA specific implementation of plumpy Ports and PortNamespaces for the ProcessSpec.""" - import collections import re +import warnings from 
plumpy import ports @@ -96,6 +96,22 @@ class InputPort(WithSerialize, WithNonDb, ports.InputPort): value serialization to database storable types and support non database storable input types as well. """ + def __init__(self, *args, **kwargs): + """Override the constructor to check the type of the default if set and warn if not immutable.""" + # pylint: disable=redefined-builtin,too-many-arguments + from aiida.orm import Node + + if 'default' in kwargs: + default = kwargs['default'] + # If the default is specified and it is a node instance, raise a warning. This is to try and prevent that + # people set node instances as defaults which can cause various problems. + if default is not ports.UNSPECIFIED and isinstance(default, Node): + message = 'default of input port `{}` is a `Node` instance, which can lead to unexpected side effects.'\ + ' It is advised to use a lambda instead, e.g.: `default=lambda: orm.Int(5)`.'.format(args[0]) + warnings.warn(UserWarning(message)) # pylint: disable=no-member + + super(InputPort, self).__init__(*args, **kwargs) + def get_description(self): """ Return a description of the InputPort, which will be a dictionary of its attributes diff --git a/aiida/engine/processes/process.py b/aiida/engine/processes/process.py index 26c9b49524..09090df055 100644 --- a/aiida/engine/processes/process.py +++ b/aiida/engine/processes/process.py @@ -23,7 +23,7 @@ from aiida import orm from aiida.common import exceptions from aiida.common.extendeddicts import AttributeDict -from aiida.common.lang import classproperty, override, protected +from aiida.common.lang import classproperty, override from aiida.common.links import LinkType from aiida.common.log import LOG_LEVEL_REPORT @@ -456,7 +456,6 @@ def runner(self): """ return self._runner - @protected def get_parent_calc(self): """ Get the parent process node @@ -495,7 +494,6 @@ def build_process_type(cls): return process_type - @protected def report(self, msg, *args, **kwargs): """Log a message to the 
logger, which should get saved to the database through the attached DbLogHandler. diff --git a/aiida/engine/processes/workchains/restart.py b/aiida/engine/processes/workchains/restart.py index 772eda48cc..481f056755 100644 --- a/aiida/engine/processes/workchains/restart.py +++ b/aiida/engine/processes/workchains/restart.py @@ -8,17 +8,47 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Base implementation of `WorkChain` class that implements a simple automated restart mechanism for sub processes.""" +import functools + from aiida import orm from aiida.common import AttributeDict -from aiida.common.lang import classproperty from .context import ToContext, append_ from .workchain import WorkChain -from .utils import ProcessHandlerReport +from .utils import ProcessHandlerReport, process_handler __all__ = ('BaseRestartWorkChain',) +def validate_handler_overrides(process_class, handler_overrides, ctx): # pylint: disable=inconsistent-return-statements,unused-argument + """Validator for the `handler_overrides` input port of the `BaseRestartWorkChain. + + The `handler_overrides` should be a dictionary where keys are strings that are the name of a process handler, i.e. a + instance method of the `process_class` that has been decorated with the `process_handler` decorator. The values + should be boolean. + + .. note:: the normal signature of a port validator is `(value, ctx)` but since for the validation here we need a + reference to the process class, we add it and the class is bound to the method in the port declaration in the + `define` method. 
+ + :param process_class: the `BaseRestartWorkChain` (sub) class + :param handler_overrides: the input `Dict` node + :param ctx: the `PortNamespace` in which the port is embedded + """ + if not handler_overrides: + return + + for handler, override in handler_overrides.get_dict().items(): + if not isinstance(handler, str): + return 'The key `{}` is not a string.'.format(handler) + + if not process_class.is_process_handler(handler): + return 'The key `{}` is not a process handler of {}'.format(handler, process_class) + + if not isinstance(override, bool): + return 'The value of key `{}` is not a boolean.'.format(handler) + + class BaseRestartWorkChain(WorkChain): """Base restart work chain. @@ -28,8 +58,9 @@ class BaseRestartWorkChain(WorkChain): are recoverable. This work chain implements the most basic functionality to achieve this goal. It will launch the sub process, - restarting until it is completed successfully or the maximum number of iterations is reached. It can recover from - errors through error handlers that can be registered to the class through the `register_process_handler` decorator. + restarting until it is completed successfully or the maximum number of iterations is reached. After completion of + the sub process it will be inspected, and a list of process handlers are called successively. These process handlers + are defined as class methods that are decorated with :meth:`~aiida.engine.process_handler`. The idea is to sub class this work chain and leverage the generic error handling that is implemented in the few outline methods. The minimally required outline would look something like the following:: @@ -57,13 +88,21 @@ class BaseRestartWorkChain(WorkChain): process will be run with those inputs. The `_process_class` attribute should be set to the `Process` class that should be run in the loop. 
+ Finally, to define handlers that will be called during the `inspect_process` simply define a class method with the + signature `(self, node)` and decorate it with the `process_handler` decorator, for example:: + + @process_handler + def handle_problem(self, node): + if some_problem: + self.ctx.inputs = improved_inputs + return ProcessHandlerReport() + + The `process_handler` and `ProcessHandlerReport` support various arguments to control the flow of the logic of the + `inspect_process`. Refer to their respective documentation for details. """ - _verbose = False _process_class = None - - _handler_entry_point = None - __handlers = tuple() + _considered_handlers_extra = 'considered_handlers' @classmethod def define(cls, spec): @@ -74,6 +113,11 @@ def define(cls, spec): help='Maximum number of iterations the work chain will restart the process to finish successfully.') spec.input('clean_workdir', valid_type=orm.Bool, default=lambda: orm.Bool(False), help='If `True`, work directories of all called calculation jobs will be cleaned at the end of execution.') + spec.input('handler_overrides', + valid_type=orm.Dict, required=False, validator=functools.partial(validate_handler_overrides, cls), + help='Mapping where keys are process handler names and the values are a boolean, where `True` will enable ' + 'the corresponding handler and `False` will disable it. 
This overrides the default value set by the ' + '`enabled` keyword of the `process_handler` decorator with which the method is decorated.') spec.exit_code(301, 'ERROR_SUB_PROCESS_EXCEPTED', message='The sub process excepted.') spec.exit_code(302, 'ERROR_SUB_PROCESS_KILLED', @@ -85,6 +129,8 @@ def define(cls, spec): def setup(self): """Initialize context variables that are used during the logical flow of the `BaseRestartWorkChain`.""" + overrides = self.inputs.handler_overrides.get_dict() if 'handler_overrides' in self.inputs else {} + self.ctx.handler_overrides = overrides self.ctx.process_name = self._process_class.__name__ self.ctx.unhandled_failure = False self.ctx.is_finished = False @@ -113,12 +159,12 @@ def run_process(self): inputs = self._wrap_bare_dict_inputs(self._process_class.spec().inputs, unwrapped_inputs) node = self.submit(self._process_class, **inputs) - # Add a new empty list to the `called_process_handlers` extra. If any errors handled registered through the - # `register_process_handler` decorator return an `ProcessHandlerReport`, their name will be appended to that - # list. - called_process_handlers = self.node.get_extra('called_process_handlers', []) - called_process_handlers.append([]) - self.node.set_extra('called_process_handlers', called_process_handlers) + # Add a new empty list to the `BaseRestartWorkChain._considered_handlers_extra` extra. This will contain the + # name and return value of all class methods, decorated with `process_handler`, that are called during + # the `inspect_process` outline step. 
+ considered_handlers = self.node.get_extra(self._considered_handlers_extra, []) + considered_handlers.append([]) + self.node.set_extra(self._considered_handlers_extra, considered_handlers) self.report('launching {}<{}> iteration #{}'.format(self.ctx.process_name, node.pk, self.ctx.iteration)) @@ -153,18 +199,22 @@ def inspect_process(self): # pylint: disable=inconsistent-return-statements,too if node.is_killed: return self.exit_codes.ERROR_SUB_PROCESS_KILLED # pylint: disable=no-member - # Sort the handlers with a priority defined, based on their priority in reverse order - handlers = [handler for handler in self._handlers if handler.priority] - handlers = sorted(handlers, key=lambda x: x.priority, reverse=True) - last_report = None - for handler in handlers: + # Sort the handlers with a priority defined, based on their priority in reverse order + for handler in sorted(self.get_process_handlers(), key=lambda handler: handler.priority, reverse=True): - report = handler.method(self, node) + # Skip if the handler is enabled, either explicitly through `handler_overrides` or by default + if not self.ctx.handler_overrides.get(handler.__name__, handler.enabled): + continue + + # Even though the `handler` is an instance method, the `get_process_handlers` method returns unbound methods + # so we have to pass in `self` manually. Also, always pass the `node` as an argument because the + # `process_handler` decorator with which the handler is decorated relies on this behavior. 
+ report = handler(self, node) if report is not None and not isinstance(report, ProcessHandlerReport): - name = handler.method.__name__ + name = handler.__name__ raise RuntimeError('handler `{}` returned a value that is not a ProcessHandlerReport'.format(name)) # If an actual report was returned, save it so it is not overridden by next handler returning `None` @@ -234,8 +284,6 @@ def results(self): # pylint: disable=inconsistent-return-statements name, self.ctx.process_name, node.pk)) else: self.out(name, output) - if self._verbose: - self.report("attaching the node {}<{}> as '{}'".format(output.__class__.__name__, output.pk, name)) def __init__(self, *args, **kwargs): """Construct the instance.""" @@ -245,23 +293,25 @@ def __init__(self, *args, **kwargs): if self._process_class is None or not issubclass(self._process_class, Process): raise ValueError('no valid Process class defined for `_process_class` attribute') - @classproperty - def _handlers(cls): # pylint: disable=no-self-argument - """Return the tuple of all registered handlers for this class and of any parent class. + @classmethod + def is_process_handler(cls, process_handler_name): + """Return whether the given method name corresponds to a process handler of this class. - :return: tuple of handler methods + :param process_handler_name: string name of the instance method + :return: boolean, True if corresponds to process handler, False otherwise """ - return getattr(super(), '__handlers', tuple()) + cls.__handlers + # pylint: disable=comparison-with-callable + if isinstance(process_handler_name, str): + handler = getattr(cls, process_handler_name, {}) + else: + handler = process_handler_name - @classmethod - def register_handler(cls, name, handler): - """Register a new handler to this class. + return getattr(handler, 'decorator', None) == process_handler - :param name: the name under which to register the handler - :param handler: a method with the signature `self, node`. 
- """ - setattr(cls, name, handler) - cls.__handlers = cls.__handlers + (handler,) + @classmethod + def get_process_handlers(cls): + from inspect import getmembers + return [method[1] for method in getmembers(cls) if cls.is_process_handler(method[1])] def on_terminated(self): """Clean the working directories of all child calculation jobs if `clean_workdir=True` in the inputs.""" diff --git a/aiida/engine/processes/workchains/utils.py b/aiida/engine/processes/workchains/utils.py index e7e2fd3847..9869aa3a36 100644 --- a/aiida/engine/processes/workchains/utils.py +++ b/aiida/engine/processes/workchains/utils.py @@ -9,27 +9,13 @@ ########################################################################### """Utilities for `WorkChain` implementations.""" from collections import namedtuple -from functools import wraps +from functools import partial +from inspect import getfullargspec +from wrapt import decorator from ..exit_code import ExitCode -__all__ = ('ProcessHandler', 'ProcessHandlerReport', 'register_process_handler') - -ProcessHandler = namedtuple('ProcessHandler', 'method priority exit_codes') -ProcessHandler.__new__.__defaults__ = (None, 0, None) -"""A namedtuple to define a process handler for a :class:`aiida.engine.BaseRestartWorkChain`. - -The `method` element refers to a function decorated by the `register_process_handler` that has turned it into a bound -method of the target `WorkChain` class. The method takes an instance of a :class:`~aiida.orm.ProcessNode` as its sole -argument. The method can return an optional `ProcessHandlerReport` to signal whether other handlers still need to be -considered or whether the work chain should be terminated immediately. The priority determines in which order the -handler methods are executed, with the higher priority being executed first. 
- -:param method: the decorated process handling function turned into a bound work chain class method -:param priority: integer denoting the process handler's priority -:param exit_codes: single or list of `ExitCode` instances. A handler that defines this should only be called for a given - completed process if its exit status is a member of `exit_codes`. -""" +__all__ = ('ProcessHandlerReport', 'process_handler') ProcessHandlerReport = namedtuple('ProcessHandlerReport', 'do_break exit_code') ProcessHandlerReport.__new__.__defaults__ = (False, ExitCode()) @@ -49,16 +35,12 @@ """ -def register_process_handler(cls, *, priority=None, exit_codes=None): - """Decorator to register a function as a handler for a :class:`~aiida.engine.BaseRestartWorkChain`. +def process_handler(wrapped=None, *, priority=0, exit_codes=None, enabled=True): + """Decorator to register a :class:`~aiida.engine.BaseRestartWorkChain` instance method as a process handler. - The function expects two arguments, a work chain class and a priortity. The decorator will add the function as a - class method to the work chain class and add an :class:`~aiida.engine.ProcessHandler` tuple to the `__handlers` - private attribute of the work chain. During the `inspect_process` outline method, the work chain will retrieve all - the registered handlers through the :meth:`~aiida.engine.BaseRestartWorkChain._handlers` property and loop over them - sorted with respect to their priority in reverse. If the work chain class defines the - :attr:`~aiida.engine.BaseRestartWorkChain._verbose` attribute and is set to `True`, a report message will be fired - when the process handler is executed. + The decorator will validate the `priority` and `exit_codes` optional keyword arguments and then add itself as an + attribute to the `wrapped` instance method. 
This is used in the `inspect_process` to return all instance methods of + the class that have been decorated by this function and therefore are considered to be process handlers. Requirements on the function signature of process handling functions. The function to which the decorator is applied needs to take two arguments: @@ -74,58 +56,65 @@ class method to the work chain class and add an :class:`~aiida.engine.ProcessHan :param cls: the work chain class to register the process handler with :param priority: optional integer that defines the order in which registered handlers will be called during the - handling of a finished process. Higher priorities will be handled first. + handling of a finished process. Higher priorities will be handled first. Default value is `0`. Multiple handlers + with the same priority is allowed, but the order of those is not well defined. :param exit_codes: single or list of `ExitCode` instances. If defined, the handler will return `None` if the exit code set on the `node` does not appear in the `exit_codes`. This is useful to have a handler called only when the process failed with a specific exit code. + :param enabled: boolean, by default True, which will cause the handler to be called during `inspect_process`. When + set to `False`, the handler will be skipped. This static value can be overridden on a per work chain instance + basis through the input `handler_overrides`. 
""" - if priority is None: - priority = 0 + if wrapped is None: + return partial(process_handler, priority=priority, exit_codes=exit_codes, enabled=enabled) if not isinstance(priority, int): - raise TypeError('the `priority` should be an integer.') + raise TypeError('the `priority` keyword should be an integer.') if exit_codes is not None and not isinstance(exit_codes, list): exit_codes = [exit_codes] if exit_codes and any([not isinstance(exit_code, ExitCode) for exit_code in exit_codes]): - raise TypeError('`exit_codes` should be an instance of `ExitCode` or list thereof.') + raise TypeError('`exit_codes` keyword should be an instance of `ExitCode` or list thereof.') - def process_handler_decorator(handler): - """Decorate a function to dynamically register a handler to a `WorkChain` class.""" + if not isinstance(enabled, bool): + raise TypeError('the `enabled` keyword should be a boolean.') - @wraps(handler) - def process_handler(self, node): - """Wrap handler to add a log to the report if the handler is called and verbosity is turned on.""" - verbose = hasattr(cls, '_verbose') and cls._verbose # pylint: disable=protected-access + handler_args = getfullargspec(wrapped)[0] - if exit_codes and node.exit_status not in [exit_code.status for exit_code in exit_codes]: - if verbose: - self.report('skipped {} because of exit code filter'.format(handler.__name__)) - return None + if len(handler_args) != 2: + raise TypeError('process handler `{}` has invalid signature: should be (self, node)'.format(wrapped.__name__)) - if verbose: - self.report('({}){}'.format(priority, handler.__name__)) + wrapped.decorator = process_handler + wrapped.priority = priority + wrapped.enabled = enabled - result = handler(self, node) + @decorator + def wrapper(wrapped, instance, args, kwargs): - # If a handler report is returned, attach the handler's name to node's attributes - if isinstance(result, ProcessHandlerReport): - try: - called_process_handlers = 
self.node.get_extra('called_process_handlers', []) - current_process = called_process_handlers[-1] - except IndexError: - # The extra was never initialized, so we skip this functionality - pass - else: - # Append the name of the handler to the last list in `called_process_handlers` and save it - current_process.append(handler.__name__) - self.node.set_extra('called_process_handlers', called_process_handlers) + # When the handler will be called by the `BaseRestartWorkChain` it will pass the node as the only argument + node = args[0] - return result + if exit_codes and node.exit_status not in [exit_code.status for exit_code in exit_codes]: + result = None + else: + result = wrapped(*args, **kwargs) - cls.register_handler(handler.__name__, ProcessHandler(process_handler, priority, exit_codes)) + # Append the name and return value of the current process handler to the `considered_handlers` extra. + try: + considered_handlers = instance.node.get_extra(instance._considered_handlers_extra, []) # pylint: disable=protected-access + current_process = considered_handlers[-1] + except IndexError: + # The extra was never initialized, so we skip this functionality + pass + else: + # Append the name of the handler to the last list in `considered_handlers` and save it + serialized = result + if isinstance(serialized, ProcessHandlerReport): + serialized = {'do_break': serialized.do_break, 'exit_status': serialized.exit_code.status} + current_process.append((wrapped.__name__, serialized)) + instance.node.set_extra(instance._considered_handlers_extra, considered_handlers) # pylint: disable=protected-access - return process_handler + return result - return process_handler_decorator + return wrapper(wrapped) # pylint: disable=no-value-for-parameter diff --git a/aiida/manage/caching.py b/aiida/manage/caching.py index 53def0e8d7..d8079fd747 100644 --- a/aiida/manage/caching.py +++ b/aiida/manage/caching.py @@ -9,16 +9,20 @@ 
########################################################################### """Definition of caching mechanism and configuration for calculations.""" import os +import re import copy -import warnings +import keyword from enum import Enum -from contextlib import contextmanager +from collections import namedtuple +from contextlib import contextmanager, suppress import yaml from wrapt import decorator from aiida.common import exceptions -from aiida.common.warnings import AiidaDeprecationWarning +from aiida.common.lang import type_check + +from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR, entry_point_group_to_module_path_map __all__ = ('get_use_cache', 'enable_caching', 'disable_caching') @@ -45,7 +49,6 @@ def _get_config(config_file): :return: the configuration dictionary """ from aiida.manage.configuration import get_profile - from aiida.plugins.entry_point import is_valid_entry_point_string, load_entry_point_from_string profile = get_profile() @@ -62,32 +65,26 @@ def _get_config(config_file): # Validate configuration for key in config: if key not in DEFAULT_CONFIG: - raise ValueError("Configuration error: Invalid key '{}' in cache_config.yml".format(key)) + raise exceptions.ConfigurationError("Configuration error: Invalid key '{}' in cache_config.yml".format(key)) # Add defaults where key is either completely missing or specifies no values in which case it will be `None` for key, default_config in DEFAULT_CONFIG.items(): if key not in config or config[key] is None: config[key] = default_config - # Validate the entry point identifiers - for key in [ConfigKeys.ENABLED.value, ConfigKeys.DISABLED.value]: - - # If the key is defined in the file but contains no values, it will be `None` - if config[key] is None: - continue - - for identifier in config[key]: - if not is_valid_entry_point_string(identifier): - raise exceptions.ConfigurationError( - "entry point '{}' in 'cache_config.yml' is not a valid entry point string.".format(identifier) - ) + try: + 
type_check(config[ConfigKeys.DEFAULT.value], bool) + type_check(config[ConfigKeys.ENABLED.value], list) + type_check(config[ConfigKeys.DISABLED.value], list) + except TypeError as exc: + raise exceptions.ConfigurationError('Invalid type in caching configuration file.') from exc - try: - load_entry_point_from_string(identifier) - except exceptions.EntryPointError as exception: - raise exceptions.ConfigurationError( - "entry point '{}' in 'cache_config.yml' can not be loaded: {}.".format(identifier, exception) - ) + # Check validity of enabled and disabled entries + try: + for identifier in config[ConfigKeys.ENABLED.value] + config[ConfigKeys.DISABLED.value]: + _validate_identifier_pattern(identifier=identifier) + except ValueError as exc: + raise exceptions.ConfigurationError('Invalid identifier pattern in enable or disable list.') from exc return config @@ -118,35 +115,63 @@ def _with_config(wrapped, _, args, kwargs): @_with_config -def get_use_cache(node_class=None, identifier=None): - """Return whether the caching mechanism should be used for the given entry point according to the configuration. +def get_use_cache(*, identifier=None): + """Return whether the caching mechanism should be used for the given process type according to the configuration. - :param node_class: the Node class or sub class to check if enabled for caching - :param identifier: the full entry point string of the process, e.g. 
`aiida.calculations:arithmetic.add` + :param identifier: Process type string of the node + :type identifier: str :return: boolean, True if caching is enabled, False otherwise - :raises ValueError: if the configuration is invalid by defining the class both enabled and disabled + :raises: `~aiida.common.exceptions.ConfigurationError` if the configuration is invalid, either due to a general + configuration error, or by defining the class both enabled and disabled """ - from aiida.common.lang import type_check - - if node_class is not None: - warnings.warn( # pylint: disable=no-member - 'the `node_class` argument is deprecated and will be removed in `v2.0.0`. ' - 'Use the `identifier` argument instead', AiidaDeprecationWarning - ) + type_check(identifier, str, allow_none=True) if identifier is not None: type_check(identifier, str) - enabled = identifier in _CONFIG[ConfigKeys.ENABLED.value] - disabled = identifier in _CONFIG[ConfigKeys.DISABLED.value] - - if enabled and disabled: - raise ValueError('Invalid configuration: caching for {} is both enabled and disabled.'.format(identifier)) - elif enabled: + enable_matches = [ + pattern for pattern in _CONFIG[ConfigKeys.ENABLED.value] + if _match_wildcard(string=identifier, pattern=pattern) + ] + disable_matches = [ + pattern for pattern in _CONFIG[ConfigKeys.DISABLED.value] + if _match_wildcard(string=identifier, pattern=pattern) + ] + + if enable_matches and disable_matches: + # If both enable and disable have matching identifier, we search for + # the most specific one. This is determined by checking whether + # all other patterns match the specific pattern. 
+ PatternWithResult = namedtuple('PatternWithResult', ['pattern', 'use_cache']) + most_specific = [] + for specific_pattern in enable_matches: + if all( + _match_wildcard(string=specific_pattern, pattern=other_pattern) + for other_pattern in enable_matches + disable_matches + ): + most_specific.append(PatternWithResult(pattern=specific_pattern, use_cache=True)) + for specific_pattern in disable_matches: + if all( + _match_wildcard(string=specific_pattern, pattern=other_pattern) + for other_pattern in enable_matches + disable_matches + ): + most_specific.append(PatternWithResult(pattern=specific_pattern, use_cache=False)) + + if len(most_specific) > 1: + raise exceptions.ConfigurationError(( + 'Invalid configuration: multiple matches for identifier {}' + ', but the most specific identifier is not unique. Candidates: {}' + ).format(identifier, [match.pattern for match in most_specific])) + if not most_specific: + raise exceptions.ConfigurationError( + 'Invalid configuration: multiple matches for identifier {}, but none of them is most specific.'. + format(identifier) + ) + return most_specific[0].use_cache + if enable_matches: return True - elif disabled: + if disable_matches: return False - return _CONFIG[ConfigKeys.DEFAULT.value] @@ -163,54 +188,124 @@ def _reset_config(): @contextmanager -def enable_caching(node_class=None, identifier=None): +def enable_caching(*, identifier=None): """Context manager to enable caching, either for a specific node class, or globally. .. warning:: this does not affect the behavior of the daemon, only the local Python interpreter. - :param node_class: Node class for which caching should be enabled. - :param identifier: the full entry point string of the process, e.g. `aiida.calculations:arithmetic.add` + :param identifier: Process type string of the node, or a pattern with '*' wildcard that matches it. 
+ :type identifier: str """ - if node_class is not None: - warnings.warn( # pylint: disable=no-member - 'the `node_class` argument is deprecated and will be removed in `v2.0.0`. ' - 'Use the `identifier` argument instead', AiidaDeprecationWarning - ) + type_check(identifier, str, allow_none=True) with _reset_config(): if identifier is None: _CONFIG[ConfigKeys.DEFAULT.value] = True + _CONFIG[ConfigKeys.DISABLED.value] = [] else: + _validate_identifier_pattern(identifier=identifier) _CONFIG[ConfigKeys.ENABLED.value].append(identifier) - try: + with suppress(ValueError): _CONFIG[ConfigKeys.DISABLED.value].remove(identifier) - except ValueError: - pass yield @contextmanager -def disable_caching(node_class=None, identifier=None): +def disable_caching(*, identifier=None): """Context manager to disable caching, either for a specific node class, or globally. .. warning:: this does not affect the behavior of the daemon, only the local Python interpreter. - :param node_class: Node class for which caching should be disabled. - :param identifier: the full entry point string of the process, e.g. `aiida.calculations:arithmetic.add` + :param identifier: Process type string of the node, or a pattern with '*' wildcard that matches it. + :type identifier: str """ - if node_class is not None: - warnings.warn( # pylint: disable=no-member - 'the `node_class` argument is deprecated and will be removed in `v2.0.0`. 
' - 'Use the `identifier` argument instead', AiidaDeprecationWarning - ) + type_check(identifier, str, allow_none=True) with _reset_config(): if identifier is None: _CONFIG[ConfigKeys.DEFAULT.value] = False + _CONFIG[ConfigKeys.ENABLED.value] = [] else: + _validate_identifier_pattern(identifier=identifier) _CONFIG[ConfigKeys.DISABLED.value].append(identifier) - try: + with suppress(ValueError): _CONFIG[ConfigKeys.ENABLED.value].remove(identifier) - except ValueError: - pass yield + + +def _match_wildcard(*, string, pattern): + """ + Helper function to check whether a given name matches a pattern + which can contain '*' wildcards. + """ + regexp = '.*'.join(re.escape(part) for part in pattern.split('*')) + return re.fullmatch(pattern=regexp, string=string) is not None + + +def _validate_identifier_pattern(*, identifier): + """ + The identifier (without wildcards) can have one of two forms: + + 1. + + where `group_name` is one of the keys in `entry_point_group_to_module_path_map` + and `tail` can be anything _except_ `ENTRY_POINT_STRING_SEPARATOR`. + + 2. a fully qualified Python name + + this is a colon-separated string, where each part satisfies + `part.isidentifier() and not keyword.iskeyword(part)` + + This function checks if an identifier _with_ wildcards can possibly + match one of these two forms. If it can not, a `ValueError` is raised. + + :param identifier: Process type string, or a pattern with '*' wildcard that matches it. + :type identifier: str + """ + common_error_msg = "Invalid identifier pattern '{}': ".format(identifier) + assert ENTRY_POINT_STRING_SEPARATOR not in '.*' # The logic of this function depends on this + # Check if it can be an entry point string + if identifier.count(ENTRY_POINT_STRING_SEPARATOR) > 1: + raise ValueError( + common_error_msg + + "Can contain at most one entry point string separator '{}'".format(ENTRY_POINT_STRING_SEPARATOR) + ) + # If there is one separator, it must be an entry point string. 
+ # Check if the left hand side is a matching pattern + if ENTRY_POINT_STRING_SEPARATOR in identifier: + group_pattern, _ = identifier.split(ENTRY_POINT_STRING_SEPARATOR) + if not any( + _match_wildcard(string=group_name, pattern=group_pattern) + for group_name in entry_point_group_to_module_path_map + ): + raise ValueError( + common_error_msg + "Group name pattern '{}' does not match any of the AiiDA entry point group names.". + format(group_pattern) + ) + # The group name pattern matches, and there are no further + # entry point string separators in the identifier, hence it is + # a valid pattern. + return + # The separator might be swallowed in a wildcard, for example + # aiida.* or aiida.calculations* + if '*' in identifier: + group_part, _ = identifier.split('*', 1) + if any(group_name.startswith(group_part) for group_name in entry_point_group_to_module_path_map): + return + # Finally, check if it could be a fully qualified Python name + for identifier_part in identifier.split('.'): + # If it contains a wildcard, we can not check for keywords. + # Replacing all wildcards with a single letter must give an + # identifier - this checks for invalid characters, and that it + # does not start with a number. 
+ if '*' in identifier_part: + if not identifier_part.replace('*', 'a').isidentifier(): + raise ValueError( + common_error_msg + + "Identifier part '{}' can not match a fully qualified Python name.".format(identifier_part) + ) + else: + if not identifier_part.isidentifier(): + raise ValueError(common_error_msg + "'{}' is not a valid Python identifier.".format(identifier_part)) + if keyword.iskeyword(identifier_part): + raise ValueError(common_error_msg + "'{}' is a reserved Python keyword.".format(identifier_part)) diff --git a/aiida/manage/configuration/__init__.py b/aiida/manage/configuration/__init__.py index 695666bf6b..2b5618f394 100644 --- a/aiida/manage/configuration/__init__.py +++ b/aiida/manage/configuration/__init__.py @@ -22,7 +22,7 @@ __all__ = ( config.__all__ + options.__all__ + profile.__all__ + - ('get_config', 'get_config_option', 'load_profile', 'reset_config') + ('get_config', 'get_config_option', 'get_config_path', 'load_profile', 'reset_config') ) @@ -66,6 +66,14 @@ def load_profile(profile=None): return PROFILE +def get_config_path(): + """Returns path to .aiida configuration directory.""" + import os + from .settings import AIIDA_CONFIG_FOLDER, DEFAULT_CONFIG_FILE_NAME + + return os.path.join(AIIDA_CONFIG_FOLDER, DEFAULT_CONFIG_FILE_NAME) + + def load_config(create=False): """Instantiate Config object representing an AiiDA configuration file. 
@@ -82,9 +90,8 @@ def load_config(create=False): import os from aiida.common import exceptions from .config import Config - from .settings import AIIDA_CONFIG_FOLDER, DEFAULT_CONFIG_FILE_NAME - filepath = os.path.join(AIIDA_CONFIG_FOLDER, DEFAULT_CONFIG_FILE_NAME) + filepath = get_config_path() if not os.path.isfile(filepath) and not create: raise exceptions.MissingConfigurationError('configuration file {} does not exist'.format(filepath)) diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py index cab9fcf4c4..9961ba57dd 100644 --- a/aiida/manage/configuration/config.py +++ b/aiida/manage/configuration/config.py @@ -34,7 +34,9 @@ class Config: # pylint: disable=too-many-public-methods def from_file(cls, filepath): """Instantiate a configuration object from the contents of a given file. - .. note:: if the filepath does not exist an empty file will be created with the default configuration. + .. note:: if the filepath does not exist an empty file will be created with the current default configuration + and will be written to disk. If the filepath does already exist but contains a configuration with an + outdated schema, the content will be migrated and then written to disk. 
:param filepath: the absolute path to the configuration file :return: `Config` instance @@ -56,6 +58,7 @@ def from_file(cls, filepath): echo.echo_warning('original backed up to `{}`'.format(filepath_backup)) config = Config(filepath, check_and_migrate_config(config)) + config.store() return config diff --git a/aiida/manage/configuration/setup.py b/aiida/manage/configuration/setup.py index 51659aab8d..b84ce7d71b 100644 --- a/aiida/manage/configuration/setup.py +++ b/aiida/manage/configuration/setup.py @@ -9,7 +9,7 @@ ########################################################################### """Module that defines methods required to setup a new AiiDA instance.""" import os -import urllib +import urllib.parse import click diff --git a/aiida/manage/tests/pytest_fixtures.py b/aiida/manage/tests/pytest_fixtures.py index 6e7d1550d2..5e79c30bdf 100644 --- a/aiida/manage/tests/pytest_fixtures.py +++ b/aiida/manage/tests/pytest_fixtures.py @@ -37,7 +37,16 @@ def aiida_profile(): @pytest.fixture(scope='function') -def clear_database(aiida_profile): # pylint: disable=redefined-outer-name +def clear_database(clear_database_after_test): # pylint: disable=redefined-outer-name,unused-argument + """Alias for 'clear_database_after_test'. + + Clears the database after each test. Use of the explicit + 'clear_database_after_test' is preferred. + """ + + +@pytest.fixture(scope='function') +def clear_database_after_test(aiida_profile): # pylint: disable=redefined-outer-name """Clear the database after each test. """ yield @@ -45,6 +54,14 @@ def clear_database(aiida_profile): # pylint: disable=redefined-outer-name aiida_profile.reset_db() +@pytest.fixture(scope='function') +def clear_database_before_test(aiida_profile): # pylint: disable=redefined-outer-name + """Clear the database before each test. + """ + aiida_profile.reset_db() + yield + + @pytest.fixture(scope='function') def temp_dir(): """Get a temporary directory. 
@@ -134,6 +151,9 @@ def get_code(entry_point, executable, computer=aiida_localhost): executable_path = shutil.which(executable) + if not executable_path: + raise ValueError('The executable "{}" was not found in the $PATH.'.format(executable)) + code = Code( input_plugin_name=entry_point, remote_computer_exec=[computer, executable_path], diff --git a/aiida/orm/implementation/django/comments.py b/aiida/orm/implementation/django/comments.py index b90a833898..8d1ceac867 100644 --- a/aiida/orm/implementation/django/comments.py +++ b/aiida/orm/implementation/django/comments.py @@ -9,6 +9,7 @@ ########################################################################### """Django implementations for the Comment entity and collection.""" # pylint: disable=import-error,no-name-in-module +import contextlib from datetime import datetime from django.core.exceptions import ObjectDoesNotExist @@ -61,13 +62,14 @@ def __init__(self, backend, node, user, content=None, ctime=None, mtime=None): def store(self): """Can only store if both the node and user are stored as well.""" - from aiida.common.lang import EmptyContextManager from aiida.backends.djsite.db.models import suppress_auto_now if self._dbmodel.dbnode.id is None or self._dbmodel.user.id is None: raise exceptions.ModificationNotAllowed('The corresponding node and/or user are not stored') - with suppress_auto_now([(models.DbComment, ['mtime'])]) if self.mtime else EmptyContextManager(): + # `contextlib.suppress` provides empty context and can be replaced with `contextlib.nullcontext` after we drop + # support for python 3.6 + with suppress_auto_now([(models.DbComment, ['mtime'])]) if self.mtime else contextlib.suppress(): super().store() @property diff --git a/aiida/orm/implementation/django/nodes.py b/aiida/orm/implementation/django/nodes.py index 039a4c2025..fc393e2114 100644 --- a/aiida/orm/implementation/django/nodes.py +++ b/aiida/orm/implementation/django/nodes.py @@ -488,14 +488,16 @@ def store(self, links=None, 
with_transaction=True, clean=True): # pylint: disab :param with_transaction: if False, do not use a transaction because the caller will already have opened one. :param clean: boolean, if True, will clean the attributes and extras before attempting to store """ - from aiida.common.lang import EmptyContextManager + import contextlib from aiida.backends.djsite.db.models import suppress_auto_now if clean: self.clean_values() - with transaction.atomic() if with_transaction else EmptyContextManager(): - with suppress_auto_now([(models.DbNode, ['mtime'])]) if self.mtime else EmptyContextManager(): + # `contextlib.suppress` provides empty context and can be replaced with `contextlib.nullcontext` after we drop + # support for python 3.6 + with transaction.atomic() if with_transaction else contextlib.suppress(): + with suppress_auto_now([(models.DbNode, ['mtime'])]) if self.mtime else contextlib.suppress(): # We need to save the node model instance itself first such that it has a pk # that can be used in the foreign keys that will be needed for setting the # attributes and links diff --git a/aiida/orm/implementation/querybuilder.py b/aiida/orm/implementation/querybuilder.py index 6ef8f33342..9d026e13fb 100644 --- a/aiida/orm/implementation/querybuilder.py +++ b/aiida/orm/implementation/querybuilder.py @@ -24,7 +24,7 @@ from sqlalchemy.dialects.postgresql import JSONB from aiida.common import exceptions -from aiida.common.lang import abstractclassmethod, type_check +from aiida.common.lang import type_check from aiida.common.exceptions import InputValidationError __all__ = ('BackendQueryBuilder',) @@ -126,7 +126,7 @@ def modify_expansions(self, alias, expansions): This is important for the schema having attributes in a different table. 
""" - @abstractclassmethod + @abc.abstractclassmethod def get_filter_expr_from_attributes(cls, operator, value, attr_key, column=None, column_name=None, alias=None): # pylint: disable=too-many-arguments """ Returns an valid SQLAlchemy expression. @@ -383,8 +383,7 @@ def iterdict(self, query, batch_size, tag_to_projected_properties_dict, tag_to_a self.get_session().close() raise - @staticmethod - @abstractclassmethod + @abc.abstractstaticmethod def get_table_name(aliased_class): """Returns the table name given an Aliased class.""" diff --git a/aiida/orm/nodes/data/code.py b/aiida/orm/nodes/data/code.py index f4fb1b40b3..de80d881ae 100644 --- a/aiida/orm/nodes/data/code.py +++ b/aiida/orm/nodes/data/code.py @@ -120,24 +120,40 @@ def full_label(self): """ return '{}@{}'.format(self.label, self.get_computer_name()) + @property + def label(self): + """Return the node label. + + :return: the label + """ + return super().label + + @label.setter + def label(self, value): + """Set the label. + + :param value: the new value to set + """ + if '@' in str(value): + msg = "Code labels must not contain the '@' symbol" + raise InputValidationError(msg) + + super(Code, self.__class__).label.fset(self, value) + def relabel(self, new_label, raise_error=True): """Relabel this code. :param new_label: new code label :param raise_error: Set to False in order to return a list of errors instead of raising them. + + .. deprecated:: 1.2.0 + Will remove raise_error in `v2.0.0`. Use `try/except` instead. 
""" suffix = '@{}'.format(self.get_computer_name()) if new_label.endswith(suffix): new_label = new_label[:-len(suffix)] - if '@' in new_label: - msg = "Code labels must not contain the '@' symbol" - if raise_error: - raise InputValidationError(msg) - else: - return InputValidationError(msg) - self.label = new_label def get_description(self): @@ -145,7 +161,7 @@ def get_description(self): :return: string description of this Code instance """ - return '{}'.format(self.label) + return '{}'.format(self.description) @classmethod def get_code_helper(cls, label, machinename=None): diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index 06237738e2..86f6d9ace3 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -85,7 +85,7 @@ def delete(self, node_id): _hash_ignored_attributes = tuple() # Flag that determines whether the class can be cached. - _cachable = True + _cachable = False # Base path within the repository where to put objects by default _repository_base_path = 'path' @@ -1090,6 +1090,11 @@ def _store_from_cache(self, cache_node, with_transaction): if key != Sealable.SEALED_KEY: self.set_attribute(key, value) + # The erase() removes the current content of the sandbox folder. + # If this was not done, the content of the sandbox folder could + # become mangled when copying over the content of the cache + # source repository folder. + self._repository.erase() self.put_object_from_tree(cache_node._repository._get_base_folder().abspath) # pylint: disable=protected-access self._store(with_transaction=with_transaction, clean=False) diff --git a/aiida/orm/nodes/process/process.py b/aiida/orm/nodes/process/process.py index e8cc039093..890645a1d0 100644 --- a/aiida/orm/nodes/process/process.py +++ b/aiida/orm/nodes/process/process.py @@ -46,9 +46,6 @@ class ProcessNode(Sealable, Node): # The link_type might not be correct while the object is being created. 
_hash_ignored_inputs = ['CALL_CALC', 'CALL_WORK'] - # Specific sub classes should be marked as cacheable when appropriate - _cachable = False - _unstorable_message = 'only Data, WorkflowNode, CalculationNode or their subclasses can be stored' def __str__(self): diff --git a/aiida/orm/nodes/process/workflow/workchain.py b/aiida/orm/nodes/process/workflow/workchain.py index 6cceeb14e4..1c4e7a01be 100644 --- a/aiida/orm/nodes/process/workflow/workchain.py +++ b/aiida/orm/nodes/process/workflow/workchain.py @@ -19,8 +19,6 @@ class WorkChainNode(WorkflowNode): """ORM class for all nodes representing the execution of a WorkChain.""" - _cachable = False - STEPPER_STATE_INFO_KEY = 'stepper_state_info' @classproperty diff --git a/aiida/orm/utils/loaders.py b/aiida/orm/utils/loaders.py index b7abb8be29..5e73ff46e1 100644 --- a/aiida/orm/utils/loaders.py +++ b/aiida/orm/utils/loaders.py @@ -8,10 +8,11 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Module with `OrmEntityLoader` and its sub classes that simplify loading entities through their identifiers.""" +from abc import abstractclassmethod from enum import Enum from aiida.common.exceptions import MultipleObjectsError, NotExistent -from aiida.common.lang import abstractclassmethod, classproperty +from aiida.common.lang import classproperty from aiida.orm.querybuilder import QueryBuilder __all__ = ( diff --git a/aiida/orm/utils/repository.py b/aiida/orm/utils/repository.py index 519c96c616..a8aa5155b2 100644 --- a/aiida/orm/utils/repository.py +++ b/aiida/orm/utils/repository.py @@ -262,7 +262,7 @@ def erase(self, force=False): if not force: self.validate_mutability() - self._repo_folder.erase() + self._get_base_folder().erase() def store(self): """Store the contents of the sandbox folder into the repository folder.""" diff --git a/aiida/restapi/common/utils.py b/aiida/restapi/common/utils.py index a29e131c12..4abc4dd873 100644 
--- a/aiida/restapi/common/utils.py +++ b/aiida/restapi/common/utils.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """ Util methods """ -import urllib +import urllib.parse from datetime import datetime, timedelta from flask import jsonify diff --git a/aiida/tools/importexport/common/utils.py b/aiida/tools/importexport/common/utils.py index 3a2453b4cf..ae4843677f 100644 --- a/aiida/tools/importexport/common/utils.py +++ b/aiida/tools/importexport/common/utils.py @@ -10,7 +10,8 @@ """ Utility functions for import/export of AiiDA entities """ # pylint: disable=inconsistent-return-statements,too-many-branches,too-many-return-statements # pylint: disable=too-many-nested-blocks,too-many-locals -import urllib +import urllib.request +import urllib.parse from html.parser import HTMLParser from aiida.tools.importexport.common.config import ( diff --git a/docs/requirements_for_rtd.txt b/docs/requirements_for_rtd.txt index 2d98e5c8eb..6ac780528c 100644 --- a/docs/requirements_for_rtd.txt +++ b/docs/requirements_for_rtd.txt @@ -52,4 +52,4 @@ tabulate~=0.8.5 tornado<5.0 tzlocal~=2.0 upf_to_json~=0.9.2 -wrapt~=1.11 \ No newline at end of file +wrapt~=1.11.1 \ No newline at end of file diff --git a/docs/source/developer_guide/core/caching.rst b/docs/source/developer_guide/core/caching.rst index b8ec01d423..29bd390038 100644 --- a/docs/source/developer_guide/core/caching.rst +++ b/docs/source/developer_guide/core/caching.rst @@ -42,7 +42,7 @@ The ``WorkflowNode`` example As discussed in the :ref:`user guide `, nodes which can have ``RETURN`` links cannot be cached. This is enforced on two levels: -* The ``_cachable`` property is set to ``False`` in the :class:`~aiida.orm.nodes.process.ProcessNode`, and only re-enabled in :class:`~aiida.orm.nodes.process.calculation.calcjob.CalcJobNode` and :class:`~aiida.orm.nodes.process.calculation.calcfunction.CalcFunctionNode`. 
+* The ``_cachable`` property is set to ``False`` in the :class:`~aiida.orm.nodes.Node`, and only re-enabled in :class:`~aiida.orm.nodes.process.calculation.calculation.CalculationNode` (which affects CalcJobs and calcfunctions). This means that a :class:`~aiida.orm.nodes.process.workflow.workflow.WorkflowNode` will not be cached. * The ``_store_from_cache`` method, which is used to "clone" an existing node, will raise an error if the existing node has any ``RETURN`` links. This extra safe-guard prevents cases where a user might incorrectly override the ``_cachable`` property on a ``WorkflowNode`` subclass. diff --git a/docs/source/working_with_aiida/caching.rst b/docs/source/working_with_aiida/caching.rst index def40a245d..4e2f864352 100644 --- a/docs/source/working_with_aiida/caching.rst +++ b/docs/source/working_with_aiida/caching.rst @@ -104,6 +104,30 @@ Besides an on/off switch per profile, the ``.aiida/cache_config.yml`` provides c In this example, caching is disabled by default, but explicitly enabled for calculaions of the ``PwCalculation`` class, identified by the ``aiida.calculations:quantumespresso.pw`` entry point string. It also shows how to disable caching for particular calculations (which has no effect here due to the profile-wide default). +For calculations which do not have an entry point, you need to specify the fully qualified Python name instead. For example, the ``seekpath_structure_analysis`` calcfunction defined in ``aiida_quantumespresso.workflows.functions.seekpath_structure_analysis`` is labelled as ``aiida_quantumespresso.workflows.functions.seekpath_structure_analysis.seekpath_structure_analysis``. From an existing :class:`~aiida.orm.nodes.process.calculation.CalculationNode`, you can get the identifier string through the ``process_type`` attribute. + +The caching configuration also accepts ``*`` wildcards. 
For example, the following configuration enables caching for all calculation entry points defined by ``aiida-quantumespresso``, and the ``seekpath_structure_analysis`` calcfunction. Note that the ``*.seekpath_structure_analysis`` entry needs to be quoted, because it starts with ``*`` which is a special character in YAML. + +.. code:: yaml + + profile-name: + default: False + enabled: + - aiida.calculations:quantumespresso.* + - '*.seekpath_structure_analysis' + +You can even override a wildcard with a more specific entry. The following configuration enables caching for all ``aiida.calculation`` entry points, except those of ``aiida-quantumespresso``: + +.. code:: yaml + + profile-name: + default: False + enabled: + - aiida.calculations:* + disabled: + - aiida.calculations:quantumespresso.* + + Instance level .............. diff --git a/environment.yml b/environment.yml index cfb93017b5..249c93a130 100644 --- a/environment.yml +++ b/environment.yml @@ -1,4 +1,4 @@ -# Usage: conda env create -n myenvname -f environment.yml python=3.6 +# Usage: conda env create -n myenvname -f environment.yml --- name: aiida channels: @@ -6,6 +6,7 @@ channels: - conda-forge - etetoolkit dependencies: +- python~=3.7 - aldjemy~=0.9.1 - alembic~=1.2 - circus~=0.16.1 @@ -36,4 +37,4 @@ dependencies: - tornado<5.0 - tzlocal~=2.0 - upf_to_json~=0.9.2 -- wrapt~=1.11 +- wrapt~=1.11.1 diff --git a/pyproject.toml b/pyproject.toml index 04dfb768c0..a4e1ecf2f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,4 @@ [build-system] # Minimum requirements for the build system to execute. 
-requires = ["setuptools", "wheel", "reentry~=1.3"] +requires = ["setuptools>=40.8.0", "wheel", "reentry~=1.3"] +build-backend = "setuptools.build_meta:__legacy__" diff --git a/setup.json b/setup.json index 2065b896b4..9b1eecbebb 100644 --- a/setup.json +++ b/setup.json @@ -1,6 +1,6 @@ { "name": "aiida-core", - "version": "1.1.0", + "version": "1.1.1", "url": "http://www.aiida.net/", "license": "MIT License", "author": "The AiiDA team", @@ -51,7 +51,7 @@ "tornado<5.0", "tzlocal~=2.0", "upf_to_json~=0.9.2", - "wrapt~=1.11" + "wrapt~=1.11.1" ], "extras_require": { "ssh_kerberos": [ diff --git a/tests/backends/__init__.py b/tests/backends/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/backends/__init__.py +++ b/tests/backends/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/cmdline/commands/test_code.py b/tests/cmdline/commands/test_code.py index acb8cb132b..d50bfc78b3 100644 --- a/tests/cmdline/commands/test_code.py +++ b/tests/cmdline/commands/test_code.py @@ -152,6 +152,8 @@ def setUp(self): ) code.label = 'code' code.description = 'desc' + code.set_prepend_text('text to prepend') + code.set_append_text('text to append') code.store() self.code = code @@ -237,7 +239,7 @@ def test_code_show(self): self.assertTrue(str(self.code.pk) in result.output) def test_code_duplicate_interactive(self): - """Test code duplication interacgtive.""" + """Test code duplication interactive.""" os.environ['VISUAL'] = 'sleep 1; vim -cwq' os.environ['EDITOR'] = 'sleep 1; vim -cwq' label = 
'code_duplicate_interactive' @@ -252,7 +254,7 @@ def test_code_duplicate_interactive(self): self.assertEqual(self.code.get_append_text(), new_code.get_append_text()) def test_code_duplicate_non_interactive(self): - """Test code duplication non-interacgtive.""" + """Test code duplication non-interactive.""" label = 'code_duplicate_noninteractive' result = self.cli_runner.invoke(code_duplicate, ['--non-interactive', '--label=' + label, str(self.code.pk)]) self.assertIsNone(result.exception, result.output) diff --git a/tests/cmdline/commands/test_computer.py b/tests/cmdline/commands/test_computer.py index 3dbdd6a3a6..574110121d 100644 --- a/tests/cmdline/commands/test_computer.py +++ b/tests/cmdline/commands/test_computer.py @@ -557,6 +557,8 @@ def setUpClass(cls, *args, **kwargs): workdir='/tmp/aiida' ) cls.comp.set_default_mpiprocs_per_machine(1) + cls.comp.set_prepend_text('text to prepend') + cls.comp.set_append_text('text to append') cls.comp.store() def setUp(self): diff --git a/tests/cmdline/commands/test_status.py b/tests/cmdline/commands/test_status.py index 145d6b5095..f76ba44e2b 100644 --- a/tests/cmdline/commands/test_status.py +++ b/tests/cmdline/commands/test_status.py @@ -33,6 +33,7 @@ def test_status_1(self): options = [] result = self.cli_runner.invoke(cmd_status.verdi_status, options) self.assertIsInstance(result.exception, SystemExit) + self.assertIn('config', result.output) self.assertIn('profile', result.output) self.assertIn('postgres', result.output) self.assertIn('rabbitmq', result.output) diff --git a/tests/cmdline/params/options/__init__.py b/tests/cmdline/params/options/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/cmdline/params/options/__init__.py +++ b/tests/cmdline/params/options/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/common/test_lang.py b/tests/common/test_lang.py deleted file mode 100644 index 33541f1557..0000000000 --- a/tests/common/test_lang.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Tests for the :py:mod:`~aiida.common.lang` module.""" - -from aiida.backends.testbase import AiidaTestCase -from aiida.common import lang - -protected_checked = lang.protected_decorator(check=True) # pylint: disable=invalid-name -protected_unchecked = lang.protected_decorator(check=False) # pylint: disable=invalid-name - - -class Protected: - """Class to test the protected decorator.""" - - # pylint: disable=no-self-use - - def member_internal(self): - """Class method that is allowed to call the protected methods.""" - result = self.member_unprotected() - result &= self.member_protected_checked() - result &= self.member_protected_unchecked() - result &= self.property_correct_order - return result - - def member_unprotected(self): - """Simple unproctected method.""" - return True - - @protected_unchecked - def member_protected_unchecked(self): - """Decorated method but without check - can be called outside scope.""" - return True - - @protected_checked - def member_protected_checked(self): - """Decorated 
method but without check - cannot be called outside scope.""" - return True - - @property - @protected_checked - def property_correct_order(self): - """Decorated property but without check - cannot be called outside scope.""" - return True - - -class TestProtectedDecorator(AiidaTestCase): - """Tests for the :py:func:`~aiida.common.lang.protected` decorator.""" - - # pylint: disable=unused-variable - - def test_free_function(self): - """Decorating a free function should raise.""" - with self.assertRaises(RuntimeError): - - @protected_unchecked - def some_function(): - pass - - def test_property_incorrect_order(self): - """The protected decorator should raise if applied before the property decorator.""" - with self.assertRaises(RuntimeError): - - class InvalidProtected: - """Invalid use of protected decorator.""" - - @staticmethod - @protected_checked - @property - def property_incorrect_order(): - return True - - def test_internal(self): - """Calling protected internally from a public class method should work.""" - instance = Protected() - self.assertTrue(instance.member_internal()) - - def test_unprotected(self): - """Test that calling unprotected class method is allowed.""" - instance = Protected() - self.assertTrue(instance.member_unprotected()) - - def test_protected_unchecked(self): - """Test that calling an unchecked protected class method is allowed.""" - instance = Protected() - self.assertTrue(instance.member_protected_unchecked()) - - def test_protected_checked(self): - """Test that calling a checked protected class method raises.""" - instance = Protected() - with self.assertRaises(RuntimeError): - self.assertTrue(instance.member_protected_checked()) - - def test_protected_property_correct_order(self): - """Test that calling a checked protected property raises.""" - instance = Protected() - with self.assertRaises(RuntimeError): - self.assertEqual(instance.property_correct_order, True) diff --git a/tests/engine/processes/__init__.py 
b/tests/engine/processes/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/engine/processes/__init__.py +++ b/tests/engine/processes/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/engine/processes/workchains/__init__.py b/tests/engine/processes/workchains/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/engine/processes/workchains/__init__.py +++ b/tests/engine/processes/workchains/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/engine/processes/workchains/test_restart.py b/tests/engine/processes/workchains/test_restart.py new file mode 100644 index 0000000000..5cbabaa73b --- /dev/null +++ b/tests/engine/processes/workchains/test_restart.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Tests for `aiida.engine.processes.workchains.restart` module.""" +from aiida.backends.testbase import AiidaTestCase +from aiida.engine.processes.workchains.restart import BaseRestartWorkChain +from aiida.engine.processes.workchains.utils import process_handler + + +class TestBaseRestartWorkChain(AiidaTestCase): + """Tests for the `BaseRestartWorkChain` class.""" + + @staticmethod + def test_is_process_handler(): + """Test the `BaseRestartWorkChain.is_process_handler` class method.""" + + class SomeWorkChain(BaseRestartWorkChain): + """Dummy class.""" + + @process_handler() + def handler_a(self, node): + pass + + def not_a_handler(self, node): + pass + + assert SomeWorkChain.is_process_handler('handler_a') + assert not SomeWorkChain.is_process_handler('not_a_handler') + assert not SomeWorkChain.is_process_handler('unexisting_method') + + @staticmethod + def test_get_process_handler(): + """Test the `BaseRestartWorkChain.get_process_handlers` class method.""" + + class SomeWorkChain(BaseRestartWorkChain): + """Dummy class.""" + + @process_handler + def handler_a(self, node): + pass + + def not_a_handler(self, node): + pass + + assert [handler.__name__ for handler in SomeWorkChain.get_process_handlers()] == ['handler_a'] diff --git a/tests/engine/processes/workchains/test_utils.py b/tests/engine/processes/workchains/test_utils.py index 2f007a136b..00f1e127a3 100644 --- a/tests/engine/processes/workchains/test_utils.py +++ b/tests/engine/processes/workchains/test_utils.py @@ -7,12 +7,12 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # 
########################################################################### -# pylint: disable=no-self-use,unused-argument,unused-variable +# pylint: disable=no-self-use,unused-argument,unused-variable,function-redefined,missing-class-docstring,missing-function-docstring """Tests for `aiida.engine.processes.workchains.utils` module.""" from aiida.backends.testbase import AiidaTestCase from aiida.engine import ExitCode, ProcessState from aiida.engine.processes.workchains.restart import BaseRestartWorkChain -from aiida.engine.processes.workchains.utils import register_process_handler, ProcessHandlerReport +from aiida.engine.processes.workchains.utils import process_handler, ProcessHandlerReport from aiida.orm import ProcessNode from aiida.plugins import CalculationFactory @@ -20,27 +20,33 @@ class TestRegisterProcessHandler(AiidaTestCase): - """Tests for the `register_process_handler` decorator.""" + """Tests for the `process_handler` decorator.""" def test_priority_keyword_only(self): """The `priority` should be keyword only.""" with self.assertRaises(TypeError): - @register_process_handler(BaseRestartWorkChain, 400) # pylint: disable=too-many-function-args + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(400) # pylint: disable=too-many-function-args + def _(self, node): + pass + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(priority=400) def _(self, node): pass - @register_process_handler(BaseRestartWorkChain, priority=400) - def _(self, node): - pass - def test_priority_type(self): """The `priority` should be an integer.""" with self.assertRaises(TypeError): - @register_process_handler(BaseRestartWorkChain, priority='400') - def _(self, node): - pass + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(priority='400') + def _(self, node): + pass def test_priority(self): """Test that the handlers are called in order of their `priority`.""" @@ -50,36 +56,36 @@ class ArithmeticAddBaseWorkChain(BaseRestartWorkChain): 
_process_class = ArithmeticAddCalculation - # Register some handlers that should be called in order of 4 -> 3 -> 2 -> 1 but are on purpose registered in a - # different order. When called, they should add their name to `handlers_called` attribute of the node. This can - # then be checked after invoking `inspect_process` to ensure they were called in the right order - @register_process_handler(ArithmeticAddBaseWorkChain, priority=100) - def handler_01(self, node): - handlers_called = node.get_attribute(attribute_key, default=[]) - handlers_called.append('handler_01') - node.set_attribute(attribute_key, handlers_called) - return ProcessHandlerReport(False, ExitCode(100)) - - @register_process_handler(ArithmeticAddBaseWorkChain, priority=300) - def handler_03(self, node): - handlers_called = node.get_attribute(attribute_key, default=[]) - handlers_called.append('handler_03') - node.set_attribute(attribute_key, handlers_called) - return ProcessHandlerReport(False, ExitCode(300)) - - @register_process_handler(ArithmeticAddBaseWorkChain, priority=200) - def handler_02(self, node): - handlers_called = node.get_attribute(attribute_key, default=[]) - handlers_called.append('handler_02') - node.set_attribute(attribute_key, handlers_called) - return ProcessHandlerReport(False, ExitCode(200)) - - @register_process_handler(ArithmeticAddBaseWorkChain, priority=400) - def handler_04(self, node): - handlers_called = node.get_attribute(attribute_key, default=[]) - handlers_called.append('handler_04') - node.set_attribute(attribute_key, handlers_called) - return ProcessHandlerReport(False, ExitCode(400)) + # Register some handlers that should be called in order of 4 -> 3 -> 2 -> 1 but are on purpose registered in + # a different order. When called, they should add their name to `handlers_called` attribute of the node. 
+ # This can then be checked after invoking `inspect_process` to ensure they were called in the right order + @process_handler(priority=100) + def handler_01(self, node): + handlers_called = node.get_attribute(attribute_key, default=[]) + handlers_called.append('handler_01') + node.set_attribute(attribute_key, handlers_called) + return ProcessHandlerReport(False, ExitCode(100)) + + @process_handler(priority=300) + def handler_03(self, node): + handlers_called = node.get_attribute(attribute_key, default=[]) + handlers_called.append('handler_03') + node.set_attribute(attribute_key, handlers_called) + return ProcessHandlerReport(False, ExitCode(300)) + + @process_handler(priority=200) + def handler_02(self, node): + handlers_called = node.get_attribute(attribute_key, default=[]) + handlers_called.append('handler_02') + node.set_attribute(attribute_key, handlers_called) + return ProcessHandlerReport(False, ExitCode(200)) + + @process_handler(priority=400) + def handler_04(self, node): + handlers_called = node.get_attribute(attribute_key, default=[]) + handlers_called.append('handler_04') + node.set_attribute(attribute_key, handlers_called) + return ProcessHandlerReport(False, ExitCode(400)) child = ProcessNode() child.set_process_state(ProcessState.FINISHED) @@ -97,14 +103,18 @@ def test_exit_codes_keyword_only(self): """The `exit_codes` should be keyword only.""" with self.assertRaises(TypeError): - @register_process_handler(BaseRestartWorkChain, ExitCode()) # pylint: disable=too-many-function-args + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(ExitCode()) # pylint: disable=too-many-function-args + def _(self, node): + pass + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(exit_codes=ExitCode()) def _(self, node): pass - @register_process_handler(BaseRestartWorkChain, exit_codes=ExitCode()) - def _(self, node): - pass - def test_exit_codes_type(self): """The `exit_codes` should be single or list of `ExitCode` instances.""" 
incorrect_types = [ @@ -117,24 +127,26 @@ def test_exit_codes_type(self): with self.assertRaises(TypeError): for incorrect_type in incorrect_types: - @register_process_handler(BaseRestartWorkChain, exit_codes=incorrect_type) - def _(self, node): - pass + class SomeWorkChain(BaseRestartWorkChain): - @register_process_handler(BaseRestartWorkChain, exit_codes=ExitCode(400, 'Some exit code')) - def _(self, node): - pass + @process_handler(exit_codes=incorrect_type) + def _(self, node): + pass - @register_process_handler(BaseRestartWorkChain, exit_codes=[ExitCode(400, 'a'), ExitCode(401, 'b')]) - def _(self, node): - pass + class SomeWorkChain(BaseRestartWorkChain): - def test_exit_codes_filter(self): - """Test that the `exit_codes` argument properly filters, returning `None` if the `node` has different status.""" + @process_handler(exit_codes=ExitCode(400, 'Some exit code')) + def _(self, node): + pass - class ArithmeticAddBaseWorkChain(BaseRestartWorkChain): + class SomeWorkChain(BaseRestartWorkChain): - _process_class = ArithmeticAddCalculation + @process_handler(exit_codes=[ExitCode(400, 'a'), ExitCode(401, 'b')]) + def _(self, node): + pass + + def test_exit_codes_filter(self): + """Test that the `exit_codes` argument properly filters, returning `None` if the `node` has different status.""" exit_code_filter = ExitCode(400) @@ -146,17 +158,56 @@ class ArithmeticAddBaseWorkChain(BaseRestartWorkChain): node_skip = ProcessNode() node_skip.set_exit_status(200) # Some other exit status - @register_process_handler(ArithmeticAddBaseWorkChain, exit_codes=exit_code_filter) - def _(self, node): - return ProcessHandlerReport() + class ArithmeticAddBaseWorkChain(BaseRestartWorkChain): + + _process_class = ArithmeticAddCalculation + + @process_handler(exit_codes=exit_code_filter) + def _(self, node): + return ProcessHandlerReport() # Create dummy process instance process = ArithmeticAddBaseWorkChain() # Loop over all handlers, which should be just the one, and call it with the 
two different nodes - for handler in process._handlers: # pylint: disable=protected-access + for handler in process.get_process_handlers(): # The `node_match` should match the `exit_codes` filter and so return a report instance - assert isinstance(handler.method(process, node_match), ProcessHandlerReport) + assert isinstance(handler(process, node_match), ProcessHandlerReport) # The `node_skip` has a wrong exit status and so should get skipped, returning `None` - assert handler.method(process, node_skip) is None + assert handler(process, node_skip) is None + + def test_enabled_keyword_only(self): + """The `enabled` should be keyword only.""" + with self.assertRaises(TypeError): + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(True) # pylint: disable=too-many-function-args + def _(self, node): + pass + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(enabled=False) + def _(self, node): + pass + + def test_enabled(self): + """The `enabled` should be keyword only.""" + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler + def enabled_handler(self, node): + pass + + assert SomeWorkChain.enabled_handler.enabled # pylint: disable=no-member + + class SomeWorkChain(BaseRestartWorkChain): + + @process_handler(enabled=False) + def disabled_handler(self, node): + pass + + assert not SomeWorkChain.disabled_handler.enabled # pylint: disable=no-member diff --git a/tests/engine/test_calcfunctions.py b/tests/engine/test_calcfunctions.py index d00b1d55c7..5ff6329db2 100644 --- a/tests/engine/test_calcfunctions.py +++ b/tests/engine/test_calcfunctions.py @@ -88,7 +88,7 @@ def test_calcfunction_caching(self): self.assertEqual(EXECUTION_COUNTER, 1) # Caching a CalcFunctionNode should be possible - with enable_caching(CalcFunctionNode): + with enable_caching(identifier='*.execution_counter_calcfunction'): input_node = Int(5) result, cached = execution_counter_calcfunction.run_get_node(input_node) @@ -109,7 +109,7 @@ def 
add_calcfunction(data): # pylint: disable=redefined-outer-name """This calcfunction has a different source code from the one created at the module level.""" return Int(data.value + 2) - with enable_caching(CalcFunctionNode): + with enable_caching(identifier='*.add_calcfunction'): result_cached, cached = add_calcfunction.run_get_node(self.default_int) self.assertNotEqual(result_original, result_cached) self.assertFalse(cached.is_created_from_cache) diff --git a/tests/engine/test_process.py b/tests/engine/test_process.py index 32619e01d7..3405c94a85 100644 --- a/tests/engine/test_process.py +++ b/tests/engine/test_process.py @@ -317,7 +317,7 @@ class TestProcess1(Process): @classmethod def define(cls, spec): super().define(spec) - spec.input('add_outputs', valid_type=orm.Bool, default=orm.Bool(False)) + spec.input('add_outputs', valid_type=orm.Bool, default=lambda: orm.Bool(False)) spec.output_namespace('integer.namespace', valid_type=orm.Int, dynamic=True) spec.output('required_string', valid_type=orm.Str, required=True) diff --git a/tests/engine/test_process_builder.py b/tests/engine/test_process_builder.py index 53e617b02d..39782ac81e 100644 --- a/tests/engine/test_process_builder.py +++ b/tests/engine/test_process_builder.py @@ -31,7 +31,7 @@ def define(cls, spec): spec.input('name.spaced', valid_type=orm.Int, help='Namespaced port') spec.input('name_spaced', valid_type=orm.Str, help='Not actually a namespaced port') spec.input('boolean', valid_type=orm.Bool, help='A pointless boolean') - spec.input('default', valid_type=orm.Int, default=orm.Int(DEFAULT_INT).store()) + spec.input('default', valid_type=orm.Int, default=lambda: orm.Int(DEFAULT_INT).store()) class LazyProcessNamespace(Process): diff --git a/tests/engine/test_process_function.py b/tests/engine/test_process_function.py index fefcd22a83..548b6a1dae 100644 --- a/tests/engine/test_process_function.py +++ b/tests/engine/test_process_function.py @@ -52,7 +52,7 @@ def function_args(data_a): return data_a 
@workfunction - def function_args_with_default(data_a=orm.Int(DEFAULT_INT)): + def function_args_with_default(data_a=lambda: orm.Int(DEFAULT_INT)): return data_a @calcfunction @@ -72,12 +72,12 @@ def function_args_and_kwargs(data_a, **kwargs): return result @workfunction - def function_args_and_default(data_a, data_b=orm.Int(DEFAULT_INT)): + def function_args_and_default(data_a, data_b=lambda: orm.Int(DEFAULT_INT)): return {'data_a': data_a, 'data_b': data_b} @workfunction def function_defaults( - data_a=orm.Int(DEFAULT_INT), metadata={ + data_a=lambda: orm.Int(DEFAULT_INT), metadata={ 'label': DEFAULT_LABEL, 'description': DEFAULT_DESCRIPTION } diff --git a/tests/engine/test_work_chain.py b/tests/engine/test_work_chain.py index f57e6a9f6f..d3c3e642d7 100644 --- a/tests/engine/test_work_chain.py +++ b/tests/engine/test_work_chain.py @@ -7,12 +7,12 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - import inspect import unittest import plumpy import plumpy.test_utils +import pytest from tornado import gen from aiida import orm @@ -87,8 +87,8 @@ class Wf(WorkChain): @classmethod def define(cls, spec): super().define(spec) - spec.input('value', default=Str('A')) - spec.input('n', default=Int(3)) + spec.input('value', default=lambda: Str('A')) + spec.input('n', default=lambda: Int(3)) spec.outputs.dynamic = True spec.outline( cls.s1, @@ -155,8 +155,8 @@ class PotentialFailureWorkChain(WorkChain): def define(cls, spec): super().define(spec) spec.input('success', valid_type=Bool) - spec.input('through_return', valid_type=Bool, default=Bool(False)) - spec.input('through_exit_code', valid_type=Bool, default=Bool(False)) + spec.input('through_return', valid_type=Bool, default=lambda: Bool(False)) + spec.input('through_exit_code', valid_type=Bool, default=lambda: Bool(False)) spec.exit_code(cls.EXIT_STATUS, 
'EXIT_STATUS', cls.EXIT_MESSAGE) spec.outline(if_(cls.should_return_out_of_outline)(return_(cls.EXIT_STATUS)), cls.failure, cls.success) spec.output(cls.OUTPUT_LABEL, required=False) @@ -943,7 +943,7 @@ class SubWorkChain(WorkChain): @classmethod def define(cls, spec): super().define(spec) - spec.input('kill', default=Bool(False)) + spec.input('kill', default=lambda: Bool(False)) spec.outline(cls.begin, cls.check) def begin(self): @@ -961,7 +961,7 @@ class MainWorkChain(WorkChain): @classmethod def define(cls, spec): super().define(spec) - spec.input('kill', default=Bool(False)) + spec.input('kill', default=lambda: Bool(False)) spec.outline(cls.submit_child, cls.check) def submit_child(self): @@ -1275,7 +1275,7 @@ def test_expose(self): } }) - @unittest.skip('Reenable when issue #2515 is solved: references to deleted ORM instances') + @unittest.skip('Functionality of `WorkChain.exposed_outputs` is broken.') def test_nested_expose(self): res = launch.run( GrandParentExposeWorkChain, @@ -1312,17 +1312,22 @@ def test_nested_expose(self): } }) + @pytest.mark.filterwarnings('ignore::UserWarning') def test_issue_1741_expose_inputs(self): - """Test that expose inputs works correctly when copying a stored default value""" + """Test that expose inputs works correctly when copying a stored default value. + + .. note:: a node instance is used for a port default, which is normally not advisable, but given that this + regression test relies on the default being a stored node, we cannot change it. Given that the default is + only used within this test, it should not pose any problems. 
- stored_a = Int(5).store() + """ class Parent(WorkChain): @classmethod def define(cls, spec): super().define(spec) - spec.input('a', default=stored_a) + spec.input('a', default=Int(5).store()) spec.outline(cls.step1) def step1(self): @@ -1401,7 +1406,7 @@ class Child(WorkChain): @classmethod def define(cls, spec): super().define(spec) - spec.input('a', valid_type=Bool, default=Bool(True)) + spec.input('a', valid_type=Bool, default=lambda: Bool(True)) def run(self): pass diff --git a/tests/engine/test_workfunctions.py b/tests/engine/test_workfunctions.py index 23aed08990..9f62d54a77 100644 --- a/tests/engine/test_workfunctions.py +++ b/tests/engine/test_workfunctions.py @@ -72,7 +72,7 @@ def test_workfunction_caching(self): _ = self.test_workfunction(self.default_int) # Caching should always be disabled for a WorkFunctionNode - with enable_caching(identifier=WorkFunctionNode): + with enable_caching(): _, cached = self.test_workfunction.run_get_node(self.default_int) self.assertFalse(cached.is_created_from_cache) diff --git a/tests/manage/configuration/test_config.py b/tests/manage/configuration/test_config.py index 7e4c0544e2..e9bb5e620b 100644 --- a/tests/manage/configuration/test_config.py +++ b/tests/manage/configuration/test_config.py @@ -178,6 +178,60 @@ def tearDown(self): if self.config_filebase and os.path.isdir(self.config_filebase): shutil.rmtree(self.config_filebase) + def compare_config_in_memory_and_on_disk(self, config, filepath): + """Verify that the contents of `config` are identical to the contents of the file with path `filepath`. 
+ + :param config: instance of `Config` + :param filepath: absolute filepath to a configuration file + :raises AssertionError: if content of `config` is not equal to that of file on disk + """ + from io import BytesIO + + # Write the content of the `config` in memory to a byte stream + stream = BytesIO() + config._write(stream) # pylint: disable=protected-access + in_memory = stream.getvalue() + + # Read the content stored on disk + with open(filepath, 'rb') as handle: + on_disk = handle.read() + + # Compare content of in memory config and the one on disk + self.assertEqual(in_memory, on_disk) + + def test_from_file(self): + """Test the `Config.from_file` class method. + + Regression test for #3790: make sure configuration is written to disk after it is loaded and migrated. + """ + + # If the config file does not exist, a completely new file is created with a migrated config + filepath_nonexisting = os.path.join(self.config_filebase, 'config_nonexisting.json') + config = Config.from_file(filepath_nonexisting) + + # Make sure that the migrated config is written to disk, by loading it from disk and comparing to the content + # of the in memory config object. This we do by calling `Config._write` passing a simple stream to have the + # class write the file to memory instead of overwriting the file on disk. + self.compare_config_in_memory_and_on_disk(config, filepath_nonexisting) + + # Now repeat the test for an existing file. 
The previous filepath now *does* exist and is migrated + filepath_existing = filepath_nonexisting + config = Config.from_file(filepath_existing) + + self.compare_config_in_memory_and_on_disk(config, filepath_existing) + + # Finally, we test that an existing configuration file with an outdated schema is migrated and written to disk + with tempfile.NamedTemporaryFile() as handle: + + # Write content of configuration with old schema to disk + filepath = os.path.join(os.path.dirname(__file__), 'migrations', 'test_samples', 'input', '0.json') + with open(filepath, 'rb') as source: + handle.write(source.read()) + handle.flush() + + config = Config.from_file(handle.name) + self.compare_config_in_memory_and_on_disk(config, handle.name) + def test_basic_properties(self): """Test the basic properties of the Config class.""" config = Config(self.config_filepath, self.config_dictionary) diff --git a/tests/manage/test_caching.py b/tests/manage/test_caching.py deleted file mode 100644 index 460281554c..0000000000 --- a/tests/manage/test_caching.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Tests for the functionality that reads and modifies the caching configuration file.""" - -import tempfile -import unittest -import yaml - -from aiida.common import exceptions -from aiida.manage.caching import configure, get_use_cache, enable_caching, disable_caching -from aiida.manage.configuration import get_profile - - -class CacheConfigTest(unittest.TestCase): - """Tests the caching configuration.""" - - def setUp(self): - """Write a temporary config file, and load the configuration.""" - self.config_reference = { - get_profile().name: { - 'default': True, - 'enabled': ['aiida.calculations:arithmetic.add'], - 'disabled': ['aiida.calculations:templatereplacer'] - } - } - with tempfile.NamedTemporaryFile() as handle: - yaml.dump(self.config_reference, handle, encoding='utf-8') - configure(config_file=handle.name) - - def tearDown(self): # pylint: disable=no-self-use - """Reset the configuration.""" - configure() - - def test_empty_enabled_disabled(self): # pylint: disable=no-self-use - """Test that `aiida.manage.caching.configure` does not except when either `enabled` or `disabled` is `None`. - - This will happen when the configuration file specifies either one of the keys but no actual values, e.g.:: - - profile_name: - default: False - enabled: - - In this case, the dictionary parsed by yaml will contain `None` for the `enabled` key. - Now this will be unlikely, but the same holds when all values are commented:: - - profile_name: - default: False - enabled: - # - aiida.calculations:templatereplacer - - which is not unlikely to occurr in the wild. 
- """ - configuration = {get_profile().name: {'default': True, 'enabled': None, 'disabled': None}} - with tempfile.NamedTemporaryFile() as handle: - yaml.dump(configuration, handle, encoding='utf-8') - configure(config_file=handle.name) - - # Check that `get_use_cache` also does not except - get_use_cache(identifier='aiida.calculations:templatereplacer') - - def test_invalid_enabled_disabled_directives(self): - """Test that `configure` raises for invalid enable or disable directives.""" - - def load_configuration(identifier): - """Write the caching file for given configuration and load it.""" - configuration = {get_profile().name: {'default': True, 'enabled': [identifier]}} - with tempfile.NamedTemporaryFile() as handle: - yaml.dump(configuration, handle, encoding='utf-8') - configure(config_file=handle.name) - - with self.assertRaises(exceptions.ConfigurationError): - load_configuration(1) # entry point string needs to be a string - - with self.assertRaises(exceptions.ConfigurationError): - load_configuration('templatereplacer') # entry point string needs to be fully qualified - - with self.assertRaises(exceptions.ConfigurationError): - load_configuration('calculations:templatereplacer') # entry point string needs to be fully qualified - - with self.assertRaises(exceptions.ConfigurationError): - load_configuration('aiida.nonexistent_group:templatereplacer') # invalid entry point group - - def test_invalid_config(self): - """Test `get_use_cache` raises a `TypeError` if identifier is not a valid entry point string.""" - with self.assertRaises(TypeError): - get_use_cache(identifier=int) - - def test_default(self): - """Verify that when not specifying any specific identifier, the `default` is used, which is set to `True`.""" - self.assertTrue(get_use_cache()) - - def test_caching_enabled(self): - """Test `get_use_cache` when specifying identifier.""" - self.assertFalse(get_use_cache(identifier='aiida.calculations:templatereplacer')) - - def 
test_contextmanager_enable_explicit(self): - """Test the `enable_caching` context manager.""" - with enable_caching(identifier='aiida.calculations:templatereplacer'): - self.assertTrue(get_use_cache(identifier='aiida.calculations:templatereplacer')) - - def test_contextmanager_disable_global(self): - """Test the `disable_caching` context manager without specific identifier.""" - with disable_caching(): - self.assertTrue( - get_use_cache(identifier='aiida.calculations:arithmetic.add') - ) # explicitly set, hence not overwritten - self.assertFalse(get_use_cache(identifier='aiida.calculations:templatereplacer')) - - def test_disable_caching(self): - """Test the `disable_caching` context manager with specific identifier.""" - with disable_caching(identifier='aiida.calculations:arithmetic.add'): - self.assertFalse(get_use_cache(identifier='aiida.calculations:arithmetic.add')) - self.assertTrue(get_use_cache(identifier='aiida.calculations:arithmetic.add')) diff --git a/tests/manage/test_caching_config.py b/tests/manage/test_caching_config.py new file mode 100644 index 0000000000..5a92987ab3 --- /dev/null +++ b/tests/manage/test_caching_config.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Tests for the functionality that reads and modifies the caching configuration file.""" + +# pylint: disable=redefined-outer-name + +import tempfile +import contextlib + +import yaml +import pytest + +from aiida.common import exceptions +from aiida.manage.configuration import get_profile +from aiida.manage.caching import configure, get_use_cache, enable_caching, disable_caching + + +@pytest.fixture +def configure_caching(): + """ + Fixture to set the caching configuration in the test profile to + a specific dictionary. This is done by creating a temporary + caching configuration file. + """ + + @contextlib.contextmanager + def inner(config_dict): + with tempfile.NamedTemporaryFile() as handle: + yaml.dump({get_profile().name: config_dict}, handle, encoding='utf-8') + configure(config_file=handle.name) + yield + # reset the configuration + configure() + + return inner + + +@pytest.fixture +def use_default_configuration(configure_caching): # pylint: disable=redefined-outer-name + """ + Fixture to load a default caching configuration. + """ + with configure_caching( + config_dict={ + 'default': True, + 'enabled': ['aiida.calculations:arithmetic.add'], + 'disabled': ['aiida.calculations:templatereplacer'] + } + ): + yield + + +def test_empty_enabled_disabled(configure_caching): + """Test that `aiida.manage.caching.configure` does not except when either `enabled` or `disabled` is `None`. + + This will happen when the configuration file specifies either one of the keys but no actual values, e.g.:: + + profile_name: + default: False + enabled: + + In this case, the dictionary parsed by yaml will contain `None` for the `enabled` key. 
+ Now this will be unlikely, but the same holds when all values are commented:: + + profile_name: + default: False + enabled: + # - aiida.calculations:templatereplacer + + which is not unlikely to occurr in the wild. + """ + with configure_caching(config_dict={'default': True, 'enabled': None, 'disabled': None}): + # Check that `get_use_cache` also does not except, and works as expected + assert get_use_cache(identifier='aiida.calculations:templatereplacer') + + +def test_no_enabled_disabled(configure_caching): + """Test that `aiida.manage.caching.configure` does not except when either `enabled` or `disabled` do not exist. + + This will happen when the configuration file does not specify these values:: + + profile_name: + default: False + """ + with configure_caching(config_dict={'default': False}): + # Check that `get_use_cache` also does not except, and works as expected + assert not get_use_cache(identifier='aiida.calculations:templatereplacer') + + +@pytest.mark.parametrize( + 'config_dict', [{ + 'wrong_key': ['foo'] + }, { + 'default': 2 + }, { + 'enabled': 4 + }, { + 'default': 'string' + }, { + 'enabled': ['aiida.spam:Ni'] + }, { + 'default': True, + 'enabled': ['aiida.calculations:With:second_separator'] + }, { + 'enabled': ['aiida.sp*:Ni'] + }, { + 'disabled': ['aiida.sp*!bar'] + }, { + 'enabled': ['startswith.number.2bad'] + }, { + 'enabled': ['some.thing.in.this.is.a.keyword'] + }] +) +def test_invalid_configuration_dict(configure_caching, config_dict): + """Test that `configure` raises for invalid configurations.""" + + with pytest.raises(exceptions.ConfigurationError): + with configure_caching(config_dict): + pass + + +def test_invalid_identifier(use_default_configuration): # pylint: disable=unused-argument + """Test `get_use_cache` raises a `TypeError` if identifier is not a string.""" + with pytest.raises(TypeError): + get_use_cache(identifier=int) + + +def test_default(use_default_configuration): # pylint: disable=unused-argument + """Verify that 
when not specifying any specific identifier, the `default` is used, which is set to `True`.""" + assert get_use_cache() + + +@pytest.mark.parametrize(['config_dict', 'enabled', 'disabled'], [ + ({ + 'default': True, + 'enabled': ['aiida.calculations:arithmetic.add'], + 'disabled': ['aiida.calculations:templatereplacer'] + }, ['some_identifier', 'aiida.calculations:arithmetic.add', 'aiida.calculations:TEMPLATEREPLACER' + ], ['aiida.calculations:templatereplacer']), + ({ + 'default': False, + 'enabled': ['aiida.calculations:arithmetic.add'], + 'disabled': ['aiida.calculations:templatereplacer'] + }, ['aiida.calculations:arithmetic.add'], ['aiida.calculations:templatereplacer', 'some_identifier']), + ({ + 'default': False, + 'enabled': ['aiida.calculations:*'], + }, ['aiida.calculations:templatereplacer', 'aiida.calculations:arithmetic.add'], ['some_identifier']), + ({ + 'default': False, + 'enabled': ['aiida.calcul*'], + }, ['aiida.calculations:templatereplacer', 'aiida.calculations:arithmetic.add'], ['some_identifier']), + ({ + 'default': False, + 'enabled': ['aiida.calculations:*'], + 'disabled': ['aiida.calculations:arithmetic.add'] + }, ['aiida.calculations:templatereplacer', 'aiida.calculations:ARIthmetic.add' + ], ['some_identifier', 'aiida.calculations:arithmetic.add']), + ({ + 'default': False, + 'enabled': ['aiida.calculations:ar*thmetic.add'], + 'disabled': ['aiida.calculations:*'], + }, ['aiida.calculations:arithmetic.add', 'aiida.calculations:arblarghthmetic.add' + ], ['some_identifier', 'aiida.calculations:templatereplacer']), +]) +def test_configuration(configure_caching, config_dict, enabled, disabled): + """Check that different caching configurations give the expected result. 
+ """ + with configure_caching(config_dict=config_dict): + for identifier in enabled: + assert get_use_cache(identifier=identifier) + for identifier in disabled: + assert not get_use_cache(identifier=identifier) + + +@pytest.mark.parametrize( + ['config_dict', 'valid_identifiers', 'invalid_identifiers'], + [({ + 'default': False, + 'enabled': ['aiida.calculations:*thmetic.add'], + 'disabled': ['aiida.calculations:arith*ic.add'] + }, ['some_identifier', 'aiida.calculations:templatereplacer'], ['aiida.calculations:arithmetic.add']), + ({ + 'default': False, + 'enabled': ['aiida.calculations:arithmetic.add'], + 'disabled': ['aiida.calculations:arithmetic.add'] + }, ['some_identifier', 'aiida.calculations:templatereplacer'], ['aiida.calculations:arithmetic.add'])] +) +def test_ambiguous_configuration(configure_caching, config_dict, valid_identifiers, invalid_identifiers): + """ + Check that calling 'get_use_cache' on identifiers for which the + configuration is ambiguous raises a ConfigurationError. + """ + with configure_caching(config_dict=config_dict): + for identifier in valid_identifiers: + get_use_cache(identifier=identifier) + for identifier in invalid_identifiers: + with pytest.raises(exceptions.ConfigurationError): + get_use_cache(identifier=identifier) + + +def test_enable_caching_specific(configure_caching): + """ + Check that using enable_caching for a specific identifier works. + """ + identifier = 'some_ident' + with configure_caching({'default': False}): + with enable_caching(identifier=identifier): + assert get_use_cache(identifier=identifier) + + +def test_enable_caching_global(configure_caching): + """ + Check that using enable_caching for a specific identifier works. 
+ """ + specific_identifier = 'some_ident' + with configure_caching(config_dict={'default': False, 'disabled': [specific_identifier]}): + with enable_caching(): + assert get_use_cache(identifier='some_other_ident') + assert get_use_cache(identifier=specific_identifier) + + +def test_disable_caching_specific(configure_caching): + """ + Check that using disable_caching for a specific identifier works. + """ + identifier = 'some_ident' + with configure_caching({'default': True}): + with disable_caching(identifier=identifier): + assert not get_use_cache(identifier=identifier) + + +def test_disable_caching_global(configure_caching): + """ + Check that using disable_caching for a specific identifier works. + """ + specific_identifier = 'some_ident' + with configure_caching(config_dict={'default': True, 'enabled': [specific_identifier]}): + with disable_caching(): + assert not get_use_cache(identifier='some_other_ident') + assert not get_use_cache(identifier=specific_identifier) + + +@pytest.mark.parametrize( + 'identifier', [ + 'aiida.spam:Ni', 'aiida.calculations:With:second_separator', 'aiida.sp*:Ni', 'aiida.sp*!bar', + 'startswith.number.2bad', 'some.thing.in.this.is.a.keyword' + ] +) +def test_enable_disable_invalid(identifier): + """ + Test that the enable and disable context managers raise when given + an invalid identifier. 
+ """ + with pytest.raises(ValueError): + with enable_caching(identifier=identifier): + pass + with pytest.raises(ValueError): + with disable_caching(identifier=identifier): + pass diff --git a/tests/orm/node/test_node.py b/tests/orm/node/test_node.py index ddfedf594c..345d820244 100644 --- a/tests/orm/node/test_node.py +++ b/tests/orm/node/test_node.py @@ -11,6 +11,7 @@ """Tests for the Node ORM class.""" import os +import tempfile from aiida.backends.testbase import AiidaTestCase from aiida.common import exceptions, LinkType @@ -729,3 +730,28 @@ def test_tab_completable_properties(self): self.assertEqual(workflow.outputs.result_b.pk, output2.pk) with self.assertRaises(exceptions.NotExistent): _ = workflow.outputs.some_label + + +# Clearing the DB is needed because other parts of the tests (not using +# the fixture infrastructure) delete the User. +def test_store_from_cache(clear_database_before_test): # pylint: disable=unused-argument + """ + Regression test for storing a Node with (nested) repository + content with caching. 
+ """ + data = Data() + with tempfile.TemporaryDirectory() as tmpdir: + dir_path = os.path.join(tmpdir, 'directory') + os.makedirs(dir_path) + with open(os.path.join(dir_path, 'file'), 'w') as file: + file.write('content') + data.put_object_from_tree(tmpdir) + + data.store() + + clone = data.clone() + clone._store_from_cache(data, with_transaction=True) # pylint: disable=protected-access + + assert clone.is_stored + assert clone.get_cache_source() == data.uuid + assert data.get_hash() == clone.get_hash() diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py index fd4c9e5220..68fad02f9d 100644 --- a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -24,6 +24,16 @@ def setUp(self): self.clean_db() self.insert_data() + def test_date_filters_support(self): + """Verify that `datetime.date` is supported in filters.""" + from datetime import datetime, date, timedelta + + orm.Data(ctime=datetime.now() - timedelta(days=3)).store() + orm.Data(ctime=datetime.now() - timedelta(days=1)).store() + + builder = orm.QueryBuilder().append(orm.Node, filters={'ctime': {'>': date.today() - timedelta(days=1)}}) + self.assertEqual(builder.count(), 1) + def test_ormclass_type_classification(self): """ This tests the classifications of the QueryBuilder @@ -121,8 +131,8 @@ class PotentialFailureWorkChain(WorkChain): def define(cls, spec): super().define(spec) spec.input('success', valid_type=orm.Bool) - spec.input('through_return', valid_type=orm.Bool, default=orm.Bool(False)) - spec.input('through_exit_code', valid_type=orm.Bool, default=orm.Bool(False)) + spec.input('through_return', valid_type=orm.Bool, default=lambda: orm.Bool(False)) + spec.input('through_exit_code', valid_type=orm.Bool, default=lambda: orm.Bool(False)) spec.exit_code(cls.EXIT_STATUS, 'EXIT_STATUS', cls.EXIT_MESSAGE) spec.outline(if_(cls.should_return_out_of_outline)(return_(cls.EXIT_STATUS)), cls.failure, cls.success) spec.output(cls.OUTPUT_LABEL, required=False) diff --git 
a/tests/orm/utils/test_repository.py b/tests/orm/utils/test_repository.py index 9e600ace19..a490459edb 100644 --- a/tests/orm/utils/test_repository.py +++ b/tests/orm/utils/test_repository.py @@ -14,8 +14,9 @@ import tempfile from aiida.backends.testbase import AiidaTestCase -from aiida.orm import Node +from aiida.orm import Node, Data from aiida.orm.utils.repository import File, FileType +from aiida.common.exceptions import ModificationNotAllowed class TestRepository(AiidaTestCase): @@ -147,3 +148,46 @@ def test_put_object_from_tree(self): key = os.path.join(basepath, 'subdir', 'a.txt') content = self.get_file_content(os.path.join('subdir', 'a.txt')) self.assertEqual(node.get_object_content(key), content) + + def test_erase_unstored(self): + """ + Test that _repository.erase removes the content of an unstored + node. + """ + node = Node() + node.put_object_from_tree(self.tempdir, '') + + self.assertEqual(sorted(node.list_object_names()), ['c.txt', 'subdir']) + self.assertEqual(sorted(node.list_object_names('subdir')), ['a.txt', 'b.txt', 'nested']) + + node._repository.erase() # pylint: disable=protected-access + self.assertEqual(node.list_object_names(), []) + + def test_erase_stored_force(self): + """ + Test that _repository.erase removes the content of an stored + Data node when passing force=True. + """ + node = Data() + node.put_object_from_tree(self.tempdir, '') + node.store() + + self.assertEqual(sorted(node.list_object_names()), ['c.txt', 'subdir']) + self.assertEqual(sorted(node.list_object_names('subdir')), ['a.txt', 'b.txt', 'nested']) + + node._repository.erase(force=True) # pylint: disable=protected-access + self.assertEqual(node.list_object_names(), []) + + def test_erase_stored_raise(self): + """ + Test that trying to erase the repository content of a stored + Data node without the force flag raises. 
+ """ + node = Data() + node.put_object_from_tree(self.tempdir, '') + node.store() + + self.assertEqual(sorted(node.list_object_names()), ['c.txt', 'subdir']) + self.assertEqual(sorted(node.list_object_names('subdir')), ['a.txt', 'b.txt', 'nested']) + + self.assertRaises(ModificationNotAllowed, node._repository.erase) # pylint: disable=protected-access diff --git a/tests/schedulers/__init__.py b/tests/schedulers/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/schedulers/__init__.py +++ b/tests/schedulers/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/tools/dbimporters/__init__.py b/tests/tools/dbimporters/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/tools/dbimporters/__init__.py +++ b/tests/tools/dbimporters/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/tests/tools/dbimporters/test_icsd.py b/tests/tools/dbimporters/test_icsd.py index cea1f27c95..0cd2cd7248 100644 --- a/tests/tools/dbimporters/test_icsd.py +++ b/tests/tools/dbimporters/test_icsd.py @@ -10,12 +10,10 @@ """ Tests for IcsdDbImporter """ -import urllib - +import urllib.request import pytest from aiida.backends.testbase import AiidaTestCase - from aiida.tools.dbimporters.plugins import icsd diff --git a/tests/transports/__init__.py b/tests/transports/__init__.py index e69de29bb2..2776a55f97 100644 --- a/tests/transports/__init__.py +++ b/tests/transports/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index bcfb36abc6..6604347ded 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -288,7 +288,10 @@ def update_environment_yml(): replacements = {'psycopg2-binary': 'psycopg2', 'graphviz': 'python-graphviz'} install_requires = get_setup_json()['install_requires'] - conda_requires = [] + # python version cannot be overridden from outside environment.yml + # (even if it is not specified at all in environment.yml) + # https://github.com/conda/conda/issues/9506 + conda_requires = ['python~=3.7'] for req in install_requires: # skip packages required for specific python versions # (environment.yml aims at the latest python version) @@ -309,7 +312,7 @@ def update_environment_yml(): environment_filename = 'environment.yml' file_path = os.path.join(ROOT_DIR, environment_filename) with open(file_path, 'w') as env_file: - env_file.write('# Usage: conda env create -n myenvname -f environment.yml python=3.6\n') + env_file.write('# Usage: conda env create -n myenvname -f environment.yml\n') yaml.safe_dump( environment, env_file, explicit_start=True, default_flow_style=False, encoding='utf-8', allow_unicode=True )