From 18e6686e783126cea79499976bfb748faeeeb46e Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Tue, 2 Apr 2024 21:08:42 +0200 Subject: [PATCH] Add improved, more configurable versions of `pytest` fixtures The `pytest` fixtures are intended to help plugin packages easily write unit tests. Arguably the most important fixture is the `aiida_profile` fixture, which automatically provides a ready-to-go profile. The downside is that it uses the `core.psql_dos` storage backend, which was historically the only available storage. Now there are other storage plugins available. Not only would it be useful to allow a user to easily configure which storage plugin to use for the test profile, it would also make sense to change the default from `core.psql_dos` to a storage plugin that doesn't require a PostgreSQL database. Admittedly, the fixture currently uses `pgtest` to create a test cluster on the fly, so the test database doesn't affect production databases; however, it still requires the PostgreSQL libraries to be installed, or the `pg_ctl` binary won't be found and the fixture fails. The `aiida_profile` fixture now uses the `core.sqlite_dos` storage plugin by default and configures no broker, which also means that RabbitMQ is no longer needed. This makes it possible to run the tests by default without any services running, making it much easier to get started running tests in any environment that just has `aiida-core` installed. --- docs/source/howto/plugins_develop.rst | 49 ++- docs/source/nitpick-exceptions | 3 + docs/source/topics/plugins.rst | 126 +++++--- src/aiida/manage/tests/pytest_fixtures.py | 6 + src/aiida/tools/pytest_fixtures/__init__.py | 57 ++++ .../tools/pytest_fixtures/configuration.py | 245 ++++++++++++++ src/aiida/tools/pytest_fixtures/daemon.py | 156 +++++++++ .../tools/pytest_fixtures/entry_points.py | 128 ++++++++ src/aiida/tools/pytest_fixtures/globals.py | 19 ++ src/aiida/tools/pytest_fixtures/orm.py | 300 ++++++++++++++++++ src/aiida/tools/pytest_fixtures/storage.py | 87 +++++ tests/benchmark/test_archive.py | 4 +- tests/brokers/test_rabbitmq.py | 4 +- tests/cmdline/commands/test_code.py | 47 ++- .../commands/test_code/test_code_export.yml | 2 +- tests/cmdline/commands/test_devel.py | 4 +- tests/cmdline/commands/test_profile.py | 2 +- tests/cmdline/commands/test_rabbitmq.py | 4 +- tests/cmdline/utils/test_common.py | 4 +- tests/conftest.py | 28 +- tests/engine/daemon/test_execmanager.py | 4 +- .../processes/calcjobs/test_calc_job.py | 60 ++-- .../processes/calcjobs/test_monitors.py | 10 +- tests/engine/processes/test_builder.py | 4 +- tests/engine/processes/test_control.py | 8 +- tests/engine/test_launch.py | 8 +- tests/engine/test_memory_leaks.py | 8 +- tests/engine/test_process_function.py | 8 +- tests/engine/test_runners.py | 4 +- tests/manage/tests/test_pytest_fixtures.py | 9 +- tests/orm/data/code/test_installed.py | 1 + tests/orm/nodes/process/test_process.py | 4 +- tests/orm/test_fields.py | 9 +- tests/schedulers/test_direct.py | 6 +- tests/storage/psql_dos/test_alembic_cli.py | 2 +- tests/storage/sqlite_zip/test_backend.py | 12 +- .../archive/migration/test_legacy_to_main.py | 4 +- .../archive/migration/test_prov_redesign.py | 14 +- tests/tools/archive/orm/test_attributes.py | 4 +- tests/tools/archive/orm/test_authinfo.py | 2 +- tests/tools/archive/orm/test_calculations.py | 8 +- tests/tools/archive/orm/test_codes.py | 6 +- tests/tools/archive/orm/test_comments.py | 16 +- tests/tools/archive/orm/test_computers.py | 12 +-
tests/tools/archive/orm/test_extras.py | 2 +- tests/tools/archive/orm/test_groups.py | 8 +- tests/tools/archive/orm/test_links.py | 24 +- tests/tools/archive/orm/test_logs.py | 14 +- tests/tools/archive/orm/test_users.py | 6 +- tests/tools/archive/test_complex.py | 4 +- tests/tools/archive/test_repository.py | 4 +- tests/tools/archive/test_simple.py | 6 +- tests/tools/archive/test_specific_import.py | 6 +- .../pytest_fixtures/test_configuration.py | 40 +++ 54 files changed, 1361 insertions(+), 251 deletions(-) create mode 100644 src/aiida/tools/pytest_fixtures/__init__.py create mode 100644 src/aiida/tools/pytest_fixtures/configuration.py create mode 100644 src/aiida/tools/pytest_fixtures/daemon.py create mode 100644 src/aiida/tools/pytest_fixtures/entry_points.py create mode 100644 src/aiida/tools/pytest_fixtures/globals.py create mode 100644 src/aiida/tools/pytest_fixtures/orm.py create mode 100644 src/aiida/tools/pytest_fixtures/storage.py create mode 100644 tests/tools/pytest_fixtures/test_configuration.py diff --git a/docs/source/howto/plugins_develop.rst b/docs/source/howto/plugins_develop.rst index 2634406617..f751adc0cd 100644 --- a/docs/source/howto/plugins_develop.rst +++ b/docs/source/howto/plugins_develop.rst @@ -158,45 +158,34 @@ AiiDA's fixtures Many tests require a full AiiDA environment to be set up before the test starts, e.g. some AiiDA data nodes. The pytest library has the concept of `fixtures`_ for encapsulating code you would like to run before a test starts. -AiiDA ships with a number of fixtures in :py:mod:`aiida.manage.tests.pytest_fixtures` that take care of setting up the test environment for you (for more details, see :ref:`topics:plugins:testfixtures`). +AiiDA ships with a number of fixtures in :py:mod:`aiida.tools.pytest_fixtures` that take care of setting up the test environment for you (for more details, see :ref:`topics:plugins:testfixtures`). In order to make these fixtures available to your tests, create a ``conftest.py`` (see also `pytest docs `_) at the root level of your plugin package as follows:: - import pytest - pytest_plugins = ['aiida.manage.tests.pytest_fixtures'] # make AiiDA's fixtures available - # tip: look inside aiida.manage.tests.pytest_fixtures to see which fixtures are provided + import pytest + pytest_plugins = 'aiida.tools.pytest_fixtures' # make AiiDA's fixtures available + # tip: look inside aiida.tools.pytest_fixtures to see which fixtures are provided - @pytest.fixture(scope='function') # a fixture that will run once per test function that requests it - def integer_input(): - """Integer input for test run.""" - from aiida.orm import Int - input_value = Int(5) - return input_value +By importing AiiDA's fixtures, the ``aiida_profile`` fixture gets used automatically, which takes care that a test profile is created and loaded. +The profile (and configuration directory in which it is hosted) is only temporary and is automatically removed after the test session finished. +This ensures that any production profiles on the system are not affected by the tests. - @pytest.fixture(scope='function', autouse=True) # a fixture that automatically runs once per test function - def clear_database_auto(clear_database): # request AiiDA's "clear_database" fixture - """Automatically clear database in between tests.""" - pass +Other fixtures have to be explicitly used in a test to be of use. +They usually allow creating some resources that are required by the test, such as a ``Code`` node: -You can now start writing tests e.g. 
in a ``tests/test_calculations.py`` file:: + def test_calculation(aiida_code_installed): + """Test running a calculation using a ``CalcJob`` plugin.""" + from aiida.engine import run - # No need to import fixtures here - they are added by pytest "automagically" + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') + builder = code.get_builder() + builder.x = orm.Int(1) + builder.y = orm.Int(2) - def test_qe_calculation(aiida_local_code_factory, integer_input): # requesting "aiida_local_code_factory" and "integer_input" fixtures - """Test running a calculation using a CalcJob plugin.""" - from aiida.engine import run - from aiida.plugins import CalculationFactory + results, node = run.get_node(builder) - # search for 'pw.x' executable in PATH, set up an AiiDA code for it and return it - code = aiida_local_code_factory(entry_point='quantumespresso.pw', executable='pw.x') - # ... - inputs = { 'code': code, 'int_input': integer_input, ... } # use "integer_input" fixture - - # run a calculation using this code ... - result = run(CalculationFactory('quantumespresso.pw'), **inputs) - - # check outputs of calculation - assert result['...'] == ... + assert node.is_finished_ok + assert results['sum'] == 3 In order to run your tests, simply type ``pytest`` at the root level of your package. pytest automatically discovers and executes files, classes and function names starting with the word ``test``. diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index e0b18ecb2b..c127906862 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -190,6 +190,9 @@ py:class _asyncio.Future py:class tqdm.std.tqdm +py:class pytest.TempPathFactory +py:class PGTest + py:class IPython.core.magic.Magics py:class HTMLParser.HTMLParser diff --git a/docs/source/topics/plugins.rst b/docs/source/topics/plugins.rst index eaee6d7342..1ae670aa1e 100644 --- a/docs/source/topics/plugins.rst +++ b/docs/source/topics/plugins.rst @@ -344,9 +344,9 @@ To make use of these fixtures, create a ``conftest.py`` file in your ``tests`` f .. code-block:: python - pytest_plugins = ['aiida.manage.tests.pytest_fixtures'] + pytest_plugins = 'aiida.tools.pytest_fixtures' -Just by adding this line, the fixtures that are provided by the :mod:`~aiida.manage.tests.pytest_fixtures` module are automatically imported. +Just by adding this line, the fixtures that are provided by the :mod:`~aiida.tools.pytest_fixtures` module are automatically imported.
The module provides the following fixtures: * :ref:`aiida_manager `: Return the global instance of the :class:`~aiida.manage.manager.Manager` @@ -354,20 +354,27 @@ The module provides the following fixtures: * :ref:`aiida_profile_clean `: Same as ``aiida_profile`` but the storage backend is cleaned * :ref:`aiida_profile_clean_class `: Same as ``aiida_profile_clean`` but should be used at the class scope * :ref:`aiida_profile_factory `: Create a temporary profile ready to be used for testing -* :ref:`aiida_instance `: Return the :class:`~aiida.manage.configuration.config.Config` instance that is used for the test session +* :ref:`aiida_config `: Return the :class:`~aiida.manage.configuration.config.Config` instance that is used for the test session * :ref:`config_psql_dos `: Return a profile configuration for the :class:`~aiida.storage.psql_dos.backend.PsqlDosBackend` -* :ref:`postgres_cluster `: Create a temporary and isolated PostgreSQL cluster using ``pgtest`` and cleanup after the yield -* :ref:`aiida_local_code_factory `: Setup a :class:`~aiida.orm.nodes.data.code.installed.InstalledCode` instance on the ``localhost`` computer +* :ref:`postgres_cluster `: Create a temporary and isolated PostgreSQL cluster using ``pgtest`` and cleanup after the yield * :ref:`aiida_computer `: Setup a :class:`~aiida.orm.computers.Computer` instance * :ref:`aiida_computer_local `: Setup the localhost as a :class:`~aiida.orm.computers.Computer` using local transport * :ref:`aiida_computer_ssh `: Setup the localhost as a :class:`~aiida.orm.computers.Computer` using SSH transport * :ref:`aiida_localhost `: Shortcut for that immediately returns a :class:`~aiida.orm.computers.Computer` instance for the ``localhost`` computer instead of a factory +* :ref:`aiida_code `: Setup a :class:`~aiida.orm.nodes.data.code.abstract.AbstractCode` instance +* :ref:`aiida_code_installed `: Setup a :class:`~aiida.orm.nodes.data.code.installed.InstalledCode` instance on a given computer * :ref:`submit_and_await `: Submit a process or process builder to the daemon and wait for it to reach a certain process state * :ref:`started_daemon_client `: Same as ``daemon_client`` but the daemon is guaranteed to be running * :ref:`stopped_daemon_client `: Same as ``daemon_client`` but the daemon is guaranteed to *not* be running * :ref:`daemon_client `: Return a :class:`~aiida.engine.daemon.client.DaemonClient` instance to control the daemon * :ref:`entry_points `: Return a :class:`~aiida.manage.tests.pytest_fixtures.EntryPointManager` instance to add and remove entry points +.. note:: + + Before v2.6, test fixtures were located in :mod:`aiida.manage.tests.pytest_fixtures`. + This module is now deprecated and will be removed in the future. + Some fixtures have analogs in :mod:`aiida.tools.pytest_fixtures` that are drop-in replacements, but in general, there are differences in the interface and functionality. + .. _topics:plugins:testfixtures:aiida-manager: @@ -395,17 +402,21 @@ By default, the fixture will generate a completely temporary independent AiiDA i This includes: * A temporary ``.aiida`` configuration folder with configuration files -* A temporary PostgreSQL cluster -* A temporary test profile complete with storage backend (creates a database in the temporary PostgreSQL cluster) +* A temporary test profile configured with ``core.sqlite_dos`` storage backend + +.. note:: + + The profile uses ``core.sqlite_dos`` instead of the standard ``core.psql_dos`` storage plugin as it doesn't require PostgreSQL to be installed.
+ Since the functionality of PostgreSQL is not needed for most common test cases, this choice makes it easier to start writing and running tests. The temporary test instance and profile are automatically destroyed at the end of the test session. The fixture guarantees that no changes are made to the actual instance of AiiDA with its configuration and profiles. -The creation of the temporary instance and profile takes a few seconds at the beginning of the test suite to setup. -It is possible to avoid this by creating a dedicated test profile once and telling the fixture to use that instead of generating one each time: +.. note:: -* Create a profile, by using `verdi setup` or `verdi quicksetup` and specify the ``--test-profile`` flag -* Set the ``AIIDA_TEST_PROFILE`` environment variable to the name of the test profile: ``export AIIDA_TEST_PROFILE=`` + The profile does not configure RabbitMQ as a broker since it is not required for most test cases useful for plugins. + This means, however, that any functionality that requires a broker is not available, such as running the daemon and submitting processes to the daemon. + If that functionality is required, a profile should be created and loaded that configures a broker. Although the fixture is automatically used, and so there is no need to explicitly pass it into a test function, it may still be useful, as it can be used to clean the storage backend from all data: @@ -419,7 +430,7 @@ still be useful, as it can be used to clean the storage backend from all data: assert QueryBuilder().append(Data).count() != 0 # The following call clears the storage backend, deleting all data, except for the default user. - aiida_profile.clear_profile() + aiida_profile.reset_storage() assert QueryBuilder().append(Data).count() == 0 @@ -434,8 +445,8 @@ Note that a default user will be inserted into the database after cleaning it. .. code-block:: python - def test(aiida_profile_clean): - """The profile storage is guaranteed to be emptied at the start of this test.""" + def test(aiida_profile_clean): + """The profile storage is guaranteed to be emptied at the start of this test.""" This functionality can be useful if it is easier to setup and write the test if there is no pre-existing data. However, cleaning the storage may take a non-negligible amount of time, so only use it when really needed in order to keep tests running as fast as possible. @@ -488,17 +499,17 @@ Can be useful to create a test profile for a custom storage backend: Note that the configuration above is not actually functional and the actual configuration depends on the storage implementation that is used. -.. _topics:plugins:testfixtures:aiida-instance: +.. _topics:plugins:testfixtures:aiida-config: -``aiida_instance`` +``aiida_config`` ------------------ Return the :class:`~aiida.manage.configuration.config.Config` instance that is used for the test session. .. code-block:: python - def test(aiida_instance): - aiida_instance.get_option('logging.aiida_loglevel') + def test(aiida_config): + aiida_config.get_option('logging.aiida_loglevel') .. _topics:plugins:testfixtures:config-psql-dos: @@ -511,16 +522,17 @@ This can be used in combination with the ``aiida_profile_factory`` fixture to cr .. 
code-block:: python - @pytest.fixture(scope='session') - def psql_dos_profile(aiida_profile_factory, config_psql_dos) -> Profile: - """Return a test profile configured for the :class:`~aiida.storage.psql_dos.PsqlDosStorage`.""" - configuration = config_psql_dos() - configuration['storage']['config']['repository_uri'] = '/some/custom/path' - yield aiida_profile_factory(configuration) + @pytest.fixture(scope='session') + def psql_dos_profile(aiida_profile_factory, config_psql_dos) -> Profile: + """Return a test profile configured for the :class:`~aiida.storage.psql_dos.PsqlDosStorage`.""" + configuration = config_psql_dos() + configuration['repository_uri'] = '/some/custom/path' + with aiida_profile_factory(storage_backend='core.psql_dos', storage_config=configuration) as profile: + yield profile Note that this is only useful if the storage configuration needs to be customized. -If any configuration works, simply use the ``aiida_profile`` fixture straight away, which uses the ``PsqlDosStorage`` storage backend by default. +If any configuration works, simply use the ``aiida_profile`` fixture straight away. .. _topics:plugins:testfixtures:postgres-cluster: @@ -555,25 +567,6 @@ This fixture returns a :class:`~aiida.orm.computers.Computer` that represents th aiida_localhost.get_minimum_job_poll_interval() -.. _topics:plugins:testfixtures:aiida-local-code-factory: - -``aiida_local_code_factory`` ----------------------------- - -This test is useful if a test requires an :class:`~aiida.orm.nodes.data.code.installed.InstalledCode` instance. -For example: - -.. code-block:: python - - def test(aiida_local_code_factory): - code = aiida_local_code_factory( - entry_point='core.arithmetic.add', - executable='/usr/bin/bash' - ) - -By default, it will use the ``localhost`` computer returned by the ``aiida_localhost`` fixture. - - .. _topics:plugins:testfixtures:aiida-computer: ``aiida_computer`` @@ -679,6 +672,45 @@ If you need a guarantee that the computer is not configured, make sure to clean assert not localhost.is_configured +.. _topics:plugins:testfixtures:aiida-code: + +``aiida_code`` +---------------------------- + +This fixture is useful if a test requires an :class:`~aiida.orm.nodes.data.code.abstract.AbstractCode` instance. +For example: + +.. code-block:: python + + def test(aiida_localhost, aiida_code): + from aiida.orm import InstalledCode + code = aiida_code( + 'core.code.installed', + label='test-code', + computer=aiida_localhost, + filepath_executable='/bin/bash' + ) + assert isinstance(code, InstalledCode) + + +.. _topics:plugins:testfixtures:aiida-code-installed: + +``aiida_code_installed`` +---------------------------- + +This test is useful if a test requires an :class:`~aiida.orm.nodes.data.code.installed.InstalledCode` instance. +For example: + +.. code-block:: python + + def test(aiida_code_installed): + from aiida.orm import InstalledCode + code = aiida_code_installed() + assert isinstance(code, InstalledCode) + +By default, it will use the ``localhost`` computer returned by the ``aiida_localhost`` fixture. + + .. _topics:plugins:testfixtures:submit-and-await: ``submit_and_await`` @@ -690,8 +722,8 @@ By default it will wait for the process to reach ``ProcessState.FINISHED``: .. 
code-block:: python - def test(aiida_local_code_factory, submit_and_await): - code = aiida_local_code_factory('core.arithmetic.add', '/usr/bin/bash') + def test(aiida_code_installed, submit_and_await): + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/usr/bin/bash') + builder = code.get_builder() builder.x = orm.Int(1) builder.y = orm.Int(1) @@ -762,7 +794,7 @@ Return a :class:`~aiida.manage.tests.pytest_fixtures.EntryPointManager` instance class CustomParser(Parser): """Parser implementation.""" - entry_points.add(CustomParser, 'custom.parser') + entry_points.add(CustomParser, 'aiida.parsers:custom.parser') assert ParserFactory('custom.parser', CustomParser) diff --git a/src/aiida/manage/tests/pytest_fixtures.py b/src/aiida/manage/tests/pytest_fixtures.py index 7aceafa5d6..96b7abaaad 100644 --- a/src/aiida/manage/tests/pytest_fixtures.py +++ b/src/aiida/manage/tests/pytest_fixtures.py @@ -51,6 +51,12 @@ from aiida.manage.configuration.config import Config +warn_deprecation( + 'The module `aiida.manage.tests.pytest_fixtures` is deprecated, please use `aiida.tools.pytest_fixtures` instead.', + version=3, +) + + def recursive_merge(left: dict[t.Any, t.Any], right: dict[t.Any, t.Any]) -> None: """Recursively merge the ``right`` dictionary into the ``left`` dictionary. diff --git a/src/aiida/tools/pytest_fixtures/__init__.py b/src/aiida/tools/pytest_fixtures/__init__.py new file mode 100644 index 0000000000..6181183d13 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/__init__.py @@ -0,0 +1,57 @@ +"""Fixtures to simplify writing unit tests for AiiDA with ``pytest``.""" +# AUTO-GENERATED + +# fmt: off + +from .configuration import ( + aiida_config, + aiida_config_factory, + aiida_config_tmp, + aiida_profile, + aiida_profile_clean, + aiida_profile_clean_class, + aiida_profile_factory, + aiida_profile_tmp, +) +from .daemon import daemon_client, started_daemon_client, stopped_daemon_client, submit_and_await +from .entry_points import entry_points +from .globals import aiida_manager +from .orm import ( + aiida_code, + aiida_code_installed, + aiida_computer, + aiida_computer_local, + aiida_computer_ssh, + aiida_localhost, + ssh_key, ) +from .storage import config_psql_dos, postgres_cluster + +__all__ = ( + 'aiida_code_installed', + 'aiida_code', + 'aiida_computer_local', + 'aiida_computer_ssh', + 'aiida_computer', + 'aiida_config_factory', + 'aiida_config_tmp', + 'aiida_config', + 'aiida_localhost', + 'aiida_manager', + 'aiida_profile_clean_class', + 'aiida_profile_clean', + 'aiida_profile_factory', + 'aiida_profile_tmp', + 'aiida_profile', + 'config_psql_dos', + 'daemon_client', + 'entry_points', + 'postgres_cluster', + 'ssh_key', + 'started_daemon_client', + 'stopped_daemon_client', + 'submit_and_await', +) + + +# fmt: on diff --git a/src/aiida/tools/pytest_fixtures/configuration.py b/src/aiida/tools/pytest_fixtures/configuration.py new file mode 100644 index 0000000000..b5ef8c4a63 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/configuration.py @@ -0,0 +1,245 @@ +"""Fixtures to create and load temporary AiiDA configuration directories and profiles.""" + +from __future__ import annotations + +import contextlib +import os +import pathlib +import secrets +import typing as t + +import pytest + +if t.TYPE_CHECKING: + from aiida.manage.configuration.config import Config + + +@pytest.fixture(scope='session') +def aiida_config_factory(): + """Return a factory to create and load a new temporary AiiDA configuration directory.
+ + The factory is a context manager that returns a loaded :class:`aiida.manage.configuration.config.Config`. It + requires a path on the local file system where the configuration directory is to be created as an argument. If + another configuration directory was already loaded, that is automatically restored at the end of the context manager. + This way, any changes made to the configuration during the context are fully temporary and automatically undone + after the test. + + Usage:: + + def test(aiida_config_factory, tmp_path_factory): + import secrets + with aiida_config_factory(tmp_path_factory.mktemp(secrets.token_hex(16))) as config: + yield config + + The factory has the following signature to allow further customizing the configuration that is created and loaded: + + :param dirpath: The path to create the configuration directory in. + :returns `~aiida.manage.configuration.config.Config`: The loaded temporary config. + """ + + @contextlib.contextmanager + def factory(dirpath: pathlib.Path): + from aiida.common.exceptions import MissingConfigurationError + from aiida.manage.configuration import get_config, reset_config, settings + + try: + current_config = get_config() + except MissingConfigurationError: + current_config = None + + current_path_variable = os.environ.get(settings.DEFAULT_AIIDA_PATH_VARIABLE) + + reset_config() + + dirpath_config = dirpath / settings.DEFAULT_CONFIG_DIR_NAME + os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = str(dirpath_config) + settings.set_configuration_directory(dirpath_config) + config = get_config(create=True) + + try: + yield config + finally: + if current_config: + reset_config() + settings.set_configuration_directory(pathlib.Path(current_config.dirpath)) + get_config() + + if current_path_variable is None: + os.environ.pop(settings.DEFAULT_AIIDA_PATH_VARIABLE, None) + else: + os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = current_path_variable + + return factory + + +@pytest.fixture(scope='session') +def aiida_profile_factory(): + """Return a factory to create and load a new temporary AiiDA profile. + + The factory is a context manager that returns a loaded :class:`aiida.manage.configuration.profile.Profile`. It + requires a :class:`aiida.manage.configuration.config.Config` instance to which the profile is added. If another + profile was already loaded, that is automatically restored at the end of the context manager. This way, any changes + made to the profile during the context are fully temporary and automatically undone after the test. The created + ``Profile`` instance dynamically has the method ``reset_storage`` added which, when called, deletes all content of + the storage, recreating the default user. The daemon is also stopped if it was running. + + Usage:: + + def test(aiida_config_tmp, aiida_profile_factory): + with aiida_profile_factory(aiida_config_tmp) as profile: + yield profile + + The factory has the following signature to allow further configuring the profile that is created and loaded: + + :param storage_backend: The storage plugin to use. Defaults to ``core.sqlite_dos``. + :param storage_config: The configuration to use for the selected storage plugin. + :param broker_backend: The broker plugin to use. Defaults to defining no broker. + :param broker_config: The configuration to use for the selected broker plugin. + :param name: The name of the profile. Defaults to a random string. + :param email: The email to use for the default user. Defaults to ``test@localhost``.
+ :returns `~aiida.manage.configuration.profile.Profile`: The loaded temporary profile. + """ + + @contextlib.contextmanager + def factory( + config: 'Config', + *, + storage_backend: str = 'core.sqlite_dos', + storage_config: dict[str, t.Any] | None = None, + broker_backend: str | None = None, + broker_config: dict[str, t.Any] | None = None, + name: str | None = None, + email: str = 'test@localhost', + ): + from aiida.manage.configuration import create_profile, profile_context + from aiida.manage.manager import get_manager + + manager = get_manager() + storage_config = storage_config or {'filepath': str(pathlib.Path(config.dirpath) / 'storage')} + + if broker_backend and broker_config is None: + broker_config = { + 'broker_protocol': 'amqp', + 'broker_username': 'guest', + 'broker_password': 'guest', + 'broker_host': '127.0.0.1', + 'broker_port': 5672, + 'broker_virtual_host': '', + } + + profile = create_profile( + config, + storage_backend=storage_backend, + storage_config=storage_config, + broker_backend=broker_backend, + broker_config=broker_config, + name=name or secrets.token_hex(16), + email=email, + is_test_profile=True, + ) + config.set_default_profile(profile.name) + + def reset_storage(): + """Reset the storage of the profile. + + This ensures that the contents of the profile are reset as well as the ``Manager``, which may hold + references to data that will be destroyed. The daemon will also be stopped if it was running. + """ + from aiida.engine.daemon.client import DaemonException, get_daemon_client + from aiida.orm import User + + if broker_backend: + daemon_client = get_daemon_client() + + if daemon_client.is_daemon_running: + try: + daemon_client.stop_daemon(wait=True) + except DaemonException: + pass + + manager.get_profile_storage()._clear() + manager.reset_profile() + + User(email=profile.default_user_email or email).store() + + # Add the ``reset_storage`` method, such that users can empty the storage through the ``Profile`` instance that + # is returned by this fixture. + setattr(profile, 'reset_storage', reset_storage) + + with profile_context(profile, allow_switch=True): + yield profile + + return factory + + +@pytest.fixture(scope='session', autouse=True) +def aiida_config(tmp_path_factory, aiida_config_factory): + """Return a loaded temporary AiiDA configuration directory. + + This fixture is session-scoped and used automatically as soon as these fixtures are imported. + + :returns :class:`~aiida.manage.configuration.config.Config`: The loaded temporary config. + """ + with aiida_config_factory(tmp_path_factory.mktemp(secrets.token_hex(16))) as config: + yield config + + +@pytest.fixture(scope='session', autouse=True) +def aiida_profile(aiida_config, aiida_profile_factory): + """Return a loaded temporary AiiDA profile. + + This fixture is session-scoped and used automatically as soon as these fixtures are imported. The profile defines + no broker and uses the ``core.sqlite_dos`` storage backend, meaning it requires no services to run. + + :returns :class:`~aiida.manage.configuration.profile.Profile`: The loaded temporary profile. + """ + with aiida_profile_factory(aiida_config) as profile: + yield profile + + +@pytest.fixture(scope='function') +def aiida_profile_clean(aiida_profile): + """Return a loaded temporary AiiDA profile where the data storage is cleaned before the start of the test. + + This is a function-scoped version of the ``aiida_profile`` fixture. + + :returns :class:`~aiida.manage.configuration.profile.Profile`: The loaded temporary profile. 
+ """ + aiida_profile.reset_storage() + yield aiida_profile + + +@pytest.fixture(scope='class') +def aiida_profile_clean_class(aiida_profile): + """Return a loaded temporary AiiDA profile where the data storage is cleaned before the start of the test. + + This is a class-scoped version of the ``aiida_profile`` fixture. + + :returns `~aiida.manage.configuration.profile.Profile`: The loaded temporary profile. + """ + aiida_profile.reset_storage() + yield aiida_profile + + +@pytest.fixture(scope='function') +def aiida_config_tmp(tmp_path, aiida_config_factory): + """Create and load a temporary AiiDA configuration directory. + + This fixture is function-scoped and automatically restores any previously loaded config after the test. + + :returns :class:`~aiida.manage.configuration.config.Config`: The loaded temporary config. + """ + with aiida_config_factory(tmp_path) as config: + yield config + + +@pytest.fixture(scope='function') +def aiida_profile_tmp(aiida_config_tmp, aiida_profile_factory): + """Create and load a temporary AiiDA profile. + + This fixture is function-scoped and automatically restores any previously loaded profile after the test. + + :returns :class:`~aiida.manage.configuration.profile.Profile`: The loaded temporary profile. + """ + with aiida_profile_factory(aiida_config_tmp) as profile: + yield profile diff --git a/src/aiida/tools/pytest_fixtures/daemon.py b/src/aiida/tools/pytest_fixtures/daemon.py new file mode 100644 index 0000000000..74e3620193 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/daemon.py @@ -0,0 +1,156 @@ +"""Fixtures to interact with the daemon.""" + +from __future__ import annotations + +import pathlib +import typing as t + +import pytest + +if t.TYPE_CHECKING: + from aiida.engine import Process, ProcessBuilder + from aiida.orm import ProcessNode + + +@pytest.fixture(scope='session') +def daemon_client(aiida_profile): + """Return a daemon client for the configured test profile for the test session. + + The daemon will be automatically stopped at the end of the test session. + + Usage:: + + def test(daemon_client): + from aiida.engine.daemon.client import DaemonClient + assert isinstance(daemon_client, DaemonClient) + + """ + from aiida.engine.daemon import get_daemon_client + from aiida.engine.daemon.client import DaemonNotRunningException, DaemonTimeoutException + + daemon_client = get_daemon_client(aiida_profile.name) + + try: + yield daemon_client + finally: + try: + daemon_client.stop_daemon(wait=True) + except DaemonNotRunningException: + pass + # Give an additional grace period by manually waiting for the daemon to be stopped. In certain unit test + # scenarios, the built in wait time in ``daemon_client.stop_daemon`` is not sufficient and even though the + # daemon is stopped, ``daemon_client.is_daemon_running`` will return false for a little bit longer. + daemon_client._await_condition( + lambda: not daemon_client.is_daemon_running, + DaemonTimeoutException('The daemon failed to stop.'), + ) + + +@pytest.fixture +def started_daemon_client(daemon_client): + """Ensure that the daemon is running for the test profile and return the associated client. 
+ + Usage:: + + def test(started_daemon_client): + assert started_daemon_client.is_daemon_running + + """ + if not daemon_client.is_daemon_running: + daemon_client.start_daemon() + assert daemon_client.is_daemon_running + + yield daemon_client + + +@pytest.fixture +def stopped_daemon_client(daemon_client): + """Ensure that the daemon is not running for the test profile and return the associated client. + + Usage:: + + def test(stopped_daemon_client): + assert not stopped_daemon_client.is_daemon_running + + """ + from aiida.engine.daemon.client import DaemonTimeoutException + + if daemon_client.is_daemon_running: + daemon_client.stop_daemon(wait=True) + # Give an additional grace period by manually waiting for the daemon to be stopped. In certain unit test + # scenarios, the built in wait time in ``daemon_client.stop_daemon`` is not sufficient and even though the + # daemon is stopped, ``daemon_client.is_daemon_running`` will return false for a little bit longer. + daemon_client._await_condition( + lambda: not daemon_client.is_daemon_running, + DaemonTimeoutException('The daemon failed to stop.'), + ) + + yield daemon_client + + +@pytest.fixture +def submit_and_await(started_daemon_client): + """Return a factory to submit a process and wait for it to achieve the given state. + + This fixture automatically loads the ``started_daemon_client`` fixture ensuring the daemon is already running, + therefore it is not necessary to manually start the daemon. + + Usage:: + + def test(submit_and_await): + inputs = { + ... + } + node = submit_and_await(SomeProcess, **inputs) + + The factory has the following signature: + + :param submittable: A process, a process builder or a process node. If it is a process or builder, it is submitted + first before awaiting the desired state. + :param state: The process state to wait for, by default it waits for the submittable to be ``FINISHED``. + :param timeout: The time to wait for the process to achieve the state. + :param kwargs: If the ``submittable`` is a process class, it is instantiated with the ``kwargs`` as inputs. + :raises RuntimeError: If the process fails to achieve the specified state before the timeout expires. + :returns `~aiida.orm.nodes.process.process.ProcessNode`: The process node. 
+ """ + from aiida.engine import ProcessState + + def factory( + submittable: 'Process' | 'ProcessBuilder' | 'ProcessNode', + state: ProcessState = ProcessState.FINISHED, + timeout: int = 20, + **kwargs, + ): + import inspect + import time + + from aiida.engine import Process, ProcessBuilder, submit + from aiida.orm import ProcessNode + + if inspect.isclass(submittable) and issubclass(submittable, Process): + node = submit(submittable, **kwargs) + elif isinstance(submittable, ProcessBuilder): + node = submit(submittable) + elif isinstance(submittable, ProcessNode): + node = submittable + else: + raise ValueError(f'type of submittable `{type(submittable)}` is not supported.') + + start_time = time.time() + + while node.process_state is not state: + if node.is_excepted: + raise RuntimeError(f'The process excepted: {node.exception}') + + if time.time() - start_time >= timeout: + daemon_log_file = pathlib.Path(started_daemon_client.daemon_log_file).read_text(encoding='utf-8') + daemon_status = 'running' if started_daemon_client.is_daemon_running else 'stopped' + raise RuntimeError( + f'Timed out waiting for process with state `{node.process_state}` to enter state `{state}`.\n' + f'Daemon <{started_daemon_client.profile.name}|{daemon_status}> log file content: \n' + f'{daemon_log_file}' + ) + + return node + + return factory diff --git a/src/aiida/tools/pytest_fixtures/entry_points.py b/src/aiida/tools/pytest_fixtures/entry_points.py new file mode 100644 index 0000000000..fbcc585802 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/entry_points.py @@ -0,0 +1,128 @@ +"""Fixtures to temporarily add and remove entry points.""" + +from __future__ import annotations + +import typing as t + +import importlib_metadata +import pytest + + +class EntryPointManager: + """Manager to temporarily add or remove entry points.""" + + def __init__(self, entry_points: importlib_metadata.EntryPoints): + self.entry_points = entry_points + + def eps(self) -> importlib_metadata.EntryPoints: + return self.entry_points + + def eps_select(self, group, name=None) -> importlib_metadata.EntryPoints: + if name is None: + return self.eps().select(group=group) + return self.eps().select(group=group, name=name) + + @staticmethod + def _validate_entry_point(entry_point_string: str | None, group: str | None, name: str | None) -> tuple[str, str]: + """Validate the definition of the entry point. + + :param entry_point_string: Fully qualified entry point string. + :param name: Entry point name. + :param group: Entry point group. + :returns: The entry point group and name. + :raises TypeError: If `entry_point_string`, `group` or `name` are not a string, when defined. + :raises ValueError: If `entry_point_string` is not defined, nor a `group` and `name`. + :raises ValueError: If `entry_point_string` is not a complete entry point string with group and name. 
+ """ + from aiida.common.lang import type_check + from aiida.plugins import entry_point + + if entry_point_string is not None: + try: + group, name = entry_point.parse_entry_point_string(entry_point_string) + except TypeError as exception: + raise TypeError('`entry_point_string` should be a string when defined.') from exception + except ValueError as exception: + raise ValueError('invalid `entry_point_string` format, should `group:name`.') from exception + + if name is None or group is None: + raise ValueError('neither `entry_point_string` is defined, nor `name` and `group`.') + + type_check(group, str) + type_check(name, str) + + return group, name + + def add( + self, + value: type | str, + entry_point_string: str | None = None, + *, + name: str | None = None, + group: str | None = None, + ) -> None: + """Add an entry point. + + :param value: The class or function to register as entry point. The resource needs to be importable, so it can't + be inlined. Alternatively, the fully qualified name can be passed as a string. + :param entry_point_string: Fully qualified entry point string. + :param name: Entry point name. + :param group: Entry point group. + :returns: The entry point group and name. + :raises TypeError: If `entry_point_string`, `group` or `name` are not a string, when defined. + :raises ValueError: If `entry_point_string` is not defined, nor a `group` and `name`. + :raises ValueError: If `entry_point_string` is not a complete entry point string with group and name. + """ + if not isinstance(value, str): + value = f'{value.__module__}:{value.__name__}' + + group, name = self._validate_entry_point(entry_point_string, group, name) + entry_point = importlib_metadata.EntryPoint(name, value, group) + self.entry_points = importlib_metadata.EntryPoints(self.entry_points + (entry_point,)) + + def remove( + self, entry_point_string: str | None = None, *, name: str | None = None, group: str | None = None + ) -> None: + """Remove an entry point. + + :param value: Entry point value, fully qualified import path name. + :param entry_point_string: Fully qualified entry point string. + :param name: Entry point name. + :param group: Entry point group. + :returns: The entry point group and name. + :raises TypeError: If `entry_point_string`, `group` or `name` are not a string, when defined. + :raises ValueError: If `entry_point_string` is not defined, nor a `group` and `name`. + :raises ValueError: If `entry_point_string` is not a complete entry point string with group and name. + """ + group, name = self._validate_entry_point(entry_point_string, group, name) + try: + self.entry_points[name] + except KeyError: + raise KeyError(f'entry point `{name}` does not exist in group `{group}`.') + self.entry_points = importlib_metadata.EntryPoints( + (ep for ep in self.entry_points if not (ep.name == name and ep.group == group)) + ) + + +@pytest.fixture +def entry_points(monkeypatch) -> t.Generator[EntryPointManager, None, None]: + """Return an instance of the ``EntryPointManager`` which allows to temporarily add or remove entry points. + + This fixture monkey patches the entry point caches returned by the :func:`aiida.plugins.entry_point.eps` and + :func:`aiida.plugins.entry_point.eps_select` functions to class methods of the ``EntryPointManager`` so that we can + dynamically add and/or remove entry points. 
+ + Usage:: + + def test(entry_points): + entry_points.add(SomeCalcJob, 'aiida.calculations:some.entry_point') + # or, alternatively + entry_points.add(SomeCalcJob, group='aiida.calculations', name='some.entry_point') + """ + from aiida.plugins import entry_point + + # Note: a deepcopy is not needed here as ``eps()`` returns an immutable ``EntryPoints`` tuple type. + epm = EntryPointManager(entry_point.eps()) + monkeypatch.setattr(entry_point, 'eps', epm.eps) + monkeypatch.setattr(entry_point, 'eps_select', epm.eps_select) + yield epm diff --git a/src/aiida/tools/pytest_fixtures/globals.py b/src/aiida/tools/pytest_fixtures/globals.py new file mode 100644 index 0000000000..f67b6ffc23 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/globals.py @@ -0,0 +1,19 @@ +"""Fixtures that provide access to global singletons.""" + +import typing as t + +import pytest + +if t.TYPE_CHECKING: + from aiida.manage.manager import Manager + + +@pytest.fixture(scope='session') +def aiida_manager() -> 'Manager': + """Return the global :class:`~aiida.manage.manager.Manager` instance. + + :returns :class:`~aiida.manage.manager.Manager`: The global manager instance. + """ + from aiida.manage import get_manager + + return get_manager() diff --git a/src/aiida/tools/pytest_fixtures/orm.py b/src/aiida/tools/pytest_fixtures/orm.py new file mode 100644 index 0000000000..0ed7ea18d7 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/orm.py @@ -0,0 +1,300 @@ +"""Fixtures that provides ORM instances.""" + +from __future__ import annotations + +import pathlib +import typing as t + +import pytest + +if t.TYPE_CHECKING: + from aiida.orm import Computer + + +@pytest.fixture(scope='session') +def ssh_key(tmp_path_factory) -> t.Generator[pathlib.Path, None, None]: + """Generate a temporary SSH key pair for the test session and return the filepath of the private key. + + The filepath of the public key is the same as the private key, but it adds the ``.pub`` file extension. + + :returns: The filepath of the generated private key. + """ + from uuid import uuid4 + + from cryptography.hazmat.backends import default_backend as crypto_default_backend + from cryptography.hazmat.primitives import serialization as crypto_serialization + from cryptography.hazmat.primitives.asymmetric import rsa + + key = rsa.generate_private_key( + backend=crypto_default_backend(), + public_exponent=65537, + key_size=2048, + ) + + private_key = key.private_bytes( + crypto_serialization.Encoding.PEM, + crypto_serialization.PrivateFormat.PKCS8, + crypto_serialization.NoEncryption(), + ) + + public_key = key.public_key().public_bytes( + crypto_serialization.Encoding.OpenSSH, + crypto_serialization.PublicFormat.OpenSSH, + ) + + dirpath = tmp_path_factory.mktemp('keys') + filename = uuid4().hex + filepath_private_key = dirpath / filename + filepath_public_key = dirpath / f'{filename}.pub' + + filepath_private_key.write_bytes(private_key) + filepath_public_key.write_bytes(public_key) + + try: + yield filepath_private_key + finally: + filepath_private_key.unlink(missing_ok=True) + filepath_public_key.unlink(missing_ok=True) + + +@pytest.fixture +def aiida_computer(tmp_path) -> t.Callable[[], 'Computer']: + """Return a factory to create a new or load an existing :class:`aiida.orm.computers.Computer` instance. + + The database is queried for an existing computer with the same ``label``, ``hostname``, ``scheduler_type`` and + ``transport_type``. If it exists, it means it was probably created by this fixture in a previous call and it is + simply returned. 
Otherwise a new instance is created. Note that the computer is not explicitly configured, unless + ``configure_kwargs`` are specified. By default the ``localhost`` hostname is used with the ``core.direct`` and + ``core.local`` scheduler and transport plugins. + + The factory has the following signature: + + :param label: The computer label. If not specified, a random UUID4 is used. + :param hostname: The hostname of the computer. Defaults to ``localhost``. + :param scheduler_type: The scheduler plugin to use. Defaults to ``core.direct``. + :param transport_type: The transport plugin to use. Defaults to ``core.local``. + :param minimum_job_poll_interval: The default minimum job poll interval to set. Defaults to 0. + :param default_mpiprocs_per_machine: The default number of MPI procs to set. Defaults to 1. + :param configuration_kwargs: Optional keyword arguments that, if defined, are used to configure the computer + by calling :meth:`aiida.orm.computers.Computer.configure`. + :return: A stored computer instance. + """ + + def factory( + label: str | None = None, + hostname='localhost', + scheduler_type='core.direct', + transport_type='core.local', + minimum_job_poll_interval: int = 0, + default_mpiprocs_per_machine: int = 1, + configuration_kwargs: dict[t.Any, t.Any] | None = None, + ) -> 'Computer': + import uuid + + from aiida.common.exceptions import NotExistent + from aiida.orm import Computer + + label = label or f'test-computer-{uuid.uuid4().hex}' + + try: + computer = Computer.collection.get( + label=label, hostname=hostname, scheduler_type=scheduler_type, transport_type=transport_type + ) + except NotExistent: + computer = Computer( + label=label, + hostname=hostname, + workdir=str(tmp_path), + transport_type=transport_type, + scheduler_type=scheduler_type, + ) + computer.store() + computer.set_minimum_job_poll_interval(minimum_job_poll_interval) + computer.set_default_mpiprocs_per_machine(default_mpiprocs_per_machine) + + if configuration_kwargs: + computer.configure(**configuration_kwargs) + + return computer + + return factory + + +@pytest.fixture +def aiida_computer_local(aiida_computer) -> t.Callable[[], Computer]: + """Factory to return a :class:`aiida.orm.computers.Computer` instance with ``core.local`` transport. + + Usage:: + + def test(aiida_computer_ssh): + computer = aiida_computer_ssh(label='some-label', configure=True) + assert computer.transport_type == 'core.local' + assert computer.is_configured + + The factory has the following signature: + + :param label: The computer label. If not specified, a random UUID4 is used. + :param configure: Boolean, if ``True``, ensures the computer is configured, otherwise the computer is returned + as is. Note that if a computer with the given label already exists and it was configured before, the + computer will not be "un-"configured. If an unconfigured computer is absolutely required, make sure to first + delete the existing computer or specify another label. + :return: A stored computer instance. + """ + + def factory(label: str | None = None, configure: bool = True) -> Computer: + computer = aiida_computer(label=label, hostname='localhost', transport_type='core.local') + + if configure: + computer.configure() + + return computer + + return factory + + +@pytest.fixture +def aiida_computer_ssh(aiida_computer, ssh_key) -> t.Callable[[], 'Computer']: + """Factory to return a :class:`aiida.orm.computers.Computer` instance with ``core.ssh`` transport. 
+ + If ``configure=True``, an SSH key pair is automatically added to the ``.ssh`` folder of the user, allowing an + actual SSH connection to be made to the localhost. + + Usage:: + + def test(aiida_computer_ssh): + computer = aiida_computer_ssh(label='some-label', configure=True) + assert computer.transport_type == 'core.ssh' + assert computer.is_configured + + The factory has the following signature: + + :param label: The computer label. If not specified, a random UUID4 is used. + :param configure: Boolean, if ``True``, ensures the computer is configured, otherwise the computer is returned + as is. Note that if a computer with the given label already exists and it was configured before, the + computer will not be "un-"configured. If an unconfigured computer is absolutely required, make sure to first + delete the existing computer or specify another label. + :return: A stored computer instance. + """ + + def factory(label: str | None = None, configure: bool = True) -> 'Computer': + computer = aiida_computer(label=label, hostname='localhost', transport_type='core.ssh') + + if configure: + computer.configure( + key_filename=str(ssh_key), + key_policy='AutoAddPolicy', + ) + + return computer + + return factory + + +@pytest.fixture +def aiida_localhost(aiida_computer_local) -> 'Computer': + """Return a :class:`aiida.orm.computers.Computer` instance representing localhost with ``core.local`` transport. + + Usage:: + + def test(aiida_localhost): + assert aiida_localhost.transport_type == 'core.local' + + :return: The computer. + """ + return aiida_computer_local(label='localhost') + + +@pytest.fixture +def aiida_code(): + """Return a factory to create a new or load an existing :class:`aiida.orm.nodes.data.code.abstract.AbstractCode`. + + Usage:: + + def test(aiida_localhost, aiida_code): + from aiida.orm import InstalledCode + code = aiida_code( + 'core.code.installed', + label='test-code', + computer=aiida_localhost, + filepath_executable='/bin/bash' + ) + assert isinstance(code, InstalledCode) + + The factory has the following signature: + + :param entry_point: Entry point of the code plugin. + :param label: The label of the code. Default to a randomly generated string. + :param kwargs: Additional keyword arguments that are passed to the code's constructor. + :return: The created or loaded code instance. + """ + + def factory(entry_point: str, label: str | None = None, **kwargs): + import uuid + + from aiida.common.exceptions import MultipleObjectsError, NotExistent + from aiida.orm import QueryBuilder + from aiida.plugins import DataFactory + + cls = DataFactory(entry_point) + label = label or f'test-code-{uuid.uuid4().hex}' + + try: + code = QueryBuilder().append(cls, filters={'label': label}).one()[0] + except (MultipleObjectsError, NotExistent): + code = cls(label=label, **kwargs).store() + + return code + + return factory + + +@pytest.fixture +def aiida_code_installed(aiida_code, aiida_localhost): + """Return a factory to create a new or load an existing :class:`aiida.orm.nodes.data.code.installed.InstalledCode`. + + Usage:: + + def test(aiida_code_installed): + from aiida.orm import InstalledCode + code = aiida_code_installed() + assert isinstance(code, InstalledCode) + + The factory has the following signature: + + :param label: The label of the code. Default to a randomly generated string. + :param default_calc_job_plugin: Optional default calcjob plugin to set. + :param computer: The computer to set. Defaults to localhost computer of the ``aiida_localhost`` fixture. 
+ :param filepath_executable: The filepath of the executable. Defaults to ``/bin/bash``. + :param use_double_quotes: Whether the executable and arguments of the code in the submission script should be + escaped with single or double quotes. + :param with_mpi: Whether the executable should be run as an MPI program. + :param prepend_text: Optional bash commands that should be executed in the submission script before the executable. + :param append_text: Optional bash commands that should be executed in the submission script after the executable. + :return: The created or loaded code instance. + """ + + def factory( + label: str | None = None, + description: str | None = None, + default_calc_job_plugin: str | None = None, + computer: Computer = aiida_localhost, + filepath_executable: str = '/bin/bash', + use_double_quotes: bool = False, + with_mpi: bool | None = None, + prepend_text: str = '', + append_text: str = '', + ): + return aiida_code( + 'core.code.installed', + label=label, + description=description, + default_calc_job_plugin=default_calc_job_plugin, + computer=computer, + filepath_executable=filepath_executable, + use_double_quotes=use_double_quotes, + with_mpi=with_mpi, + prepend_text=prepend_text, + append_text=append_text, + ) + + return factory diff --git a/src/aiida/tools/pytest_fixtures/storage.py b/src/aiida/tools/pytest_fixtures/storage.py new file mode 100644 index 0000000000..d76c9b4452 --- /dev/null +++ b/src/aiida/tools/pytest_fixtures/storage.py @@ -0,0 +1,87 @@ +"""Fixtures providing resources for storage plugins.""" + +from __future__ import annotations + +import typing as t + +import pytest + +if t.TYPE_CHECKING: + from pgtest.pgtest import PGTest + + +@pytest.fixture(scope='session') +def postgres_cluster(): + """Create a temporary and isolated PostgreSQL cluster using ``pgtest`` and cleanup after the yield. + + :param database_name: Name of the database. + :param database_username: Username to use for authentication. + :param database_password: Password to use for authentication. + :returns: Dictionary with parameters to connect to the PostgreSQL cluster. 
+ """ + from uuid import uuid4 + + from pgtest.pgtest import PGTest + + def create_database( + database_name: str | None = None, database_username: str | None = None, database_password: str | None = None + ) -> dict[str, str]: + from aiida.manage.external.postgres import Postgres + + postgres_config = { + 'database_engine': 'postgresql_psycopg2', + 'database_name': database_name or str(uuid4()), + 'database_username': database_username or 'guest', + 'database_password': database_password or 'guest', + } + + postgres = Postgres(interactive=False, quiet=True, dbinfo=cluster.dsn) # type: ignore[union-attr] + if not postgres.dbuser_exists(postgres_config['database_username']): + postgres.create_dbuser( + postgres_config['database_username'], postgres_config['database_password'], 'CREATEDB' + ) + postgres.create_db(postgres_config['database_username'], postgres_config['database_name']) + + postgres_config['database_hostname'] = postgres.host_for_psycopg2 + postgres_config['database_port'] = postgres.port_for_psycopg2 + + return postgres_config + + cluster = None + try: + cluster = PGTest() + cluster.create_database = create_database + yield cluster + finally: + if cluster is not None: + cluster.close() + + +@pytest.fixture(scope='session') +def config_psql_dos( + tmp_path_factory: pytest.TempPathFactory, + postgres_cluster: 'PGTest', +) -> t.Callable[[str | None, str | None, str | None], dict[str, t.Any]]: + """Return a profile configuration for the :class:`~aiida.storage.psql_dos.backend.PsqlDosBackend`. + + The factory has the following signature to allow further configuring the database that is created: + + :param database_name: Name of the database to be created. + :param database_username: Username to use for authentication. + :param database_password: Password to use for authentication. + :returns: The dictionary with the storage configuration for the ``core.psql_dos`` storage plugin. 
+ """ + + def factory( + database_name: str | None = None, database_username: str | None = None, database_password: str | None = None + ) -> dict[str, t.Any]: + storage_config: dict[str, t.Any] = postgres_cluster.create_database( + database_name=database_name, + database_username=database_username, + database_password=database_password, + ) + storage_config['repository_uri'] = f'file://{tmp_path_factory.mktemp("repository")}' + + return storage_config + + return factory diff --git a/tests/benchmark/test_archive.py b/tests/benchmark/test_archive.py index af4aca3fd5..2a01818ac1 100644 --- a/tests/benchmark/test_archive.py +++ b/tests/benchmark/test_archive.py @@ -90,7 +90,7 @@ def _run(): @pytest.mark.benchmark(group='import-export') def test_import(aiida_profile, benchmark, tmp_path, depth, breadth, num_objects): """Benchmark importing a provenance graph.""" - aiida_profile.clear_profile() + aiida_profile.reset_storage() root_node = Dict() recursive_provenance(root_node, depth=depth, breadth=breadth, num_objects=num_objects) root_uuid = root_node.uuid @@ -99,7 +99,7 @@ def test_import(aiida_profile, benchmark, tmp_path, depth, breadth, num_objects) create_archive([root_node], **kwargs) def _setup(): - aiida_profile.clear_profile() + aiida_profile.reset_storage() def _run(): import_archive(str(out_path)) diff --git a/tests/brokers/test_rabbitmq.py b/tests/brokers/test_rabbitmq.py index bd771b9c26..0e828fa12c 100644 --- a/tests/brokers/test_rabbitmq.py +++ b/tests/brokers/test_rabbitmq.py @@ -91,7 +91,7 @@ def test_add_broadcast_subscriber(communicator): @pytest.mark.requires_rmq @pytest.mark.usefixtures('aiida_profile_clean') -def test_duplicate_subscriber_identifier(aiida_local_code_factory, started_daemon_client, submit_and_await): +def test_duplicate_subscriber_identifier(aiida_code_installed, started_daemon_client, submit_and_await): """Test that a ``DuplicateSubscriberError`` in ``ProcessLauncher._continue`` does not except the process. It is possible that when a daemon worker tries to continue a process, that a ``kiwipy.DuplicateSubscriberError`` is @@ -114,7 +114,7 @@ def test_duplicate_subscriber_identifier(aiida_local_code_factory, started_daemo process, that should not incur this inception and that is not what we are testing here. This test should therefore be ran with a single daemon worker. 
""" - code = aiida_local_code_factory(entry_point='core.arithmetic.add', executable='/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder = code.get_builder() builder.x = Int(1) diff --git a/tests/cmdline/commands/test_code.py b/tests/cmdline/commands/test_code.py index 75b9c7f462..e05004a233 100644 --- a/tests/cmdline/commands/test_code.py +++ b/tests/cmdline/commands/test_code.py @@ -210,11 +210,13 @@ def test_mixed(run_cli_command, aiida_localhost, non_interactive_editor): @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) -def test_code_duplicate_interactive(run_cli_command, aiida_local_code_factory, non_interactive_editor): +def test_code_duplicate_interactive(run_cli_command, aiida_code_installed, non_interactive_editor): """Test code duplication interactive.""" label = 'code_duplicate_interactive' user_input = f'\n\n{label}\n\n\n\n' - code = aiida_local_code_factory('core.arithmetic.add', '/bin/cat', label='code') + code = aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/cat', description='code' + ) run_cli_command(cmd_code.code_duplicate, [str(code.pk)], user_input=user_input) duplicate = load_code(label) @@ -225,14 +227,16 @@ def test_code_duplicate_interactive(run_cli_command, aiida_local_code_factory, n @pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) -def test_code_duplicate_ignore(run_cli_command, aiida_local_code_factory, non_interactive_editor): +def test_code_duplicate_ignore(run_cli_command, aiida_code_installed, non_interactive_editor): """Providing "!" to description should lead to empty description. 
Regression test for: https://github.com/aiidateam/aiida-core/issues/3770 """ label = 'code_duplicate_interactive' user_input = f'\n\n{label}\n!\n\n\n' - code = aiida_local_code_factory('core.arithmetic.add', '/bin/cat', label='code') + code = aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/cat', label='code' + ) run_cli_command(cmd_code.code_duplicate, [str(code.pk)], user_input=user_input) duplicate = load_code(label) @@ -240,10 +244,15 @@ def test_code_duplicate_ignore(run_cli_command, aiida_local_code_factory, non_in @pytest.mark.usefixtures('aiida_profile_clean') -def test_code_export(run_cli_command, aiida_local_code_factory, tmp_path, file_regression): +def test_code_export(run_cli_command, aiida_code_installed, tmp_path, file_regression): """Test export the code setup to str.""" prepend_text = 'module load something\n some command' - code = aiida_local_code_factory('core.arithmetic.add', '/bin/cat', label='code', prepend_text=prepend_text) + code = aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', + filepath_executable='/bin/cat', + label='code', + prepend_text=prepend_text, + ) filepath = tmp_path / 'code.yml' options = [str(code.pk), str(filepath)] run_cli_command(cmd_code.export, options) @@ -312,11 +321,16 @@ def test_from_config_url(non_interactive_editor, run_cli_command, aiida_localhos @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) def test_code_setup_remote_duplicate_full_label_interactive( - run_cli_command, aiida_local_code_factory, aiida_localhost, non_interactive_editor + run_cli_command, aiida_code_installed, aiida_localhost, non_interactive_editor ): """Test ``verdi code setup`` for a remote code in interactive mode specifying an existing full label.""" label = 'some-label' - aiida_local_code_factory('core.arithmetic.add', '/bin/cat', computer=aiida_localhost, label=label) + aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', + filepath_executable='/bin/cat', + computer=aiida_localhost, + label=label, + ) assert isinstance(load_code(label), InstalledCode) label_unique = 'label-unique' @@ -329,11 +343,16 @@ def test_code_setup_remote_duplicate_full_label_interactive( @pytest.mark.parametrize('label_first', (True, False)) def test_code_setup_remote_duplicate_full_label_non_interactive( - run_cli_command, aiida_local_code_factory, aiida_localhost, label_first + run_cli_command, aiida_code_installed, aiida_localhost, label_first ): """Test ``verdi code setup`` for a remote code in non-interactive mode specifying an existing full label.""" label = f'some-label-{label_first}' - aiida_local_code_factory('core.arithmetic.add', '/bin/cat', computer=aiida_localhost, label=label) + aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', + filepath_executable='/bin/cat', + computer=aiida_localhost, + label=label, + ) assert isinstance(load_code(label), InstalledCode) options = ['-n', '-D', 'd', '-P', 'core.arithmetic.add', '--on-computer', '--remote-abs-path=/remote/abs/path'] @@ -349,9 +368,7 @@ def test_code_setup_remote_duplicate_full_label_non_interactive( @pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) -def test_code_setup_local_duplicate_full_label_interactive( - run_cli_command, aiida_local_code_factory, aiida_localhost, non_interactive_editor, tmp_path -): +def test_code_setup_local_duplicate_full_label_interactive(run_cli_command, 
non_interactive_editor, tmp_path): """Test ``verdi code setup`` for a local code in interactive mode specifying an existing full label.""" filepath = tmp_path / 'bash' filepath.write_text('fake bash') @@ -369,9 +386,7 @@ def test_code_setup_local_duplicate_full_label_interactive( @pytest.mark.usefixtures('aiida_profile_clean') -def test_code_setup_local_duplicate_full_label_non_interactive( - run_cli_command, aiida_local_code_factory, aiida_localhost -): +def test_code_setup_local_duplicate_full_label_non_interactive(run_cli_command): """Test ``verdi code setup`` for a local code in non-interactive mode specifying an existing full label.""" label = 'some-label' code = PortableCode(filepath_executable='bash', filepath_files=pathlib.Path('/bin/bash')) diff --git a/tests/cmdline/commands/test_code/test_code_export.yml b/tests/cmdline/commands/test_code/test_code_export.yml index 68391abcc6..640717a1d2 100644 --- a/tests/cmdline/commands/test_code/test_code_export.yml +++ b/tests/cmdline/commands/test_code/test_code_export.yml @@ -1,7 +1,7 @@ append_text: '' computer: localhost default_calc_job_plugin: core.arithmetic.add -description: code +description: '' filepath_executable: /bin/cat label: code prepend_text: "module load something\n some command" diff --git a/tests/cmdline/commands/test_devel.py b/tests/cmdline/commands/test_devel.py index 66a1579027..7dc2fdff0a 100644 --- a/tests/cmdline/commands/test_devel.py +++ b/tests/cmdline/commands/test_devel.py @@ -47,9 +47,9 @@ def test_launch_add_daemon(run_cli_command, submit_and_await): assert node.is_finished_ok -def test_launch_add_code(run_cli_command, aiida_local_code_factory): +def test_launch_add_code(run_cli_command, aiida_code_installed): """Test ``verdi devel launch-add`` passing an explicit ``Code``.""" - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') result = run_cli_command(cmd_devel.devel_launch_arithmetic_add, ['-X', str(code.pk)]) assert not re.search(r'Warning: No `localhost` computer exists yet: creating and configuring', result.stdout) diff --git a/tests/cmdline/commands/test_profile.py b/tests/cmdline/commands/test_profile.py index b0c3a4dfc5..4c382e732c 100644 --- a/tests/cmdline/commands/test_profile.py +++ b/tests/cmdline/commands/test_profile.py @@ -179,7 +179,7 @@ def test_setup(config_psql_dos, run_cli_command, isolated_config, tmp_path, entr if entry_point == 'core.psql_dos': options = [] - for key, value in config_psql_dos()['storage']['config'].items(): + for key, value in config_psql_dos().items(): options.append(f'--{key.replace("_", "-")}') options.append(str(value)) else: diff --git a/tests/cmdline/commands/test_rabbitmq.py b/tests/cmdline/commands/test_rabbitmq.py index 4fc1c4c551..4d9c0ac449 100644 --- a/tests/cmdline/commands/test_rabbitmq.py +++ b/tests/cmdline/commands/test_rabbitmq.py @@ -68,9 +68,9 @@ def test_tasks_revive_without_daemon(run_cli_command): @pytest.mark.usefixtures('started_daemon_client') -def test_revive(run_cli_command, monkeypatch, aiida_local_code_factory, submit_and_await): +def test_revive(run_cli_command, monkeypatch, aiida_code_installed, submit_and_await): """Test ``tasks revive``.""" - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder = code.get_builder() builder.x = Int(1) builder.y = Int(1) diff --git 
a/tests/cmdline/utils/test_common.py b/tests/cmdline/utils/test_common.py
index a020d1211f..863f17d7a4 100644
--- a/tests/cmdline/utils/test_common.py
+++ b/tests/cmdline/utils/test_common.py
@@ -14,9 +14,9 @@ from aiida.orm import CalcFunctionNode, CalculationNode, WorkflowNode
-def test_get_node_summary(aiida_local_code_factory):
+def test_get_node_summary(aiida_code_installed):
     """Test the ``get_node_summary`` utility."""
-    code = aiida_local_code_factory(entry_point='core.arithmetic.add', executable='/bin/bash')
+    code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash')
     node = CalculationNode()
     node.computer = code.computer
     node.base.links.add_incoming(code, link_type=LinkType.INPUT_CALC, link_label='code')
diff --git a/tests/conftest.py b/tests/conftest.py
index b144384804..936794b5e2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -30,7 +30,21 @@ if t.TYPE_CHECKING:
     from aiida.manage.configuration.config import Config
 
-pytest_plugins = ['aiida.manage.tests.pytest_fixtures', 'sphinx.testing.fixtures']
+pytest_plugins = ['aiida.tools.pytest_fixtures', 'sphinx.testing.fixtures']
+
+
+@pytest.fixture(scope='session')
+def aiida_profile(aiida_config, aiida_profile_factory, config_psql_dos):
+    """Create and load a profile with ``core.psql_dos`` as a storage backend and RabbitMQ as the broker.
+
+    This overrides the ``aiida_profile`` fixture provided by ``aiida-core`` which runs with ``core.sqlite_dos`` and
+    without broker. However, tests in this package make use of the daemon which requires a broker and the tests should
+    be run against the main storage backend, which is ``core.psql_dos``.
+    """
+    with aiida_profile_factory(
+        aiida_config, storage_backend='core.psql_dos', storage_config=config_psql_dos(), broker_backend='core.rabbitmq'
+    ) as profile:
+        yield profile
 
 
 @pytest.fixture()
@@ -488,7 +502,7 @@ def output_lines(self) -> list[str]:
 
 
 @pytest.fixture
-def run_cli_command(reset_log_level, aiida_instance, aiida_profile):
+def run_cli_command(reset_log_level, aiida_config, aiida_profile):
     """Run a ``click`` command with the given options.
 
     The call will raise if the command triggered an exception or the exit code returned is non-zero.
@@ -541,12 +555,12 @@ def factory( parameters = [str(param) for param in parameters or []] try: - config_show_deprecations = aiida_instance.get_option('warnings.showdeprecations') + config_show_deprecations = aiida_config.get_option('warnings.showdeprecations') if config_show_deprecations and suppress_warnings: - aiida_instance.set_option('warnings.showdeprecations', False) + aiida_config.set_option('warnings.showdeprecations', False) if use_subprocess: - aiida_instance.store() + aiida_config.store() if use_subprocess: result = run_cli_command_subprocess( @@ -565,9 +579,9 @@ def factory( assert result.exit_code == 0, (result.exit_code, result.stderr) finally: if config_show_deprecations and suppress_warnings: - aiida_instance.set_option('warnings.showdeprecations', config_show_deprecations) + aiida_config.set_option('warnings.showdeprecations', config_show_deprecations) if use_subprocess: - aiida_instance.store() + aiida_config.store() return result diff --git a/tests/engine/daemon/test_execmanager.py b/tests/engine/daemon/test_execmanager.py index 416cede16c..351f6bf297 100644 --- a/tests/engine/daemon/test_execmanager.py +++ b/tests/engine/daemon/test_execmanager.py @@ -81,12 +81,12 @@ def file_hierarchy_simple(): @pytest.fixture -def node_and_calc_info(aiida_localhost, aiida_local_code_factory): +def node_and_calc_info(aiida_localhost, aiida_code_installed): """Return a ``CalcJobNode`` and associated ``CalcInfo`` instance.""" node = CalcJobNode(computer=aiida_localhost) node.store() - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash').store() + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash').store() code_info = CodeInfo() code_info.code_uuid = code.uuid diff --git a/tests/engine/processes/calcjobs/test_calc_job.py b/tests/engine/processes/calcjobs/test_calc_job.py index af1b323200..52e3602d73 100644 --- a/tests/engine/processes/calcjobs/test_calc_job.py +++ b/tests/engine/processes/calcjobs/test_calc_job.py @@ -42,11 +42,11 @@ def raise_exception(exception, *args, **kwargs): @pytest.fixture -def get_calcjob_builder(aiida_local_code_factory): +def get_calcjob_builder(aiida_code_installed): """Return a builder for the ``ArithmeticAddCalculation`` that is ready to go.""" def _factory(**kwargs): - code = aiida_local_code_factory('core.arithmetic.add', 'bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='bash') builder = code.get_builder() builder.x = orm.Int(1) builder.y = orm.Int(1) @@ -187,7 +187,7 @@ def prepare_for_submission(self, folder): ), ) def test_multi_codes_with_mpi( - aiida_local_code_factory, + aiida_code_installed, fixture_sandbox, manager, code_key, @@ -209,9 +209,12 @@ def test_multi_codes_with_mpi( from aiida.engine.utils import instantiate_process inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), - code_key: aiida_local_code_factory( - 'core.arithmetic.add', '/bin/bash', label=str(uuid.uuid4()), with_mpi=with_mpi_code + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), + code_key: aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', + filepath_executable='/bin/bash', + label=str(uuid.uuid4()), + with_mpi=with_mpi_code, ), 'metadata': { 'options': { @@ -249,15 +252,18 @@ def define(cls, spec): @pytest.mark.requires_rmq @pytest.mark.usefixtures('chdir_tmp_path') @pytest.mark.parametrize('parallel_run', [True, False]) -def 
test_multi_codes_run_parallel(aiida_local_code_factory, file_regression, parallel_run): +def test_multi_codes_run_parallel(aiida_code_installed, file_regression, parallel_run): """Test codes_run_mode set in CalcJob""" inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), - 'code_info_with_mpi_none': aiida_local_code_factory( - 'core.arithmetic.add', '/bin/bash', label=str(uuid.uuid4()) + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), + 'code_info_with_mpi_none': aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash', label=str(uuid.uuid4()) ), - 'code_info_with_mpi_false': aiida_local_code_factory( - 'core.arithmetic.add', '/bin/bash', label=str(uuid.uuid4()), with_mpi=False + 'code_info_with_mpi_false': aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', + filepath_executable='/bin/bash', + label=str(uuid.uuid4()), + with_mpi=False, ), 'parallel_run': orm.Bool(parallel_run), 'metadata': {'dry_run': True, 'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}}, @@ -275,13 +281,17 @@ def test_multi_codes_run_parallel(aiida_local_code_factory, file_regression, par @pytest.mark.requires_rmq @pytest.mark.usefixtures('chdir_tmp_path') @pytest.mark.parametrize('computer_use_double_quotes', [True, False]) -def test_computer_double_quotes(aiida_computer, aiida_local_code_factory, file_regression, computer_use_double_quotes): +def test_computer_double_quotes( + aiida_computer_local, aiida_code_installed, file_regression, computer_use_double_quotes +): """Test that bash script quote escape behaviour can be controlled""" - computer = aiida_computer(label=f'test-code-computer-{computer_use_double_quotes}') + computer = aiida_computer_local(label=f'test-code-computer-{computer_use_double_quotes}') computer.set_use_double_quotes(computer_use_double_quotes) inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash', computer), + 'code': aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash', computer=computer + ), 'metadata': { 'dry_run': True, 'options': { @@ -814,14 +824,16 @@ def test_get_importer(self): @pytest.fixture -def generate_process(aiida_local_code_factory): +def generate_process(aiida_code_installed): """Instantiate a process with default inputs and return the `Process` instance.""" from aiida.engine.utils import instantiate_process from aiida.manage import get_manager def _generate_process(inputs=None): base_inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), + 'code': aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash' + ), 'x': orm.Int(1), 'y': orm.Int(2), 'metadata': {'options': {}}, @@ -980,7 +992,7 @@ def test_parse_exit_code_priority( final, generate_calc_job, fixture_sandbox, - aiida_local_code_factory, + aiida_code_installed, monkeypatch, ): """Test the logic around exit codes in the `CalcJob.parse` method. 
@@ -1020,7 +1032,7 @@ def parse_retrieved_output(_, __): monkeypatch.setattr(CalcJob, 'parse_retrieved_output', parse_retrieved_output) inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), 'x': Int(1), 'y': Int(2), } @@ -1318,11 +1330,13 @@ class TestImport: """Test the functionality to import existing calculations completed outside of AiiDA.""" @pytest.fixture(autouse=True) - def init_profile(self, aiida_localhost, aiida_local_code_factory): + def init_profile(self, aiida_localhost, aiida_code_installed): """Initialize the profile.""" self.computer = aiida_localhost self.inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash', computer=aiida_localhost), + 'code': aiida_code_installed( + default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash', computer=aiida_localhost + ), 'x': orm.Int(1), 'y': orm.Int(2), 'metadata': { @@ -1458,12 +1472,12 @@ def test_file_copy_operation_order_default(fixture_sandbox, arithmetic_add_input ] -def test_file_copy_operation_order_invalid(fixture_sandbox, runner, aiida_local_code_factory): +def test_file_copy_operation_order_invalid(fixture_sandbox, runner, aiida_code_installed): """Test the ``CalcInfo.file_copy_operation_order`` is causes exception if set to invalid type.""" from aiida.engine.utils import instantiate_process inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/true'), + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/true'), 'metadata': {'options': {'resources': {'num_machines': 1}}}, } process = instantiate_process(runner, FileCopyOperationOrderInvalid, **inputs) diff --git a/tests/engine/processes/calcjobs/test_monitors.py b/tests/engine/processes/calcjobs/test_monitors.py index 18225e6861..cd1d4f51ce 100644 --- a/tests/engine/processes/calcjobs/test_monitors.py +++ b/tests/engine/processes/calcjobs/test_monitors.py @@ -188,11 +188,11 @@ def monitor_emit_warning(node, transport, **kwargs): AIIDA_LOGGER.warning('monitor_emit_warning monitor was called') -def test_calc_job_monitors_process_poll_interval_integrated(entry_points, aiida_local_code_factory, aiida_caplog): +def test_calc_job_monitors_process_poll_interval_integrated(entry_points, aiida_code_installed, caplog): """Test the ``minimum_poll_interval`` input by actually running through the engine.""" entry_points.add(monitor_emit_warning, 'aiida.calculations.monitors:core.emit_warning') - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder = code.get_builder() builder.x = Int(1) builder.y = Int(1) @@ -203,16 +203,16 @@ def test_calc_job_monitors_process_poll_interval_integrated(entry_points, aiida_ assert node.is_finished_ok # Check that the number of log messages emitted by the monitor is just 1 as it should have been called just once. 
- logs = [rec.message for rec in aiida_caplog.records if rec.message == 'monitor_emit_warning monitor was called'] + logs = [rec.message for rec in caplog.records if rec.message == 'monitor_emit_warning monitor was called'] assert len(logs) == 1 -def test_calc_job_monitors_outputs(entry_points, aiida_local_code_factory): +def test_calc_job_monitors_outputs(entry_points, aiida_code_installed): """Test a monitor that returns outputs to be attached to the node.""" entry_points.add(StoreMessageCalculation, 'aiida.calculations:core.store_message') entry_points.add(monitor_store_message, 'aiida.calculations.monitors:core.store_message') - code = aiida_local_code_factory('core.store_message', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.store_message', filepath_executable='/bin/bash') builder = code.get_builder() builder.x = Int(1) builder.y = Int(1) diff --git a/tests/engine/processes/test_builder.py b/tests/engine/processes/test_builder.py index 6d8935967a..9a57362248 100644 --- a/tests/engine/processes/test_builder.py +++ b/tests/engine/processes/test_builder.py @@ -280,9 +280,9 @@ def test_port_names_overlapping_mutable_mapping_methods(): assert builder.boolean == orm.Bool(False) -def test_calc_job_node_get_builder_restart(aiida_local_code_factory): +def test_calc_job_node_get_builder_restart(aiida_code_installed): """Test the `CalcJobNode.get_builder_restart` method.""" - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') inputs = { 'metadata': { 'label': 'some-label', diff --git a/tests/engine/processes/test_control.py b/tests/engine/processes/test_control.py index 3bbf3e38eb..51222c653c 100644 --- a/tests/engine/processes/test_control.py +++ b/tests/engine/processes/test_control.py @@ -23,10 +23,10 @@ def test_processes_all_exclusivity(submit_and_await, action): @pytest.mark.usefixtures('aiida_profile_clean', 'stopped_daemon_client') @pytest.mark.parametrize('action', (control.pause_processes, control.play_processes, control.kill_processes)) -def test_daemon_not_running(action, aiida_caplog): +def test_daemon_not_running(action, caplog): """Test that control methods warns if the daemon is not running.""" action(all_entries=True) - assert 'The daemon is not running' in aiida_caplog.records[0].message + assert 'The daemon is not running' in caplog.records[0].message @pytest.mark.usefixtures('aiida_profile_clean', 'started_daemon_client') @@ -98,9 +98,9 @@ def test_kill_processes_all_entries(submit_and_await): @pytest.mark.usefixtures('aiida_profile_clean', 'started_daemon_client') -def test_revive(monkeypatch, aiida_local_code_factory, submit_and_await): +def test_revive(monkeypatch, aiida_code_installed, submit_and_await): """Test :func:`aiida.engine.processes.control.revive_processes`.""" - code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder = code.get_builder() builder.x = Int(1) builder.y = Int(1) diff --git a/tests/engine/test_launch.py b/tests/engine/test_launch.py index f979e52d90..fedd42cd98 100644 --- a/tests/engine/test_launch.py +++ b/tests/engine/test_launch.py @@ -69,10 +69,10 @@ def add(self): @pytest.mark.usefixtures('started_daemon_client') -def test_submit_wait(aiida_local_code_factory): +def test_submit_wait(aiida_code_installed): """Test the ``wait`` argument of 
:meth:`aiida.engine.launch.submit`.""" builder = ArithmeticAddCalculation.get_builder() - builder.code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + builder.code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder.x = orm.Int(1) builder.y = orm.Int(1) builder.metadata = {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}} @@ -94,10 +94,10 @@ def test_await_processes_invalid(): @pytest.mark.usefixtures('started_daemon_client') -def test_await_processes(aiida_local_code_factory, caplog): +def test_await_processes(aiida_code_installed, caplog): """Test :func:`aiida.engine.launch.await_processes`.""" builder = ArithmeticAddCalculation.get_builder() - builder.code = aiida_local_code_factory('core.arithmetic.add', '/bin/bash') + builder.code = aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash') builder.x = orm.Int(1) builder.y = orm.Int(2) builder.metadata = {'options': {'resources': {'num_machines': 1}}} diff --git a/tests/engine/test_memory_leaks.py b/tests/engine/test_memory_leaks.py index 20f445706f..9cfbe7653a 100644 --- a/tests/engine/test_memory_leaks.py +++ b/tests/engine/test_memory_leaks.py @@ -54,9 +54,13 @@ def test_leak_run_process(): @pytest.mark.skipif(sys.version_info >= (3, 12), reason='Garbage collecting hangs on Python 3.12') @pytest.mark.usefixtures('aiida_profile', 'check_memory_leaks') -def test_leak_local_calcjob(aiida_local_code_factory): +def test_leak_local_calcjob(aiida_code_installed): """Test whether running a local CalcJob leaks memory.""" - inputs = {'x': orm.Int(1), 'y': orm.Int(2), 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash')} + inputs = { + 'x': orm.Int(1), + 'y': orm.Int(2), + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), + } run_finished_ok(ArithmeticAddCalculation, **inputs) diff --git a/tests/engine/test_process_function.py b/tests/engine/test_process_function.py index 7bd68f98fa..8880f70169 100644 --- a/tests/engine/test_process_function.py +++ b/tests/engine/test_process_function.py @@ -694,7 +694,7 @@ def function( assert input_namespace[key].valid_type == valid_types, key -def test_type_hinting_spec_inference_pep_604(aiida_caplog): +def test_type_hinting_spec_inference_pep_604(caplog): """Test the parsing of type hinting that uses union typing of PEP 604 which is only available to Python 3.10 and up. Even though adding ``from __future__ import annotations`` should backport this functionality to Python 3.9 and older @@ -724,7 +724,7 @@ def function( ('c', (orm.Dict, type(None))), ) else: - assert 'function `function` has invalid type hints: unsupported operand type' in aiida_caplog.records[0].message + assert 'function `function` has invalid type hints: unsupported operand type' in caplog.records[0].message expected = ( ('a', (orm.Data,)), ('b', (orm.Data,)), @@ -770,7 +770,7 @@ def function(param_a, param_b, param_c): assert input_namespace['param_c'].help is None -def test_help_text_spec_inference_invalid_docstring(aiida_caplog, monkeypatch): +def test_help_text_spec_inference_invalid_docstring(caplog, monkeypatch): """Test the parsing of docstrings does not except for invalid docstrings, but simply logs a warning.""" import docstring_parser @@ -785,4 +785,4 @@ def function(): # Now call the spec to have it parse the docstring. 
function.spec() - assert 'function `function` has a docstring that could not be parsed' in aiida_caplog.records[0].message + assert 'function `function` has a docstring that could not be parsed' in caplog.records[0].message diff --git a/tests/engine/test_runners.py b/tests/engine/test_runners.py index ba04469de2..4f746e5d34 100644 --- a/tests/engine/test_runners.py +++ b/tests/engine/test_runners.py @@ -83,13 +83,13 @@ def test_submit(runner): runner.submit(Proc, **inputs) -def test_run_return_value_cached(aiida_local_code_factory): +def test_run_return_value_cached(aiida_code_installed): """Test that :meth:`aiida.engine.runners.Runner._run` return process results even when cached. Regression test for https://github.com/aiidateam/aiida-core/issues/5994. """ inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), + 'code': aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), 'x': Int(1), 'y': Int(-2), } diff --git a/tests/manage/tests/test_pytest_fixtures.py b/tests/manage/tests/test_pytest_fixtures.py index 49d41aee86..ab2e9d82b6 100644 --- a/tests/manage/tests/test_pytest_fixtures.py +++ b/tests/manage/tests/test_pytest_fixtures.py @@ -2,6 +2,7 @@ import uuid +import pytest from aiida.manage.configuration import get_config from aiida.manage.configuration.config import Config from aiida.orm import Computer @@ -18,12 +19,7 @@ def test_aiida_localhost(aiida_localhost): assert aiida_localhost.label == 'localhost' -def test_aiida_local_code(aiida_local_code_factory): - """Test the ``aiida_local_code_factory`` fixture.""" - code = aiida_local_code_factory(entry_point='core.templatereplacer', executable='diff') - assert code.computer.label == 'localhost' - - +@pytest.mark.usefixtures('aiida_profile_clean') def test_aiida_computer_local(aiida_computer_local): """Test the ``aiida_computer_local`` fixture.""" computer = aiida_computer_local() @@ -46,6 +42,7 @@ def test_aiida_computer_local(aiida_computer_local): assert not computer_unconfigured.is_configured +@pytest.mark.usefixtures('aiida_profile_clean') def test_aiida_computer_ssh(aiida_computer_ssh): """Test the ``aiida_computer_ssh`` fixture.""" computer = aiida_computer_ssh() diff --git a/tests/orm/data/code/test_installed.py b/tests/orm/data/code/test_installed.py index 016f8f7a10..5e1fda4273 100644 --- a/tests/orm/data/code/test_installed.py +++ b/tests/orm/data/code/test_installed.py @@ -99,6 +99,7 @@ def computer(request, aiida_computer_local, aiida_computer_ssh): raise ValueError(f'unsupported request parameter: {request.param}') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('computer', ('core.local', 'core.ssh'), indirect=True) def test_validate_filepath_executable(ssh_key, computer): """Test the :meth:`aiida.orm.nodes.data.code.installed.InstalledCode.validate_filepath_executable` method.""" diff --git a/tests/orm/nodes/process/test_process.py b/tests/orm/nodes/process/test_process.py index 93c0dbc98d..7fe3f30703 100644 --- a/tests/orm/nodes/process/test_process.py +++ b/tests/orm/nodes/process/test_process.py @@ -76,10 +76,10 @@ def test_is_valid_cache(process_nodes): assert node.base.caching.is_valid_cache == is_valid_cache, node -def test_get_builder_restart(aiida_local_code_factory): +def test_get_builder_restart(aiida_code_installed): """Test :meth:`aiida.orm.nodes.process.process.ProcessNode.get_builder_restart`.""" inputs = { - 'code': aiida_local_code_factory('core.arithmetic.add', '/bin/bash'), + 'code': 
aiida_code_installed(default_calc_job_plugin='core.arithmetic.add', filepath_executable='/bin/bash'), 'x': Int(1), 'y': Int(1), 'metadata': {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}}, diff --git a/tests/orm/test_fields.py b/tests/orm/test_fields.py index 7126c6d405..46d69d20a7 100644 --- a/tests/orm/test_fields.py +++ b/tests/orm/test_fields.py @@ -85,7 +85,8 @@ def test_disallowed_alias_for_db_field(): ) -def test_query_new_class(clear_database_before_test, monkeypatch): +@pytest.mark.usefixtures('aiida_profile_clean') +def test_query_new_class(monkeypatch): """Test that fields are correctly registered on a new data class, and can be used in a query. """ @@ -186,7 +187,8 @@ def test_filter_comparators(): } -def test_query_filters(clear_database_before_test): +@pytest.mark.usefixtures('aiida_profile_clean') +def test_query_filters(): """Test using fields to generate a query filter.""" node = orm.Data().store() orm.Data().store() @@ -203,7 +205,8 @@ def test_query_filters(clear_database_before_test): assert result == [[node.pk]] -def test_query_subscriptable(clear_database_before_test): +@pytest.mark.usefixtures('aiida_profile_clean') +def test_query_subscriptable(): """Test using subscriptable fields in a query.""" node = orm.Dict(dict={'a': 1}).store() node.base.extras.set('b', 2) diff --git a/tests/schedulers/test_direct.py b/tests/schedulers/test_direct.py index 4c5a20bda5..8879bdc88e 100644 --- a/tests/schedulers/test_direct.py +++ b/tests/schedulers/test_direct.py @@ -54,12 +54,12 @@ def test_parse_joblist_output_incorrect(scheduler): scheduler._parse_joblist_output(retval=0, stdout='aaa', stderr='') -def test_submit_script_rerunnable(scheduler, template, aiida_caplog): +def test_submit_script_rerunnable(scheduler, template, caplog): """Test that setting the ``rerunnable`` option gives a warning.""" template.rerunnable = True scheduler.get_submit_script(template) - assert 'rerunnable' in aiida_caplog.text - assert 'has no effect' in aiida_caplog.text + assert 'rerunnable' in caplog.text + assert 'has no effect' in caplog.text def test_submit_script_with_num_cores_per_mpiproc(scheduler, template): diff --git a/tests/storage/psql_dos/test_alembic_cli.py b/tests/storage/psql_dos/test_alembic_cli.py index a04618b4a5..709c10bfbf 100644 --- a/tests/storage/psql_dos/test_alembic_cli.py +++ b/tests/storage/psql_dos/test_alembic_cli.py @@ -16,5 +16,5 @@ def test_history(): """Test the 'history' command.""" runner = CliRunner() result = runner.invoke(alembic_cli, ['history']) - assert result.exit_code == 0 + assert result.exit_code == 0, result.output assert 'head' in result.output diff --git a/tests/storage/sqlite_zip/test_backend.py b/tests/storage/sqlite_zip/test_backend.py index ee07111929..7b42f8455c 100644 --- a/tests/storage/sqlite_zip/test_backend.py +++ b/tests/storage/sqlite_zip/test_backend.py @@ -8,7 +8,7 @@ from pydantic_core import ValidationError -def test_initialise(tmp_path, aiida_caplog): +def test_initialise(tmp_path, caplog): """Test :meth:`aiida.storage.sqlite_zip.backend.SqliteZipBackend.initialise`.""" filepath_archive = tmp_path / 'archive.zip' profile = SqliteZipBackend.create_profile(filepath_archive) @@ -17,10 +17,10 @@ def test_initialise(tmp_path, aiida_caplog): assert filepath_archive.exists() validate_storage(filepath_archive) - assert any('Initialising a new SqliteZipBackend' in record.message for record in aiida_caplog.records) + assert any('Initialising a new SqliteZipBackend' in record.message for record in caplog.records) 
-def test_initialise_reset_true(tmp_path, aiida_caplog): +def test_initialise_reset_true(tmp_path, caplog): """Test :meth:`aiida.storage.sqlite_zip.backend.SqliteZipBackend.initialise` with ``reset=True``.""" filepath_archive = tmp_path / 'archive.zip' filepath_archive.touch() @@ -30,10 +30,10 @@ def test_initialise_reset_true(tmp_path, aiida_caplog): assert filepath_archive.exists() validate_storage(filepath_archive) - assert any('Resetting existing SqliteZipBackend' in record.message for record in aiida_caplog.records) + assert any('Resetting existing SqliteZipBackend' in record.message for record in caplog.records) -def test_initialise_reset_false(tmp_path, aiida_caplog): +def test_initialise_reset_false(tmp_path, caplog): """Test :meth:`aiida.storage.sqlite_zip.backend.SqliteZipBackend.initialise` with ``reset=True``.""" filepath_archive = tmp_path / 'archive.zip' @@ -48,7 +48,7 @@ def test_initialise_reset_false(tmp_path, aiida_caplog): assert filepath_archive.exists() validate_storage(filepath_archive) - assert any('Migrating existing SqliteZipBackend' in record.message for record in aiida_caplog.records) + assert any('Migrating existing SqliteZipBackend' in record.message for record in caplog.records) @pytest.mark.usefixtures('chdir_tmp_path') diff --git a/tests/tools/archive/migration/test_legacy_to_main.py b/tests/tools/archive/migration/test_legacy_to_main.py index edbb51314d..a0b51134a4 100644 --- a/tests/tools/archive/migration/test_legacy_to_main.py +++ b/tests/tools/archive/migration/test_legacy_to_main.py @@ -22,11 +22,11 @@ def test_dangling_links(tmp_path): migrate(filepath_archive, tmp_path / 'archive.aiida', 'main_0001') -def test_missing_nodes_in_groups(tmp_path, aiida_caplog): +def test_missing_nodes_in_groups(tmp_path, caplog): """Test that groups with listed node UUIDs that are not in the archive are correctly handled.""" filepath_archive = get_archive_file('0.10_unknown_nodes_in_group.aiida', 'export/migrate') migrate(filepath_archive, tmp_path / 'archive.aiida', 'main_0001') - assert 'Dropped unknown nodes in groups' in aiida_caplog.text, aiida_caplog.text + assert 'Dropped unknown nodes in groups' in caplog.text, caplog.text def test_fields_with_null_values(tmp_path): diff --git a/tests/tools/archive/migration/test_prov_redesign.py b/tests/tools/archive/migration/test_prov_redesign.py index 6ba76f9c9d..6beb0de2e4 100644 --- a/tests/tools/archive/migration/test_prov_redesign.py +++ b/tests/tools/archive/migration/test_prov_redesign.py @@ -25,7 +25,7 @@ def test_base_data_type_change(tmp_path, aiida_profile): """Base Data types type string changed Example: Bool: “data.base.Bool.” → “data.bool.Bool.” """ - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Test content test_content = ('Hello', 6, -1.2399834e12, False) @@ -56,7 +56,7 @@ def test_base_data_type_change(tmp_path, aiida_profile): create_archive(export_nodes, filename=filename) # Clean the database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Import nodes again import_archive(filename) @@ -88,7 +88,7 @@ def test_node_process_type(aiida_profile, tmp_path): from tests.utils.processes import AddProcess - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Node types node_type = 'process.workflow.WorkflowNode.' 
@@ -110,7 +110,7 @@ def test_node_process_type(aiida_profile, tmp_path): create_archive([node], filename=filename) # Clean the database and reimport data - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename) # Retrieve node and check exactly one node is imported @@ -152,7 +152,7 @@ def test_code_type_change(aiida_profile, tmp_path, aiida_localhost): create_archive([code], filename=filename) # Clean the database and reimport - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename) # Retrieve Code node and make sure exactly 1 is retrieved @@ -195,7 +195,7 @@ def test_group_name_and_type_change(tmp_path, aiida_profile): """ from aiida.orm.nodes.data.upf import upload_upf_family - aiida_profile.clear_profile() + aiida_profile.reset_storage() # To be saved groups_label = ['Users', 'UpfData'] @@ -238,7 +238,7 @@ def test_group_name_and_type_change(tmp_path, aiida_profile): create_archive([group_user, group_upf], filename=filename) # Clean the database and reimport - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename) # Retrieve Groups and make sure exactly 3 are retrieved (including the "import group") diff --git a/tests/tools/archive/orm/test_attributes.py b/tests/tools/archive/orm/test_attributes.py index ad9bcc10eb..440f0b25ce 100644 --- a/tests/tools/archive/orm/test_attributes.py +++ b/tests/tools/archive/orm/test_attributes.py @@ -14,7 +14,7 @@ def test_import_of_attributes(tmp_path, aiida_profile): """Check if attributes are properly imported""" - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Create Data with attributes data = orm.Data() data.label = 'my_test_data_node' @@ -26,7 +26,7 @@ def test_import_of_attributes(tmp_path, aiida_profile): create_archive([data], filename=export_file) # Clean db - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) builder = orm.QueryBuilder().append(orm.Data, filters={'label': 'my_test_data_node'}) diff --git a/tests/tools/archive/orm/test_authinfo.py b/tests/tools/archive/orm/test_authinfo.py index bd0d362f68..e20c51270a 100644 --- a/tests/tools/archive/orm/test_authinfo.py +++ b/tests/tools/archive/orm/test_authinfo.py @@ -44,7 +44,7 @@ def test_import_authinfo(aiida_profile, tmp_path, aiida_localhost): """Test archive import, including authinfo""" filename1 = tmp_path / 'export1.aiida' create_archive([aiida_localhost], filename=filename1, include_authinfos=True) - aiida_profile.clear_profile() + aiida_profile.reset_storage() # create a computer + authinfo, so that the PKs are different than the original ones # (to check that they are correctly translated) computer = orm.Computer( diff --git a/tests/tools/archive/orm/test_calculations.py b/tests/tools/archive/orm/test_calculations.py index 927ea9f48f..de7b9058fe 100644 --- a/tests/tools/archive/orm/test_calculations.py +++ b/tests/tools/archive/orm/test_calculations.py @@ -19,7 +19,7 @@ @pytest.mark.requires_rmq def test_calcfunction(tmp_path, aiida_profile): """Test @calcfunction""" - aiida_profile.clear_profile() + aiida_profile.reset_storage() @calcfunction def add(a, b): @@ -42,7 +42,7 @@ def max_(**kwargs): # At this point we export the generated data filename1 = tmp_path / 'export1.aiida' create_archive([res], filename=filename1, return_backward=True) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename1) # Check that the imported nodes are correctly imported and that the value is preserved for uuid, 
value in uuids_values: @@ -54,7 +54,7 @@ def max_(**kwargs): def test_workcalculation(tmp_path, aiida_profile): """Test simple master/slave WorkChainNodes""" - aiida_profile.clear_profile() + aiida_profile.reset_storage() master = orm.WorkChainNode() slave = orm.WorkChainNode() @@ -77,7 +77,7 @@ def test_workcalculation(tmp_path, aiida_profile): uuids_values = [(v.uuid, v.value) for v in (output_1,)] filename1 = tmp_path / 'export1.aiida' create_archive([output_1], filename=filename1) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename1) for uuid, value in uuids_values: diff --git a/tests/tools/archive/orm/test_codes.py b/tests/tools/archive/orm/test_codes.py index a54257cbbd..4ac55584d2 100644 --- a/tests/tools/archive/orm/test_codes.py +++ b/tests/tools/archive/orm/test_codes.py @@ -29,7 +29,7 @@ def test_that_solo_code_is_exported_correctly(aiida_profile, tmp_path, aiida_loc export_file = tmp_path / 'export.aiida' create_archive([code], filename=export_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) @@ -60,7 +60,7 @@ def test_input_code(aiida_profile, tmp_path, aiida_localhost): export_file = tmp_path / 'export.aiida' create_archive([calc], filename=export_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) @@ -90,7 +90,7 @@ def test_solo_code(aiida_profile, tmp_path, aiida_localhost): export_file = tmp_path / 'export.aiida' create_archive([code], filename=export_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) diff --git a/tests/tools/archive/orm/test_comments.py b/tests/tools/archive/orm/test_comments.py index 66b5987b5c..61935827d2 100644 --- a/tests/tools/archive/orm/test_comments.py +++ b/tests/tools/archive/orm/test_comments.py @@ -46,7 +46,7 @@ def test_multiple_imports_for_single_node(tmp_path, aiida_profile): create_archive([node], filename=export_file_full) # Clean database and reimport "EXISTING" DB - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file_existing) # Check correct import @@ -113,7 +113,7 @@ def test_exclude_comments_flag(tmp_path, aiida_profile): create_archive([node], filename=export_file, include_comments=False) # Clean database and reimport exported file - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) # Get node, users, and comments @@ -157,7 +157,7 @@ def test_calc_and_data_nodes_with_comments(tmp_path, aiida_profile): create_archive([calc_node, data_node], filename=export_file) # Clean database and reimport exported file - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) # Get nodes and comments @@ -208,7 +208,7 @@ def test_multiple_user_comments_single_node(tmp_path, aiida_profile): create_archive([node], filename=export_file) # Clean database and reimport exported file - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) # Get node, users, and comments @@ -293,7 +293,7 @@ def test_mtime_of_imported_comments(tmp_path, aiida_profile_clean): # Export, reset database and reimport export_file = tmp_path / 'export.aiida' create_archive([calc], filename=export_file) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(export_file) # Retrieve node and comment @@ -470,7 +470,7 @@ def test_reimport_of_comments_for_single_node(tmp_path, aiida_profile_clean): create_archive([calc], 
filename=export_file_full) # Clean database - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() ## Part II # Reimport "EXISTING" DB @@ -509,7 +509,7 @@ def test_reimport_of_comments_for_single_node(tmp_path, aiida_profile_clean): create_archive([calc], filename=export_file_new) # Clean database - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() ## Part III # Reimport "EXISTING" DB @@ -578,7 +578,7 @@ def test_import_newest(tmp_path, aiida_profile): export_file_new = tmp_path / 'export_new.aiida' create_archive([node], filename=export_file_new, include_comments=True) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file_old) assert orm.Comment.collection.get(uuid=comment_1_uuid).content == 'Comment old' diff --git a/tests/tools/archive/orm/test_computers.py b/tests/tools/archive/orm/test_computers.py index db8eeaf231..e3807c9a48 100644 --- a/tests/tools/archive/orm/test_computers.py +++ b/tests/tools/archive/orm/test_computers.py @@ -58,7 +58,7 @@ def test_same_computer_import(aiida_profile, tmp_path, aiida_localhost): create_archive([calc2], filename=filename2) # Clean the local database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Check that there are no computers builder = orm.QueryBuilder() @@ -153,7 +153,7 @@ def test_same_computer_different_name_import(aiida_profile, tmp_path, aiida_loca create_archive([calc2], filename=filename2) # Clean the local database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Check that there are no computers builder = orm.QueryBuilder() @@ -212,7 +212,7 @@ def test_different_computer_same_name_import(aiida_profile, tmp_path, aiida_loca create_archive([calc1], filename=filename1) # Reset the database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Store a second calculation calc2_label = 'calc2' @@ -228,7 +228,7 @@ def test_different_computer_same_name_import(aiida_profile, tmp_path, aiida_loca create_archive([calc2], filename=filename2) # Reset the database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Store a third calculation calc3_label = 'calc3' @@ -244,7 +244,7 @@ def test_different_computer_same_name_import(aiida_profile, tmp_path, aiida_loca create_archive([calc3], filename=filename3) # Clean the local database - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Check that there are no computers builder = orm.QueryBuilder() @@ -294,7 +294,7 @@ def test_import_of_computer_json_params(aiida_profile_clean, tmp_path, aiida_loc create_archive([calc1], filename=filename1) # Clean the local database - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() # Import the data import_archive(filename1) diff --git a/tests/tools/archive/orm/test_extras.py b/tests/tools/archive/orm/test_extras.py index de1d1079f7..8435e44e32 100644 --- a/tests/tools/archive/orm/test_extras.py +++ b/tests/tools/archive/orm/test_extras.py @@ -22,7 +22,7 @@ def new_archive(aiida_profile, tmp_path): data.base.extras.set_many({'b': 2, 'c': 3}) archive_file = tmp_path / 'export.aiida' create_archive([data], filename=archive_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() yield archive_file diff --git a/tests/tools/archive/orm/test_groups.py b/tests/tools/archive/orm/test_groups.py index e23fa6c88f..f4651248b6 100644 --- a/tests/tools/archive/orm/test_groups.py +++ b/tests/tools/archive/orm/test_groups.py @@ -51,7 +51,7 @@ def test_nodes_in_group(aiida_profile, tmp_path, 
aiida_localhost): filename1 = tmp_path / 'export1.aiida' create_archive([sd1, jc1, gr1], filename=filename1) n_uuids = [sd1.uuid, jc1.uuid] - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename1) # Check that the imported nodes are correctly imported and that @@ -89,7 +89,7 @@ def test_group_export(tmp_path, aiida_profile): filename = tmp_path / 'export.aiida' create_archive([group], filename=filename) n_uuids = [sd1.uuid] - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename) # Check that the imported nodes are correctly imported and that @@ -130,7 +130,7 @@ def test_group_import_existing(tmp_path, aiida_profile): # At this point we export the generated data filename = tmp_path / 'export1.aiida' create_archive([group], filename=filename) - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Creating a group of the same name group = orm.Group(label='node_group_existing') @@ -167,7 +167,7 @@ def test_import_to_group(tmp_path, aiida_profile): # Export Nodes filename = tmp_path / 'export.aiida' create_archive([data1, data2], filename=filename) - aiida_profile.clear_profile() + aiida_profile.reset_storage() # Create Group, do not store group_label = 'import_madness' diff --git a/tests/tools/archive/orm/test_links.py b/tests/tools/archive/orm/test_links.py index 630a4a4daf..83203def2a 100644 --- a/tests/tools/archive/orm/test_links.py +++ b/tests/tools/archive/orm/test_links.py @@ -47,7 +47,7 @@ def test_links_to_unknown_nodes(tmp_path, aiida_profile): with ArchiveFormatSqlZip().open(filename, 'r') as archive: assert archive.querybuilder().append(entity_type='link').count() == 1 - aiida_profile.clear_profile() + aiida_profile.reset_storage() # since the query builder only looks for links between known nodes, # this should not import the erroneous link @@ -75,7 +75,7 @@ def test_input_and_create_links(tmp_path, aiida_profile): export_file = tmp_path.joinpath('export.aiida') create_archive([node_output], filename=export_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) import_links = get_all_node_links() @@ -252,7 +252,7 @@ def test_complex_workflow_graph_links(aiida_profile_clean, tmp_path, aiida_local export_file = tmp_path.joinpath('export.aiida') create_archive(graph_nodes, filename=export_file) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(export_file) import_links = get_all_node_links() @@ -273,7 +273,7 @@ def test_complex_workflow_graph_export_sets(aiida_profile, tmp_path, aiida_local create_archive([export_node], filename=export_file, overwrite=True) export_node_str = str(export_node) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) @@ -319,7 +319,7 @@ def test_high_level_workflow_links(aiida_profile, tmp_path, aiida_localhost_fact for calcs in high_level_calc_nodes: for works in high_level_work_nodes: - aiida_profile.clear_profile() + aiida_profile.reset_storage() graph_nodes, _ = construct_complex_graph(aiida_localhost_factory, calc_nodes=calcs, work_nodes=works) @@ -351,7 +351,7 @@ def test_high_level_workflow_links(aiida_profile, tmp_path, aiida_localhost_fact export_file = tmp_path.joinpath('export.aiida') create_archive(graph_nodes, filename=export_file, overwrite=True) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) import_links = get_all_node_links() @@ -585,10 +585,10 @@ def test_link_flags(aiida_profile, tmp_path, 
aiida_localhost_factory): ), ) - link_flags_import_helper(input_links_forward, aiida_profile.clear_profile) - link_flags_import_helper(create_return_links_backward, aiida_profile.clear_profile) - link_flags_import_helper(call_links_backward_calc1, aiida_profile.clear_profile) - link_flags_import_helper(call_links_backward_work2, aiida_profile.clear_profile) + link_flags_import_helper(input_links_forward, aiida_profile.reset_storage) + link_flags_import_helper(create_return_links_backward, aiida_profile.reset_storage) + link_flags_import_helper(call_links_backward_calc1, aiida_profile.reset_storage) + link_flags_import_helper(call_links_backward_work2, aiida_profile.reset_storage) def test_double_return_links_for_workflows(tmp_path, aiida_profile_clean): @@ -616,7 +616,7 @@ def test_double_return_links_for_workflows(tmp_path, aiida_profile_clean): export_file = tmp_path.joinpath('export.aiida') create_archive([data_out, work1, work2, data_in], filename=export_file) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(export_file) @@ -655,7 +655,7 @@ def test_multiple_post_return_links(tmp_path, aiida_profile_clean): create_archive([data], filename=data_provenance, return_backward=False) create_archive([data], filename=all_provenance, return_backward=True) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() # import data provenance import_archive(data_provenance) diff --git a/tests/tools/archive/orm/test_logs.py b/tests/tools/archive/orm/test_logs.py index 64ca715aa3..a6c80edb6b 100644 --- a/tests/tools/archive/orm/test_logs.py +++ b/tests/tools/archive/orm/test_logs.py @@ -33,7 +33,7 @@ def test_critical_log_msg_and_metadata(tmp_path, aiida_profile): export_file = tmp_path.joinpath('export.aiida') create_archive([calc], filename=export_file) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) @@ -65,7 +65,7 @@ def test_exclude_logs_flag(tmp_path, aiida_profile): create_archive([calc], filename=export_file, include_logs=False) # Clean database and reimport exported data - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) # Finding all the log messages @@ -102,7 +102,7 @@ def test_export_of_imported_logs(tmp_path, aiida_profile): create_archive([calc], filename=export_file) # Clean database and reimport exported data - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(export_file) # Finding all the log messages @@ -123,7 +123,7 @@ def test_export_of_imported_logs(tmp_path, aiida_profile): create_archive([calc], filename=re_export_file) # Clean database and reimport exported data - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(re_export_file) # Finding all the log messages @@ -166,7 +166,7 @@ def test_multiple_imports_for_single_node(tmp_path, aiida_profile_clean): create_archive([node], filename=export_file_full) # Clean database and reimport "EXISTING" DB - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(export_file_existing) # Check correct import @@ -289,7 +289,7 @@ def test_reimport_of_logs_for_single_node(tmp_path, aiida_profile_clean): create_archive([calc], filename=export_file_full) # Clean database - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() ## Part II # Reimport "EXISTING" DB @@ -327,7 +327,7 @@ def test_reimport_of_logs_for_single_node(tmp_path, aiida_profile_clean): create_archive([calc], 
filename=export_file_new) # Clean database - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() ## Part III # Reimport "EXISTING" DB diff --git a/tests/tools/archive/orm/test_users.py b/tests/tools/archive/orm/test_users.py index ad961ed311..10601981bf 100644 --- a/tests/tools/archive/orm/test_users.py +++ b/tests/tools/archive/orm/test_users.py @@ -66,7 +66,7 @@ def test_nodes_belonging_to_different_users(aiida_profile, tmp_path, aiida_local filename = tmp_path.joinpath('export.aiida') create_archive([sd3], filename=filename) - aiida_profile.clear_profile() + aiida_profile.reset_storage() import_archive(filename) # Check that the imported nodes are correctly imported and that @@ -121,7 +121,7 @@ def test_non_default_user_nodes(aiida_profile_clean, tmp_path, aiida_localhost_f filename1 = tmp_path.joinpath('export1.aiidaz') create_archive([sd2], filename=filename1) uuids1 = [sd1.uuid, jc1.uuid, sd2.uuid] - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(filename1) # Check that the imported nodes are correctly imported and that @@ -152,7 +152,7 @@ def test_non_default_user_nodes(aiida_profile_clean, tmp_path, aiida_localhost_f filename2 = tmp_path.joinpath('export2.aiida') create_archive([sd3], filename=filename2) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(filename2) # Check that the imported nodes are correctly imported and that diff --git a/tests/tools/archive/test_complex.py b/tests/tools/archive/test_complex.py index 252d8d8f9c..3bb132c308 100644 --- a/tests/tools/archive/test_complex.py +++ b/tests/tools/archive/test_complex.py @@ -78,7 +78,7 @@ def test_complex_graph_import_export(aiida_profile_clean, tmp_path, aiida_localh filename = tmp_path / 'export.aiida' create_archive([fd1], filename=filename) - aiida_profile_clean.clear_profile() + aiida_profile_clean.reset_storage() import_archive(filename) @@ -150,7 +150,7 @@ def test_reexport(aiida_profile_clean, tmp_path): # this also checks if group memberships are preserved! create_archive([group] + list(group.nodes), filename=filename) # cleaning the DB! 
-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()
     # reimporting the data from the file
     import_archive(filename)
     # creating the hash from db content
diff --git a/tests/tools/archive/test_repository.py b/tests/tools/archive/test_repository.py
index cb96bda752..b93bc3e862 100644
--- a/tests/tools/archive/test_repository.py
+++ b/tests/tools/archive/test_repository.py
@@ -27,7 +27,7 @@ def test_export_repository(aiida_profile_clean, tmp_path):
     filepath = os.path.join(tmp_path / 'export.aiida')
     create_archive([node], filename=filepath)

-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()

     import_archive(filepath)
     loaded = orm.load_node(uuid=node_uuid)
@@ -53,7 +53,7 @@ def test_export_repository_after_maintain(aiida_profile_clean, tmp_path):
     filepath = os.path.join(tmp_path / 'export.aiida')
     create_archive([node], filename=filepath)

-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()

     import_archive(filepath)
     loaded = orm.load_node(uuid=node_uuid)
diff --git a/tests/tools/archive/test_simple.py b/tests/tools/archive/test_simple.py
index 5000fafd5c..0e97ad9409 100644
--- a/tests/tools/archive/test_simple.py
+++ b/tests/tools/archive/test_simple.py
@@ -45,7 +45,7 @@ def test_base_data_nodes(aiida_profile, tmp_path, entities):
     # actually export now
     create(filename=filename)
     # cleaning:
-    aiida_profile.clear_profile()
+    aiida_profile.reset_storage()
     # Importing back the data:
     import_archive(filename)
     # Checking whether values are preserved:
@@ -79,7 +79,7 @@ def test_calc_of_structuredata(aiida_profile, tmp_path, aiida_localhost):

     create_archive([calc], filename=filename)

-    aiida_profile.clear_profile()
+    aiida_profile.reset_storage()

     import_archive(filename)

     for uuid, value in attrs.items():
@@ -111,7 +111,7 @@ def test_check_for_export_format_version(aiida_profile, tmp_path):
             (outpath / subpath.at).write_bytes(subpath.read_bytes())

     # then try to import it
-    aiida_profile.clear_profile()
+    aiida_profile.reset_storage()
     with pytest.raises(IncompatibleStorageSchema):
         import_archive(filename2)

diff --git a/tests/tools/archive/test_specific_import.py b/tests/tools/archive/test_specific_import.py
index 8832ddb55a..c7fed34a8e 100644
--- a/tests/tools/archive/test_specific_import.py
+++ b/tests/tools/archive/test_specific_import.py
@@ -38,7 +38,7 @@ def test_simple_import(aiida_profile_clean, tmp_path):
     assert orm.QueryBuilder().append(orm.Node).count() == len(nodes)

     # Clean the database and verify there are no nodes left
-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()
     assert orm.QueryBuilder().append(orm.Node).count() == 0

     # After importing we should have the original number of nodes again
@@ -94,7 +94,7 @@ def test_cycle_structure_data(aiida_profile_clean, aiida_localhost, tmp_path):
     assert orm.QueryBuilder().append(orm.Node).count() == len(nodes)

     # Clean the database and verify there are no nodes left
-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()
     assert orm.QueryBuilder().append(orm.Node).count() == 0

     # After importing we should have the original number of nodes again
@@ -155,7 +155,7 @@ def test_import_checkpoints(aiida_profile_clean, tmp_path):
     assert orm.QueryBuilder().append(orm.Node).count() == len(nodes)

     # Clean the database and verify there are no nodes left
-    aiida_profile_clean.clear_profile()
+    aiida_profile_clean.reset_storage()
     assert orm.QueryBuilder().append(orm.Node).count() == 0

     import_archive(archive_path)
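
All of the test updates above apply the same rename: the object returned by the ``aiida_profile`` (or ``aiida_profile_clean``) fixture now exposes ``reset_storage()`` instead of ``clear_profile()``, while the surrounding export/reset/import round trip is unchanged. The following is a minimal sketch of that round trip as a plugin test might write it, using only the fixtures and functions shown above; the test name and the ``Int`` value are illustrative and not part of this patch::

    from aiida import orm
    from aiida.tools.archive import create_archive, import_archive


    def test_archive_roundtrip(aiida_profile, tmp_path):
        """Illustrative only: export a node, wipe the test storage, then re-import it."""
        node = orm.Int(5).store()
        uuid = node.uuid

        filename = tmp_path / 'export.aiida'
        create_archive([node], filename=filename)

        # `reset_storage` replaces the old `clear_profile` and empties the test profile's storage.
        aiida_profile.reset_storage()
        assert orm.QueryBuilder().append(orm.Node).count() == 0

        import_archive(filename)
        assert orm.load_node(uuid=uuid).value == 5

The assertions mirror those in ``test_specific_import.py``: the storage is verified to be empty after the reset, and the node is recovered from the archive after the import.
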
diff --git a/tests/tools/pytest_fixtures/test_configuration.py b/tests/tools/pytest_fixtures/test_configuration.py
new file mode 100644
index 0000000000..574d0d4f6a
--- /dev/null
+++ b/tests/tools/pytest_fixtures/test_configuration.py
@@ -0,0 +1,40 @@
+"""Test the pytest fixtures."""
+
+import tempfile
+
+
+def test_aiida_config():
+    """Test that ``aiida_config`` fixture is loaded by default and creates a config instance in temp directory."""
+    from aiida.manage.configuration import get_config
+    from aiida.manage.configuration.config import Config
+
+    config = get_config()
+    assert isinstance(config, Config)
+    assert config.dirpath.startswith(tempfile.gettempdir())
+
+
+def test_aiida_config_tmp(aiida_config_tmp):
+    """Test that ``aiida_config_tmp`` returns a config instance in temp directory."""
+    from aiida.manage.configuration.config import Config
+
+    assert isinstance(aiida_config_tmp, Config)
+    assert aiida_config_tmp.dirpath.startswith(tempfile.gettempdir())
+
+
+def test_aiida_profile():
+    """Test that ``aiida_profile`` fixture is loaded by default and loads a temporary test profile."""
+    from aiida.manage.configuration import get_profile
+    from aiida.manage.configuration.profile import Profile
+
+    profile = get_profile()
+    assert isinstance(profile, Profile)
+    assert profile.is_test_profile
+
+
+def test_aiida_profile_tmp(aiida_profile, aiida_profile_tmp):
+    """Test that ``aiida_profile_tmp`` returns a new profile instance in temporary config directory."""
+    from aiida.manage.configuration.profile import Profile
+
+    assert isinstance(aiida_profile_tmp, Profile)
+    assert aiida_profile_tmp.is_test_profile
+    assert aiida_profile_tmp.uuid != aiida_profile.uuid
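
The new ``aiida_profile_tmp`` and ``aiida_config_tmp`` fixtures exercised above provide a profile and configuration directory that exist only for the duration of a single test. Below is a short sketch of how a plugin test might rely on that isolation, assuming (as the ``uuid`` comparison above implies) that the temporary profile is the one loaded while the test body runs; the test name and stored value are illustrative::

    from aiida import orm


    def test_isolated_storage(aiida_profile, aiida_profile_tmp):
        """Illustrative only: nodes stored here are assumed to end up in the throwaway profile's storage."""
        # The temporary profile is distinct from the session-wide test profile.
        assert aiida_profile_tmp.uuid != aiida_profile.uuid

        # Anything stored now should live in the temporary profile and disappear with it.
        node = orm.Int(1).store()
        assert orm.load_node(node.pk).value == 1
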