From 4e31c3319653784b656450bb150cf43f14640f12 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 1 Oct 2024 15:30:25 +0200 Subject: [PATCH 001/168] feat: add testnet cleanup on interrupted test run - Introduced `_cleanup_testnet` function to handle testnet cleanup. - Modified the trap to call `_interrupted` function on SIGINT. - Added `pytest_keyboard_interrupt` hook to create a status file. - Updated `setup_venv.sh` to create venv only if it doesn't exist. - Enhanced `testenv_setup_teardown` to handle interrupted test runs. --- .github/regression.sh | 37 +++++++++++++++++++++++++--- .github/setup_venv.sh | 19 ++++++++++---- cardano_node_tests/tests/conftest.py | 21 ++++++++++++++-- 3 files changed, 67 insertions(+), 10 deletions(-) diff --git a/.github/regression.sh b/.github/regression.sh index 8b11066f2..44c30be0b 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -1,5 +1,5 @@ #! /usr/bin/env -S nix develop --accept-flake-config .#base -c bash -# shellcheck shell=bash +# shellcheck shell=bash disable=SC2317 set -xeuo pipefail @@ -139,8 +139,39 @@ _cleanup() { stop_postgres || true } +_cleanup_testnet() { + [ -z "${BOOTSTRAP_DIR:-""}" ] && return + + _PYTEST_CURRENT="$(find "$WORKDIR" -type l -name pytest-current)" + [ -z "$_PYTEST_CURRENT" ] && return + _PYTEST_CURRENT="$(readlink -m "$_PYTEST_CURRENT")" + export _PYTEST_CURRENT + + echo "::endgroup::" # end group for the group that was interrupted + echo "::group::Testnet cleanup" + + # shellcheck disable=SC2016 + nix develop --accept-flake-config .#venv --command bash -c ' + . 
.github/setup_venv.sh + export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" + export CARDANO_NODE_SOCKET_PATH="$CARDANO_NODE_SOCKET_PATH_CI" + cleanup_dir="${_PYTEST_CURRENT}/../cleanup-${_PYTEST_CURRENT##*/}-script" + mkdir "$cleanup_dir" + cd "$cleanup_dir" + testnet-cleanup -a "$_PYTEST_CURRENT" + ' + + echo "::endgroup::" +} + # cleanup on Ctrl+C -trap 'set +e; _cleanup; exit 130' SIGINT +_interrupted() { + # Do testnet cleanup only on interrupted testrun. When not interrupted, + # cleanup is done as part of a testrun. + _cleanup_testnet + _cleanup +} +trap 'set +e; _interrupted; exit 130' SIGINT echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 @@ -156,7 +187,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' echo "::endgroup::" # end group for "Nix env setup" echo "::group::Python venv setup" - . .github/setup_venv.sh + . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup" echo "::group::Pytest run" diff --git a/.github/setup_venv.sh b/.github/setup_venv.sh index 2a839f07d..c2706a2a9 100644 --- a/.github/setup_venv.sh +++ b/.github/setup_venv.sh @@ -1,16 +1,25 @@ #!/bin/bash -VENV_DIR="${VENV_DIR:-"$WORKDIR/.env"}" +_VENV_DIR="${_VENV_DIR:-"$WORKDIR/.env"}" if [ "${1:-""}" = "clean" ]; then - rm -rf "$VENV_DIR" + rm -rf "$_VENV_DIR" +fi + +_REQS_INSTALLED="true" +if [ ! -e "$_VENV_DIR" ]; then + _REQS_INSTALLED="" + python3 -m venv "$_VENV_DIR" fi -python3 -m venv "$VENV_DIR" # shellcheck disable=SC1090,SC1091 -. "$VENV_DIR/bin/activate" +. 
"$_VENV_DIR/bin/activate" PYTHONPATH="$(echo "$VIRTUAL_ENV"/lib/python3*/site-packages):$PYTHONPATH" export PYTHONPATH -pip install -r requirements_freeze.txt +if [ -z "$_REQS_INSTALLED" ]; then + pip install -r requirements_freeze.txt +fi + +unset _VENV_DIR _REQS_INSTALLED diff --git a/cardano_node_tests/tests/conftest.py b/cardano_node_tests/tests/conftest.py index 9e54e8100..5d9df2e57 100644 --- a/cardano_node_tests/tests/conftest.py +++ b/cardano_node_tests/tests/conftest.py @@ -30,6 +30,7 @@ from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) +INTERRUPTED_NAME = ".session_interrupted" # make sure there's enough time to stop all cluster instances at the end of session workermanage.NodeManager.EXIT_TIMEOUT = 30 @@ -183,6 +184,13 @@ def _skip_disabled(item: tp.Any) -> None: _skip_disabled(item) +@pytest.hookimpl(tryfirst=True) +def pytest_keyboard_interrupt() -> None: + """Create a status file indicating that the test run was interrupted.""" + session_basetemp = temptools.get_basetemp() + (session_basetemp / INTERRUPTED_NAME).touch() + + @pytest.fixture(scope="session") def init_pytest_temp_dirs(tmp_path_factory: TempPathFactory) -> None: """Init `PytestTempDirs`.""" @@ -262,6 +270,7 @@ def testenv_setup_teardown( ) -> tp.Generator[None, None, None]: """Setup and teardown test environment.""" pytest_root_tmp = temptools.get_pytest_root_tmp() + session_basetemp = temptools.get_basetemp() running_session_glob = ".running_session" with locking.FileLockIfXdist(f"{pytest_root_tmp}/{cluster_management.CLUSTER_LOCK}"): @@ -269,19 +278,27 @@ def testenv_setup_teardown( if not list(pytest_root_tmp.glob(f"{running_session_glob}_*")): _save_env_for_allure(request.config) + # Remove dangling files from previous interrupted test run + (session_basetemp / INTERRUPTED_NAME).unlink(missing_ok=True) + + # Create file indicating that testing session on this worker is running (pytest_root_tmp / f"{running_session_glob}_{worker_id}").touch() 
yield with locking.FileLockIfXdist(f"{pytest_root_tmp}/{cluster_management.CLUSTER_LOCK}"): + # Remove file indicating that testing session on this worker is running + (pytest_root_tmp / f"{running_session_glob}_{worker_id}").unlink() + # Save CLI coverage to dir specified by `--cli-coverage-dir` cluster_manager_obj = cluster_management.ClusterManager( worker_id=worker_id, pytest_config=request.config ) cluster_manager_obj.save_worker_cli_coverage() - # Remove file indicating that testing session on this worker is running - (pytest_root_tmp / f"{running_session_glob}_{worker_id}").unlink() + # Don't do any cleanup on keyboard interrupt + if (session_basetemp / INTERRUPTED_NAME).exists(): + return None # Perform cleanup if this is the last running pytest worker if not list(pytest_root_tmp.glob(f"{running_session_glob}_*")): From c49456b0807e4a5e22ac2e8b0afbc60c2c506e6b Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 1 Oct 2024 16:37:18 +0200 Subject: [PATCH 002/168] fix(logfiles): ignore handshake errors on testnet Added logic to ignore "TrHandshakeClientError" on testnets. This error can occur when some clients are outdated or using incorrect network magic. This change helps to reduce noise in the logs and focus on relevant issues. --- cardano_node_tests/utils/logfiles.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index 714d3c243..2e299e9fd 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -60,6 +60,11 @@ # We sometimes see this error on CI. It seems time is not synced properly on GitHub runners. ERRORS_IGNORED.append("TraceBlockFromFuture") +if cluster_nodes.get_cluster_type().type == cluster_nodes.ClusterType.TESTNET: + # We can get these errors on testnets when some clients are old, or are using wrong + # network magic. 
+ ERRORS_IGNORED.append("TrHandshakeClientError") + # Errors that are ignored if there are expected messages in the log file before the error ERRORS_LOOK_BACK_LINES = 10 ERRORS_LOOK_BACK_MAP = { From 93aac1b79bba39a15bf5fb7c807c08001e53c359 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 11:59:13 +0200 Subject: [PATCH 003/168] feat(cluster): update address handling and funding logic - Create directory for faucet addresses data among test artifacts for better access during testnet cleanup. - Add more test address records to `ClusterType`. - Adjust funding logic to distribute funds evenly among new addresses. - Refactor address creation and funding logic for better clarity. --- .../cluster_management/cluster_getter.py | 8 ++- cardano_node_tests/utils/cluster_nodes.py | 59 ++++++++++++++----- 2 files changed, 51 insertions(+), 16 deletions(-) diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index 247739760..bbfc79a5b 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -256,8 +256,12 @@ def _respin(self, start_cmd: str = "", stop_cmd: str = "") -> bool: # noqa: C90 fp_out.write(cluster_instance_id) self.log(f"c{self.cluster_instance_num}: started cluster instance '{cluster_instance_id}'") - # Create dir for faucet addresses data - addr_data_dir = state_dir / common.ADDRS_DATA_DIRNAME + # Create dir for faucet addresses data among tests artifacts, so it can be accessed + # during testnet cleanup. 
+ addr_data_dir = ( + temptools.get_pytest_worker_tmp() + / f"{common.ADDRS_DATA_DIRNAME}_ci{self.cluster_instance_num}" + ) addr_data_dir.mkdir(parents=True, exist_ok=True) # Setup faucet addresses diff --git a/cardano_node_tests/utils/cluster_nodes.py b/cardano_node_tests/utils/cluster_nodes.py index 83aca2f78..0a56cbf05 100644 --- a/cardano_node_tests/utils/cluster_nodes.py +++ b/cardano_node_tests/utils/cluster_nodes.py @@ -54,7 +54,13 @@ class ClusterType: LOCAL: tp.Final[str] = "local" TESTNET: tp.Final[str] = "testnet" - test_addr_records: tp.ClassVar[tp.Tuple[str, ...]] = ("user1",) + test_addr_records: tp.ClassVar[tp.Tuple[str, ...]] = ( + "user1", + "user2", + "user3", + "user4", + "user5", + ) NODES: tp.ClassVar[tp.Set[str]] = set() @@ -129,7 +135,7 @@ def create_addrs_data( cluster_env = get_cluster_env() instance_num = cluster_env.instance_num - # create new addresses + # Create new addresses new_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {} for addr_name in self.test_addr_records: addr_name_instance = f"{addr_name}_ci{instance_num}" @@ -141,7 +147,7 @@ def create_addrs_data( "payment": payment, } - # create records for existing addresses + # Create records for existing addresses faucet_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {"faucet": {"payment": None}} byron_dir = cluster_env.state_dir / "byron" shelley_dir = cluster_env.state_dir / "shelley" @@ -162,14 +168,15 @@ def create_addrs_data( msg = "Faucet address file doesn't exist." 
raise RuntimeError(msg) - # fund new addresses from faucet address + # Fund new addresses from faucet address LOGGER.debug("Funding created addresses.") to_fund = [d["payment"] for d in new_addrs_data.values()] + amount_per_address = 100_000_000_000_000 // len(self.test_addr_records) faucet.fund_from_faucet( *to_fund, cluster_obj=cluster_obj, faucet_data=faucet_addrs_data["faucet"], - amount=100_000_000_000_000, + amount=amount_per_address, destination_dir=destination_dir, force=True, ) @@ -238,19 +245,43 @@ def create_addrs_data( destination_dir: clusterlib.FileType = ".", ) -> tp.Dict[str, tp.Dict[str, tp.Any]]: """Create addresses and their keys for usage in tests.""" + # Store record of the original faucet address shelley_dir = get_cluster_env().state_dir / "shelley" + faucet_rec = clusterlib.AddressRecord( + address=clusterlib.read_address_from_file(shelley_dir / "faucet.addr"), + vkey_file=shelley_dir / "faucet.vkey", + skey_file=shelley_dir / "faucet.skey", + ) + faucet_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = { + self.test_addr_records[1]: {"payment": faucet_rec} + } - addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {} - for addr_name in self.test_addr_records: - faucet_addr = { - "payment": clusterlib.AddressRecord( - address=clusterlib.read_address_from_file(shelley_dir / "faucet.addr"), - vkey_file=shelley_dir / "faucet.vkey", - skey_file=shelley_dir / "faucet.skey", - ) + # Create new addresses + new_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {} + for addr_name in self.test_addr_records[1:]: + payment = cluster_obj.g_address.gen_payment_addr_and_keys( + name=addr_name, + destination_dir=destination_dir, + ) + new_addrs_data[addr_name] = { + "payment": payment, } - addrs_data[addr_name] = faucet_addr + # Fund new addresses from faucet address + LOGGER.debug("Funding created addresses.") + to_fund = [d["payment"] for d in new_addrs_data.values()] + faucet_balance = cluster_obj.g_query.get_address_balance(address=faucet_rec.address) + 
amount_per_address = faucet_balance // len(self.test_addr_records) + faucet.fund_from_faucet( + *to_fund, + cluster_obj=cluster_obj, + faucet_data=faucet_addrs_data[self.test_addr_records[1]], + amount=amount_per_address, + destination_dir=destination_dir, + force=True, + ) + + addrs_data = {**new_addrs_data, **faucet_addrs_data} return addrs_data From d73be75fdbea7982223b0750c4b50fc8a4327a2a Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 12:00:22 +0200 Subject: [PATCH 004/168] feat(faucet): add support for multiple faucet sources - Added `all_faucets` parameter to `fund_from_faucet` function. - Randomly select a faucet from `all_faucets` if `faucet_data` is not provided. - Ensure either `faucet_data` or `all_faucets` is provided. --- .../cluster_management/cluster_getter.py | 7 +++---- cardano_node_tests/utils/faucet.py | 17 +++++++++++++++-- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index bbfc79a5b..44719ec26 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -12,7 +12,6 @@ import pytest from _pytest.config import Config -from cardano_clusterlib import clusterlib from cardano_node_tests.cluster_management import common from cardano_node_tests.cluster_management import resources @@ -130,7 +129,7 @@ def instance_dir(self) -> pl.Path: def _create_startup_files_dir(self, instance_num: int) -> pl.Path: _instance_dir = self.pytest_tmp_dir / f"{common.CLUSTER_DIR_TEMPLATE}{instance_num}" - rand_str = clusterlib.get_rand_str(8) + rand_str = helpers.get_rand_str(8) startup_files_dir = _instance_dir / "startup_files" / rand_str startup_files_dir.mkdir(exist_ok=True, parents=True) return startup_files_dir @@ -259,8 +258,8 @@ def _respin(self, start_cmd: str = "", stop_cmd: str = "") -> bool: # noqa: C90 # Create dir for faucet addresses 
data among tests artifacts, so it can be accessed # during testnet cleanup. addr_data_dir = ( - temptools.get_pytest_worker_tmp() - / f"{common.ADDRS_DATA_DIRNAME}_ci{self.cluster_instance_num}" + temptools.get_pytest_worker_tmp() / f"{common.ADDRS_DATA_DIRNAME}_" + f"ci{self.cluster_instance_num}_{cluster_instance_id}" ) addr_data_dir.mkdir(parents=True, exist_ok=True) diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index 20d370e14..550d0c7ce 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -1,5 +1,6 @@ import contextlib import logging +import random import typing as tp import cardano_clusterlib.types as cl_types @@ -15,14 +16,19 @@ def fund_from_faucet( *dst_addrs: tp.Union[clusterlib.AddressRecord, clusterlib.PoolUser], cluster_obj: clusterlib.ClusterLib, - faucet_data: dict, + faucet_data: tp.Optional[dict] = None, + all_faucets: tp.Optional[tp.Dict[str, dict]] = None, amount: tp.Union[int, tp.List[int]] = 1000_000_000, tx_name: tp.Optional[str] = None, destination_dir: clusterlib.FileType = ".", force: bool = False, ) -> tp.Optional[clusterlib.TxRawOutput]: """Send `amount` from faucet addr to all `dst_addrs`.""" - # get payment AddressRecord out of PoolUser + if not (faucet_data or all_faucets): + msg = "Either `faucet_data` or `all_faucets` must be provided." 
+ raise AssertionError(msg) + + # Get payment AddressRecord out of PoolUser dst_addr_records: tp.List[clusterlib.AddressRecord] = [ (r.payment if hasattr(r, "payment") else r) for r in dst_addrs # type: ignore @@ -38,6 +44,13 @@ def fund_from_faucet( if not fund_dst: return None + if not faucet_data and all_faucets: + # Randomly select one of the "user" faucets + all_user_keys = [k for k in all_faucets if k.startswith("user")] + selected_user_key = random.choice(all_user_keys) + faucet_data = all_faucets[selected_user_key] + + assert faucet_data src_address = faucet_data["payment"].address with locking.FileLockIfXdist(f"{temptools.get_basetemp()}/{src_address}.lock"): tx_name = tx_name or helpers.get_timestamped_rand_str() From 17f7c69535d8d6d5d9407c98020478cf047a98f1 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 12:07:45 +0200 Subject: [PATCH 005/168] feat: randomly select faucet address for funding --- cardano_node_tests/tests/common.py | 2 +- cardano_node_tests/tests/delegation.py | 2 +- .../tests/test_addr_registration.py | 2 +- cardano_node_tests/tests/test_blocks.py | 4 +-- .../tests/test_chain_transactions.py | 2 +- cardano_node_tests/tests/test_cli.py | 2 +- cardano_node_tests/tests/test_dbsync.py | 2 +- cardano_node_tests/tests/test_delegation.py | 8 +++--- .../tests/test_env_network_id.py | 2 +- cardano_node_tests/tests/test_governance.py | 2 +- cardano_node_tests/tests/test_mir_certs.py | 2 +- .../tests/test_native_tokens.py | 12 ++++----- cardano_node_tests/tests/test_node_upgrade.py | 4 +-- cardano_node_tests/tests/test_pools.py | 26 +++++++++---------- cardano_node_tests/tests/test_reconnect.py | 2 +- cardano_node_tests/tests/test_rollback.py | 2 +- cardano_node_tests/tests/test_scripts.py | 18 ++++++------- cardano_node_tests/tests/test_socket_path.py | 2 +- .../tests/test_staking_no_rewards.py | 10 +++---- .../tests/test_staking_rewards.py | 8 +++--- cardano_node_tests/tests/test_tx_basic.py | 14 +++++----- 
cardano_node_tests/tests/test_tx_fees.py | 4 +-- .../tests/test_tx_many_utxos.py | 2 +- cardano_node_tests/tests/test_tx_mempool.py | 2 +- cardano_node_tests/tests/test_tx_metadata.py | 4 +-- cardano_node_tests/tests/test_tx_negative.py | 2 +- .../tests/test_tx_unbalanced.py | 2 +- .../tests/test_update_proposals.py | 4 +-- .../tests/tests_conway/conway_common.py | 2 +- .../tests/tests_conway/test_committee.py | 2 +- .../tests/tests_conway/test_drep.py | 6 ++--- .../tests/tests_conway/test_guardrails.py | 2 +- .../tests/tests_conway/test_pparam_update.py | 2 +- .../tests_conway/test_treasury_donation.py | 4 +-- .../test_update_plutusv2_builtins.py | 2 +- .../tests/tests_plutus/test_delegation.py | 2 +- .../tests/tests_plutus/test_lobster.py | 2 +- .../tests/tests_plutus/test_mint_build.py | 2 +- .../tests_plutus/test_mint_negative_build.py | 2 +- .../tests_plutus/test_mint_negative_raw.py | 2 +- .../tests/tests_plutus/test_mint_raw.py | 2 +- .../tests/tests_plutus/test_spend_build.py | 4 +-- .../tests_plutus/test_spend_compat_build.py | 2 +- .../tests_plutus/test_spend_compat_raw.py | 2 +- .../tests_plutus/test_spend_datum_build.py | 2 +- .../tests_plutus/test_spend_datum_raw.py | 2 +- .../tests_plutus/test_spend_negative_build.py | 2 +- .../tests_plutus/test_spend_negative_raw.py | 4 +-- .../tests/tests_plutus/test_spend_raw.py | 4 +-- .../tests/tests_plutus_v2/test_mint_build.py | 2 +- .../test_mint_negative_build.py | 2 +- .../tests_plutus_v2/test_mint_negative_raw.py | 2 +- .../tests/tests_plutus_v2/test_mint_raw.py | 2 +- .../test_mint_secp256k1_build.py | 2 +- .../test_mint_secp256k1_raw.py | 2 +- .../tests/tests_plutus_v2/test_spend_build.py | 2 +- .../test_spend_collateral_build.py | 2 +- .../test_spend_collateral_raw.py | 2 +- .../test_spend_compat_build.py | 2 +- .../tests_plutus_v2/test_spend_compat_raw.py | 2 +- .../tests_plutus_v2/test_spend_datum_build.py | 2 +- .../tests_plutus_v2/test_spend_datum_raw.py | 2 +- 
.../tests/tests_plutus_v2/test_spend_raw.py | 2 +- .../test_spend_ref_inputs_build.py | 4 +-- .../test_spend_ref_inputs_raw.py | 2 +- .../test_spend_ref_scripts_build.py | 2 +- .../test_spend_ref_scripts_raw.py | 2 +- .../test_spend_secp256k1_build.py | 2 +- .../test_spend_secp256k1_raw.py | 2 +- cardano_node_tests/utils/governance_setup.py | 2 +- 70 files changed, 123 insertions(+), 123 deletions(-) diff --git a/cardano_node_tests/tests/common.py b/cardano_node_tests/tests/common.py index 700399269..e60b1d6c7 100644 --- a/cardano_node_tests/tests/common.py +++ b/cardano_node_tests/tests/common.py @@ -241,7 +241,7 @@ def detect_fork( tx_raw_output = clusterlib_utils.fund_from_faucet( payment_rec, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=2_000_000, ) assert tx_raw_output diff --git a/cardano_node_tests/tests/delegation.py b/cardano_node_tests/tests/delegation.py index e83dc20c0..e6ed6a260 100644 --- a/cardano_node_tests/tests/delegation.py +++ b/cardano_node_tests/tests/delegation.py @@ -152,7 +152,7 @@ def delegate_stake_addr( clusterlib_utils.fund_from_faucet( pool_user.payment, cluster_obj=cluster_obj, - faucet_data=addrs_data["user1"], + all_faucets=addrs_data, amount=amount, ) diff --git a/cardano_node_tests/tests/test_addr_registration.py b/cardano_node_tests/tests/test_addr_registration.py index ea25e0f3b..c918038d7 100644 --- a/cardano_node_tests/tests/test_addr_registration.py +++ b/cardano_node_tests/tests/test_addr_registration.py @@ -41,7 +41,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index b97e2722b..ec04f285c 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ 
b/cardano_node_tests/tests/test_blocks.py @@ -210,7 +210,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -404,7 +404,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_chain_transactions.py b/cardano_node_tests/tests/test_chain_transactions.py index d75b9ea9f..85a8511ac 100644 --- a/cardano_node_tests/tests/test_chain_transactions.py +++ b/cardano_node_tests/tests/test_chain_transactions.py @@ -36,7 +36,7 @@ def get_payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=amount, ) diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index e79f16c98..57a195c83 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -721,7 +721,7 @@ def test_pretty_utxo( clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=amount1 + amount2 + 10_000_000, ) diff --git a/cardano_node_tests/tests/test_dbsync.py b/cardano_node_tests/tests/test_dbsync.py index be702113e..206f5d38e 100644 --- a/cardano_node_tests/tests/test_dbsync.py +++ b/cardano_node_tests/tests/test_dbsync.py @@ -314,7 +314,7 @@ def test_reconnect_dbsync( clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=10_000_000, ) diff --git a/cardano_node_tests/tests/test_delegation.py 
b/cardano_node_tests/tests/test_delegation.py index 3e4317ac5..77c85c95f 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -80,7 +80,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users @@ -127,7 +127,7 @@ def pool_users_cluster_and_pool( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users @@ -302,7 +302,7 @@ def _get_pool_users( clusterlib_utils.fund_from_faucet( *pool_users, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) # Step: Delegate the stake addresses to 2 different pools @@ -431,7 +431,7 @@ def test_deregister_delegated( clusterlib_utils.fund_from_faucet( *payment_addr_recs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) pool_user = clusterlib.PoolUser(payment=payment_addr_recs[1], stake=stake_addr_rec) diff --git a/cardano_node_tests/tests/test_env_network_id.py b/cardano_node_tests/tests/test_env_network_id.py index e3fce7f8a..82486a4b6 100644 --- a/cardano_node_tests/tests/test_env_network_id.py +++ b/cardano_node_tests/tests/test_env_network_id.py @@ -117,7 +117,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=100_000_000, ) return addrs diff --git a/cardano_node_tests/tests/test_governance.py b/cardano_node_tests/tests/test_governance.py index cd19902cf..2c8d53450 100644 --- a/cardano_node_tests/tests/test_governance.py +++ b/cardano_node_tests/tests/test_governance.py 
@@ -62,7 +62,7 @@ def payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=amount, ) diff --git a/cardano_node_tests/tests/test_mir_certs.py b/cardano_node_tests/tests/test_mir_certs.py index a125ec4bd..a9af7636c 100644 --- a/cardano_node_tests/tests/test_mir_certs.py +++ b/cardano_node_tests/tests/test_mir_certs.py @@ -80,7 +80,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( *created_users, cluster_obj=cluster_pots, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users diff --git a/cardano_node_tests/tests/test_native_tokens.py b/cardano_node_tests/tests/test_native_tokens.py index fd74afa07..bc6335cea 100644 --- a/cardano_node_tests/tests/test_native_tokens.py +++ b/cardano_node_tests/tests/test_native_tokens.py @@ -90,7 +90,7 @@ def issuers_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=9_000_000, ) @@ -773,7 +773,7 @@ def _mint_tokens() -> clusterlib.TxRawOutput: clusterlib_utils.fund_from_faucet( token_mint_addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=300_000_000, ) @@ -799,7 +799,7 @@ def _mint_tokens() -> clusterlib.TxRawOutput: clusterlib_utils.fund_from_faucet( token_mint_addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=40_000_000, ) @@ -918,7 +918,7 @@ def _mint_tokens() -> clusterlib.TxRawOutput: clusterlib_utils.fund_from_faucet( token_mint_addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=300_000_000, ) @@ -944,7 +944,7 @@ def 
_mint_tokens() -> clusterlib.TxRawOutput: clusterlib_utils.fund_from_faucet( token_mint_addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=40_000_000, ) @@ -1662,7 +1662,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index bc6d57856..5ba380ca5 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -47,7 +47,7 @@ def payment_addr_locked( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr @@ -71,7 +71,7 @@ def payment_addrs_disposable( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_pools.py b/cardano_node_tests/tests/test_pools.py index 111697f14..8e5c98b28 100644 --- a/cardano_node_tests/tests/test_pools.py +++ b/cardano_node_tests/tests/test_pools.py @@ -667,7 +667,7 @@ def test_stake_pool_metadata( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -748,7 +748,7 @@ def test_stake_pool_not_avail_metadata( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -818,7 +818,7 @@ def test_create_stake_pool( clusterlib_utils.fund_from_faucet( 
pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -883,7 +883,7 @@ def test_deregister_stake_pool( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -1024,7 +1024,7 @@ def test_reregister_stake_pool( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_500_000_000, ) @@ -1185,7 +1185,7 @@ def test_cancel_stake_pool_deregistration( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_500_000_000, ) @@ -1350,7 +1350,7 @@ def test_update_stake_pool_metadata( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000 * no_of_addr, ) @@ -1469,7 +1469,7 @@ def test_update_stake_pool_parameters( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000 * no_of_addr, ) @@ -1570,7 +1570,7 @@ def test_sign_in_multiple_stages( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -1707,7 +1707,7 @@ def test_pool_registration_deregistration( clusterlib_utils.fund_from_faucet( pool_owner.payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + 
all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -1813,7 +1813,7 @@ def pool_owners_pbt( clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -1896,7 +1896,7 @@ def _subtest(pool_cost: int) -> None: clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, ) @@ -1940,7 +1940,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=600_000_000, ) diff --git a/cardano_node_tests/tests/test_reconnect.py b/cardano_node_tests/tests/test_reconnect.py index 9d26e25fa..54d6d21d2 100644 --- a/cardano_node_tests/tests/test_reconnect.py +++ b/cardano_node_tests/tests/test_reconnect.py @@ -63,7 +63,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_rollback.py b/cardano_node_tests/tests/test_rollback.py index d13644b04..4bf69ce20 100644 --- a/cardano_node_tests/tests/test_rollback.py +++ b/cardano_node_tests/tests/test_rollback.py @@ -72,7 +72,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_scripts.py b/cardano_node_tests/tests/test_scripts.py index 895d969b6..7c08fb4fe 100644 --- a/cardano_node_tests/tests/test_scripts.py +++ b/cardano_node_tests/tests/test_scripts.py @@ -159,7 +159,7 @@ def payment_addrs( 
clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=10_000_000_000, ) @@ -705,7 +705,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -949,7 +949,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -1615,7 +1615,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -1849,7 +1849,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -2049,7 +2049,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -2158,7 +2158,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=100_000_000, ) @@ -2432,7 +2432,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -2859,7 +2859,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, 
) return addrs diff --git a/cardano_node_tests/tests/test_socket_path.py b/cardano_node_tests/tests/test_socket_path.py index 1b91c70b5..3d5e0a8c5 100644 --- a/cardano_node_tests/tests/test_socket_path.py +++ b/cardano_node_tests/tests/test_socket_path.py @@ -109,7 +109,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=100_000_000, ) return addrs diff --git a/cardano_node_tests/tests/test_staking_no_rewards.py b/cardano_node_tests/tests/test_staking_no_rewards.py index e7ce199c4..b2af51389 100644 --- a/cardano_node_tests/tests/test_staking_no_rewards.py +++ b/cardano_node_tests/tests/test_staking_no_rewards.py @@ -159,7 +159,7 @@ def test_no_reward_unmet_pledge1( clusterlib_utils.fund_from_faucet( pool_owner, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) @@ -319,7 +319,7 @@ def test_no_reward_unmet_pledge2( clusterlib_utils.fund_from_faucet( delegation_out.pool_user, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) @@ -501,7 +501,7 @@ def test_no_reward_deregistered_stake_addr( clusterlib_utils.fund_from_faucet( pool_owner, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) @@ -697,7 +697,7 @@ def test_no_reward_deregistered_reward_addr( clusterlib_utils.fund_from_faucet( pool_reward, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) @@ -864,7 +864,7 @@ def test_deregister_reward_addr_retire_pool( clusterlib_utils.fund_from_faucet( pool_owner, cluster_obj=cluster, - 
faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) diff --git a/cardano_node_tests/tests/test_staking_rewards.py b/cardano_node_tests/tests/test_staking_rewards.py index 2836e472b..86e4d21a9 100644 --- a/cardano_node_tests/tests/test_staking_rewards.py +++ b/cardano_node_tests/tests/test_staking_rewards.py @@ -369,7 +369,7 @@ def test_reward_amount( # noqa: C901 clusterlib_utils.fund_from_faucet( *payment_addr_recs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) pool_user = clusterlib.PoolUser(payment=payment_addr_recs[1], stake=stake_addr_rec) @@ -675,7 +675,7 @@ def test_reward_addr_delegation( # noqa: C901 clusterlib_utils.fund_from_faucet( pool_owner, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) @@ -1081,7 +1081,7 @@ def test_decreasing_reward_transferred_funds( clusterlib_utils.fund_from_faucet( dst_addr_record, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) # transfer all funds from payment address back to faucet, so no funds are staked @@ -1200,7 +1200,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 clusterlib_utils.fund_from_faucet( pool2_owner, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=900_000_000, force=True, ) diff --git a/cardano_node_tests/tests/test_tx_basic.py b/cardano_node_tests/tests/test_tx_basic.py index bfaad59d4..e7357206f 100644 --- a/cardano_node_tests/tests/test_tx_basic.py +++ b/cardano_node_tests/tests/test_tx_basic.py @@ -53,7 +53,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + 
all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -81,7 +81,7 @@ def byron_addrs( clusterlib_utils.fund_from_faucet( *new_byron_addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return new_byron_addrs @@ -104,7 +104,7 @@ def payment_addrs_disposable( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -126,7 +126,7 @@ def payment_addrs_no_change( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -770,7 +770,7 @@ def test_no_txout( clusterlib_utils.fund_from_faucet( src_record, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=2_000_000, ) @@ -1207,7 +1207,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=90_000_000_000, ) @@ -1508,7 +1508,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_tx_fees.py b/cardano_node_tests/tests/test_tx_fees.py index 4b71602ff..ec2053e9e 100644 --- a/cardano_node_tests/tests/test_tx_fees.py +++ b/cardano_node_tests/tests/test_tx_fees.py @@ -63,7 +63,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs @@ -219,7 +219,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( 
*created_users[:10], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users diff --git a/cardano_node_tests/tests/test_tx_many_utxos.py b/cardano_node_tests/tests/test_tx_many_utxos.py index b80b45fbf..8e529f718 100644 --- a/cardano_node_tests/tests/test_tx_many_utxos.py +++ b/cardano_node_tests/tests/test_tx_many_utxos.py @@ -52,7 +52,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=800_000_000_000, ) diff --git a/cardano_node_tests/tests/test_tx_mempool.py b/cardano_node_tests/tests/test_tx_mempool.py index 711869dad..5b7d170fd 100644 --- a/cardano_node_tests/tests/test_tx_mempool.py +++ b/cardano_node_tests/tests/test_tx_mempool.py @@ -37,7 +37,7 @@ def payment_addrs_locked( clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster_singleton, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_tx_metadata.py b/cardano_node_tests/tests/test_tx_metadata.py index d14c78687..d59cab968 100644 --- a/cardano_node_tests/tests/test_tx_metadata.py +++ b/cardano_node_tests/tests/test_tx_metadata.py @@ -51,7 +51,7 @@ def payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr @@ -607,7 +607,7 @@ def test_tx_metadata_no_txout( clusterlib_utils.fund_from_faucet( src_record, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=2_000_000, ) diff --git a/cardano_node_tests/tests/test_tx_negative.py b/cardano_node_tests/tests/test_tx_negative.py index c2acd68ca..4003762cc 100644 --- 
a/cardano_node_tests/tests/test_tx_negative.py +++ b/cardano_node_tests/tests/test_tx_negative.py @@ -89,7 +89,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( *created_users, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return created_users diff --git a/cardano_node_tests/tests/test_tx_unbalanced.py b/cardano_node_tests/tests/test_tx_unbalanced.py index fd28ebaeb..21a5cea7c 100644 --- a/cardano_node_tests/tests/test_tx_unbalanced.py +++ b/cardano_node_tests/tests/test_tx_unbalanced.py @@ -86,7 +86,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addrs diff --git a/cardano_node_tests/tests/test_update_proposals.py b/cardano_node_tests/tests/test_update_proposals.py index 3e166d7c2..150d4192d 100644 --- a/cardano_node_tests/tests/test_update_proposals.py +++ b/cardano_node_tests/tests/test_update_proposals.py @@ -64,7 +64,7 @@ def payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr @@ -390,7 +390,7 @@ def payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr diff --git a/cardano_node_tests/tests/tests_conway/conway_common.py b/cardano_node_tests/tests/tests_conway/conway_common.py index d650d9c05..a87bb1a90 100644 --- a/cardano_node_tests/tests/tests_conway/conway_common.py +++ b/cardano_node_tests/tests/tests_conway/conway_common.py @@ -143,7 +143,7 @@ def _create_user() -> clusterlib.PoolUser: clusterlib_utils.fund_from_faucet( pool_user.payment, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + 
all_faucets=cluster_manager.cache.addrs_data, amount=fund_amount, ) diff --git a/cardano_node_tests/tests/tests_conway/test_committee.py b/cardano_node_tests/tests/tests_conway/test_committee.py index f772d6f21..b9dd2a715 100644 --- a/cardano_node_tests/tests/tests_conway/test_committee.py +++ b/cardano_node_tests/tests/tests_conway/test_committee.py @@ -56,7 +56,7 @@ def payment_addr_comm( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 498e3f027..b4fb2b671 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -76,7 +76,7 @@ def get_payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr @@ -104,7 +104,7 @@ def get_pool_user( clusterlib_utils.fund_from_faucet( pool_user.payment, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return pool_user @@ -1876,7 +1876,7 @@ def _check_records() -> tp.List[blockers.GH]: clusterlib_utils.fund_from_faucet( *drep_users, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, # Add a lot of funds so no action can be ratified without the new DReps amount=10_000_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index 8795f1e91..0e8c75a9a 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -97,7 +97,7 @@ def payment_addr( 
clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=10_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index c6c6cb03d..505474bcd 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -1399,7 +1399,7 @@ def payment_addr( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr diff --git a/cardano_node_tests/tests/tests_conway/test_treasury_donation.py b/cardano_node_tests/tests/tests_conway/test_treasury_donation.py index 43d0c9fcd..df9cf837f 100644 --- a/cardano_node_tests/tests/tests_conway/test_treasury_donation.py +++ b/cardano_node_tests/tests/tests_conway/test_treasury_donation.py @@ -50,7 +50,7 @@ def payment_addr_treasury( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster_treasury, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr @@ -73,7 +73,7 @@ def payment_addr_singleton( clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster_singleton, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) return addr diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index fac94c5f2..d66387dc1 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -62,7 +62,7 @@ def payment_addrs_lg( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - 
faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_delegation.py b/cardano_node_tests/tests/tests_plutus/test_delegation.py index 194729c10..dcdd87da7 100644 --- a/cardano_node_tests/tests/tests_plutus/test_delegation.py +++ b/cardano_node_tests/tests/tests_plutus/test_delegation.py @@ -103,7 +103,7 @@ def pool_user( clusterlib_utils.fund_from_faucet( payment_addr_rec, cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=18_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_lobster.py b/cardano_node_tests/tests/tests_plutus/test_lobster.py index c00dca28b..c906b5e1e 100644 --- a/cardano_node_tests/tests/tests_plutus/test_lobster.py +++ b/cardano_node_tests/tests/tests_plutus/test_lobster.py @@ -51,7 +51,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index 25b66d36c..589a87b39 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -50,7 +50,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py index c6a74680d..e8b235726 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py +++ 
b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py @@ -51,7 +51,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py index 51ebdecc8..2471f9f85 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py @@ -43,7 +43,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py index 76dcd80db..1090315cc 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py @@ -46,7 +46,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_build.py index 8b338d137..274894ddd 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_build.py @@ -45,7 +45,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) @@ -69,7 +69,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - 
faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py index dc0ba4f97..c19bec9ed 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py @@ -39,7 +39,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py index 33a0ef81b..8b72a49bf 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py @@ -38,7 +38,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py index 5d177b688..1d742cd06 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py @@ -46,7 +46,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py index db1180149..71fa300a6 100644 --- 
a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py @@ -43,7 +43,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py index 96a2a37d7..d5a777ae8 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py @@ -44,7 +44,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py index 0d68b104b..c4d2cf309 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py @@ -50,7 +50,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) @@ -720,7 +720,7 @@ def _fund_script_guessing_game( clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py index 4ca3b5e61..2cb665647 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py +++ 
b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py @@ -49,7 +49,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) @@ -73,7 +73,7 @@ def pool_users( clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py index e412d18ea..4ea8035a5 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py @@ -42,7 +42,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py index 6d52947fb..9c6fc2a16 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py @@ -39,7 +39,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py index 8fbd41e93..db090d7ad 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py @@ -38,7 +38,7 @@ def payment_addrs( 
clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py index d12379c64..4a277844a 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py @@ -42,7 +42,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py index a4985d76d..bd0c290cb 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py @@ -40,7 +40,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py index 1858ca742..580ffd52f 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py @@ -40,7 +40,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py 
index 9fabe6dee..b3662257d 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py @@ -42,7 +42,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py index 2dbae82a8..d56ed7afc 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py @@ -42,7 +42,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py index 2669c4556..b9befa582 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py @@ -41,7 +41,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py index a66bede53..7d4b84d9f 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py @@ -40,7 +40,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - 
faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py index e8d4aa595..a8fa52bf3 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py @@ -39,7 +39,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py index 6f6eb6656..a90548726 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py @@ -44,7 +44,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py index 142af6827..3199ce5cb 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py @@ -43,7 +43,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py index 32090046d..0a2afdc2e 100644 --- 
a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py @@ -41,7 +41,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py index 1fac4baae..b4938a2fe 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py @@ -43,7 +43,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) @@ -290,7 +290,7 @@ def test_use_same_reference_input_multiple_times( clusterlib_utils.fund_from_faucet( payment_addrs[1], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, ) # create the reference input diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py index a6993f8fe..323ef8650 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py @@ -42,7 +42,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py index f4b50b272..f2f373380 100644 --- 
a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py @@ -40,7 +40,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py index 7434bb7be..b81d8910d 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py @@ -40,7 +40,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py index 9739c6ca3..7c8f93fd0 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py @@ -43,7 +43,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=1_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py index 33d6f3533..663317295 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py @@ -39,7 +39,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - 
faucet_data=cluster_manager.cache.addrs_data["user1"], + all_faucets=cluster_manager.cache.addrs_data, amount=3_000_000_000, ) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 8e0b22392..b8d6a4522 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -113,7 +113,7 @@ def create_vote_stake( clusterlib_utils.fund_from_faucet( *pool_users, cluster_obj=cluster_obj, - faucet_data=cluster_manager.cache.addrs_data["user1"], + faucet_data=cluster_manager.cache.addrs_data["faucet"], amount=500_000_000_000, destination_dir=destination_dir, ) From 04472020ba65dafc23d405cd5eae59a0b4969870 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 14:36:20 +0200 Subject: [PATCH 006/168] refactor(tests): simplify address creation in test_blocks Simplified the creation of payment addresses in TestDynamicBlockProd by removing the use of fixture_cache. The caching is not needed as the fixture is used by single test. 
--- cardano_node_tests/tests/test_blocks.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index ec04f285c..c0ebaa16b 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -390,17 +390,12 @@ def payment_addrs( """Create new payment addresses.""" cluster = cluster_singleton - with cluster_manager.cache_fixture() as fixture_cache: - if fixture_cache.value: - return fixture_cache.value # type: ignore - - addrs = clusterlib_utils.create_payment_addr_records( - *[f"addr_dyn_prod_ci{cluster_manager.cluster_instance_num}_{i}" for i in range(20)], - cluster_obj=cluster, - ) - fixture_cache.value = addrs + addrs = clusterlib_utils.create_payment_addr_records( + *[f"addr_dyn_prod_ci{cluster_manager.cluster_instance_num}_{i}" for i in range(20)], + cluster_obj=cluster, + ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, From 106cf4bd29de5e4ea10922e2a94e15e9c3ea1b63 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 14:37:09 +0200 Subject: [PATCH 007/168] fix(tests): ensure network reaches epoch 2 before tests Updated the test_blocks.py to ensure the network reaches at least epoch 2 before running tests. Removed the condition for waiting until epoch 1 and the special case for local clusters starting in Byron era. 
--- cardano_node_tests/tests/test_blocks.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index c0ebaa16b..1754f5b14 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -447,16 +447,8 @@ def _save_state(curr_epoch: int) -> tp.Dict[str, int]: blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] return blocks_before - # Blocks are produced by BFT node in Byron epoch and first Shelley epoch on local cluster - # that starts in Byron era. - if ( - cluster_nodes.get_cluster_type().type == cluster_nodes.ClusterType.LOCAL - and not cluster_nodes.get_cluster_type().uses_shortcut - ): - cluster.wait_for_epoch(epoch_no=2) - - # The network needs to be at least in epoch 1 - cluster.wait_for_epoch(epoch_no=1) + # The network needs to be at least in epoch 2 + cluster.wait_for_epoch(epoch_no=2) # Wait for the epoch to be at least half way through and not too close to the end. # We want the original pool to have time to forge blocks in this epoch, before it becomes From ee7d242c3293bf6bf43c1fbf6b1d7914a32173f7 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 16:12:51 +0200 Subject: [PATCH 008/168] feat(tests): add assertion for protocol major version Added an assertion to check that the major protocol version is still 9 in the ratification epoch. 
--- cardano_node_tests/tests/tests_conway/test_hardfork.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cardano_node_tests/tests/tests_conway/test_hardfork.py b/cardano_node_tests/tests/tests_conway/test_hardfork.py index 4ef5bab30..9d0041433 100644 --- a/cardano_node_tests/tests/tests_conway/test_hardfork.py +++ b/cardano_node_tests/tests/tests_conway/test_hardfork.py @@ -235,6 +235,10 @@ def test_hardfork( assert rat_gov_state["nextRatifyState"]["ratificationDelayed"], "Ratification not delayed" reqc.cip038_07.success() + assert ( + rat_gov_state["currentPParams"]["protocolVersion"]["major"] == 9 + ), "Incorrect major version" + # Check enactment enact_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 2, padding_seconds=5) enact_gov_state = cluster.g_conway_governance.query.gov_state() From 9c7517695fd9d6edf4f9bce8c42d8b83cafedaa0 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 2 Oct 2024 16:26:22 +0200 Subject: [PATCH 009/168] fix(tests): correct name template in test_pparam_update.py --- cardano_node_tests/tests/tests_conway/test_pparam_update.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index 505474bcd..4c72cd074 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -1071,7 +1071,7 @@ def _check_proposed_pparams( # Vote on the "final" action that will be enacted reqc.cip037.start(url=helpers.get_vcs_link()) fin_prop_rec = _propose_pparams_update( - name_template=f"{temp_template}_fin_no", proposals=fin_update_proposals + name_template=f"{temp_template}_fin", proposals=fin_update_proposals ) _check_proposed_pparams( update_proposals=fin_prop_rec.proposals, From 034e4b1b2bd0d0989db3094f0de77f2e2ee1c78f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 3 Oct 2024 09:14:26 +0200 Subject: [PATCH 010/168] 
fix(tests): fund large amounts of tADA from main faucet Fund large amounts of tADA from the main faucet instead of from the user faucets. User faucets don't need to have that much tADA. --- cardano_node_tests/tests/test_tx_many_utxos.py | 2 +- cardano_node_tests/tests/tests_conway/test_drep.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/tests/test_tx_many_utxos.py b/cardano_node_tests/tests/test_tx_many_utxos.py index 8e529f718..166bfab3c 100644 --- a/cardano_node_tests/tests/test_tx_many_utxos.py +++ b/cardano_node_tests/tests/test_tx_many_utxos.py @@ -52,7 +52,7 @@ def payment_addrs( clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, - all_faucets=cluster_manager.cache.addrs_data, + faucet_data=cluster_manager.cache.addrs_data["faucet"], amount=800_000_000_000, ) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index b4fb2b671..d12a782d2 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -1876,7 +1876,7 @@ def _check_records() -> tp.List[blockers.GH]: clusterlib_utils.fund_from_faucet( *drep_users, cluster_obj=cluster, - all_faucets=cluster_manager.cache.addrs_data, + faucet_data=cluster_manager.cache.addrs_data["faucet"], # Add a lot of funds so no action can be ratified without the new DReps amount=10_000_000_000_000, ) From bd80a6ebc243b190a6ae7dee9e764fbe7dc885c9 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 3 Oct 2024 09:34:35 +0200 Subject: [PATCH 011/168] refactor: remove unused fund_from_genesis function The `fund_from_genesis` function and related imports have been removed from `clusterlib_utils.py` as they are no longer used in the codebase. 
--- cardano_node_tests/utils/clusterlib_utils.py | 39 -------------------- 1 file changed, 39 deletions(-) diff --git a/cardano_node_tests/utils/clusterlib_utils.py b/cardano_node_tests/utils/clusterlib_utils.py index 0a4731402..efe3204ea 100644 --- a/cardano_node_tests/utils/clusterlib_utils.py +++ b/cardano_node_tests/utils/clusterlib_utils.py @@ -16,9 +16,7 @@ from cardano_clusterlib import txtools as cl_txtools from cardano_node_tests.utils import helpers -from cardano_node_tests.utils import locking from cardano_node_tests.utils import submit_utils -from cardano_node_tests.utils import temptools from cardano_node_tests.utils.faucet import fund_from_faucet # noqa: F401 # for compatibility LOGGER = logging.getLogger(__name__) @@ -288,43 +286,6 @@ def deregister_stake_address( return tx_output -def fund_from_genesis( - *dst_addrs: str, - cluster_obj: clusterlib.ClusterLib, - amount: int = 2_000_000, - tx_name: tp.Optional[str] = None, - destination_dir: cl_types.FileType = ".", -) -> None: - """Send `amount` from genesis addr to all `dst_addrs`.""" - fund_dst = [ - clusterlib.TxOut(address=d, amount=amount) - for d in dst_addrs - if cluster_obj.g_query.get_address_balance(d) < amount - ] - if not fund_dst: - return - - with locking.FileLockIfXdist( - f"{temptools.get_basetemp()}/{cluster_obj.g_genesis.genesis_utxo_addr}.lock" - ): - tx_name = tx_name or helpers.get_timestamped_rand_str() - tx_name = f"{tx_name}_genesis_funding" - fund_tx_files = clusterlib.TxFiles( - signing_key_files=[ - *cluster_obj.g_genesis.genesis_keys.delegate_skeys, - cluster_obj.g_genesis.genesis_keys.genesis_utxo_skey, - ] - ) - - cluster_obj.g_transaction.send_funds( - src_address=cluster_obj.g_genesis.genesis_utxo_addr, - destinations=fund_dst, - tx_name=tx_name, - tx_files=fund_tx_files, - destination_dir=destination_dir, - ) - - def create_payment_addr_records( *names: str, cluster_obj: clusterlib.ClusterLib, From ab9680df75ed6bbde5e156431a0799a76e686a3c Mon Sep 17 00:00:00 2001 
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Oct 2024 08:47:45 +0000 Subject: [PATCH 012/168] chore(deps): bump cachix/install-nix-action from 29 to 30 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 29 to 30. - [Release notes](https://github.com/cachix/install-nix-action/releases) - [Commits](https://github.com/cachix/install-nix-action/compare/v29...v30) --- updated-dependencies: - dependency-name: cachix/install-nix-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/nix_smoke.yaml | 2 +- .github/workflows/regression_reusable.yaml | 2 +- .github/workflows/upgrade_reusable.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/nix_smoke.yaml b/.github/workflows/nix_smoke.yaml index b94d13bfd..6f7c4e95e 100644 --- a/.github/workflows/nix_smoke.yaml +++ b/.github/workflows/nix_smoke.yaml @@ -17,7 +17,7 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - name: Install Nix - uses: cachix/install-nix-action@v29 + uses: cachix/install-nix-action@v30 with: extra_nix_config: | access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index 0c3d3208e..cf1259f8d 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -71,7 +71,7 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - name: Install Nix - uses: cachix/install-nix-action@v29 + uses: cachix/install-nix-action@v30 with: extra_nix_config: | access-tokens = github.com=${{ secrets.GH_TOKEN }} diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index e7f3e7fe9..cca9fef62 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -26,7 +26,7 @@ jobs: - name: Checkout code 
uses: actions/checkout@v4 - name: Install Nix - uses: cachix/install-nix-action@v29 + uses: cachix/install-nix-action@v30 with: extra_nix_config: | access-tokens = github.com=${{ secrets.GH_TOKEN }} From aa0b3d78951a2e5d69c2d2d6e81598d4dd749807 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 4 Oct 2024 16:16:53 +0200 Subject: [PATCH 013/168] doc: update Conway User Stories coverage --- .../chang_user_stories_system_tests.rst | 759 +++++++++--------- 1 file changed, 386 insertions(+), 373 deletions(-) diff --git a/src_docs/source/test_results/requirements/chang_user_stories_system_tests.rst b/src_docs/source/test_results/requirements/chang_user_stories_system_tests.rst index 636dc4beb..7071283f5 100644 --- a/src_docs/source/test_results/requirements/chang_user_stories_system_tests.rst +++ b/src_docs/source/test_results/requirements/chang_user_stories_system_tests.rst @@ -1,11 +1,9 @@ System Tests Coverage ===================== -The page is updated every Tuesday and Friday until the coverage is complete. +Latest update: **2024-10-04**   -Latest update: **2024-08-16**   - -**Legend:** |Success Badge| |Failure Badge| |Partial Coverage Badge| |Uncovered Badge|   +**Legend:** |Success Badge| |Failure Badge| |Partial Coverage Badge| |Uncovered Badge| |Unplanned Badge|   CLI User Stories ---------------- @@ -23,229 +21,229 @@ CLI User Stories - |image-CLI1| - Obtain constitution hash for verification (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to obtain the hash of the off-chain text of a Constitution, **so that** I can compare it against the hash registered on-chain to verify its authenticity. - - |image-CLI2| - Generate hash of the off-chain constitution (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to generate the hash of the off-chain text for a proposed Constitution, **so that** the hash can be utilized in a governance action. 
- - |image-CLI3| - Generate Committee member cold key pair (CCM) - `→ `__ + `→ `__ - **As** a potential Constitutional Committee member, **I want** to generate COLD key pair, **so that** I can be proposed for the Committee in a Governance action. - - |image-CLI4| - Generate committee member hot key pair (CCM) - `→ `__ + `→ `__ - **As** potential Constitutional Committee member, **I want** to generate HOT key pair, **so that** I can authorise the Hot key to sign votes on behalf of the Cold key. - - |image-CLI5| - Authorization certificate (CCM) - `→ `__ + `→ `__ - **As** a Constitutional Committee member, **I want** to issue a authorization certificate from my cold key to a hot key, **so that** I can sign my votes using the hot key and keep the cold key in cold storage and can authorise a new hot key in case the original one is compromised. - - |image-CLI6| - Generate committee member key hash (CCM) - `→ `__ + `→ `__ - **As** a potential Constitutional Committee member, **I want** to generate the key hashes for my cold and hot keys, **so that** they can be used by third parties to propose me as a new Constitutional Committee member and for identification purposes once I’ve been elected as Constitutional Committee member. - - |image-CLI7| - Committee member resignation certificate (CCM) - `→ `__ + `→ `__ - **As** a Constitutional Committee member, **I want** to be able to generate a resignation certificate, **so that** I can submit it to the chain on a transaction to signal to the Ada holders that I’m resigning from my duties as CC member. - - |image-CLI8| - Generate DRep keys (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to generate Ed25519 keys, **so that** I can register as a DRep. - - |image-CLI9| - Generate DRep ID (DRep) - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate a DRep Id, **so that** Ada holder can use it to delegate their votes to me and my voting record can be tracked. 
- - |image-CLI10| - DRep Registration Certificate Generation (DRep) - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate a DRep registration certificate, **so that** I can submit it on a transaction and the Ada holders can delegate their votes to me. - - |image-CLI11| - DRep Retirement Certificate Generation (DRep) - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate a DRep retirement (unregistration) certificate, **so that** I can submit it on a transaction and can get my DRep deposit back. - - |image-CLI12| - DRep Metadata Hash Generation (DRep) - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate the hash of my DRep metadata, **so that** I can supply it when registering as DRep. - - |image-CLI13| - Create Update Constitution Governance Action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a governance action that updates the constitution, **so that** it can be submitted to the chain and be voted by the governance bodies. - - |image-CLI14| - Create Update Constitutional Committee Governance Action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a governance action that updates the Constitutional Committee, **so that** it can be submitted to the chain and be voted by the governance bodies. - - |image-CLI15| - Create Treasury Withdrawal Governance Action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a governance action to withdraw funds from the treasury, **so that** it can be submitted to the chain and be voted by the governance bodies. Command: ``cardano-cli conway governance action create-treasury-withdrawal``. - - |image-CLI16| - Create info governance action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create an info governance action, **so that** it can be submitted to the chain and be voted by the governance bodies. Command: ``cardano-cli conway governance action create-info``. 
- - |image-CLI17| - Create update protocol parameters governance action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a governance action to update protocol parameters, **so that** it can be submitted to the chain and be voted by the governance bodies. Command: ``cardano-cli conway governance action create-protocol-parameters-update``. - - |image-CLI18| - Create no-confidence governance action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a no-confidence governance action, **so that** it can be submitted to the chain and be voted by the governance bodies. Command: ``cardano-cli conway governance action create-no-confidence``. - - |image-CLI19| - Create Hard-fork initiation governance action (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a governance action to initiate a hardfork, **so that** it can be submitted to the chain and be voted by the governance bodies. Command: ``cardano-cli conway governance action create-hf-init``. - - |image-CLI20| - View governance action file (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to inspect the contents of a governance action file, **so that** I can verify it is correct before submitting it in a transaction. Command: ``cardano-cli conway governance action view``. - - |image-CLI21| - Create a governance action vote (DRep/SPO/CCM) - `→ `__ + `→ `__ - **As** a DRep, SPO or CC member, **I want** to create a vote for a governance action, **so that** I can include it in a transaction and submit it to the chain. Command: ``cardano-cli conway governance vote create``. - - |image-CLI22| - View vote file (DRep/SPO/CCM) - `→ `__ + `→ `__ - **As** a DRep, SPO or CC member, **I want** to inspect the contents of a vote file, **so that** I can verify it is correct before submitting it in a transaction. Command: ``cardano-cli conway governance vote view``. 
- - |image-CLI23| - Build a transaction with to submit proposal (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to build a transaction that includes a proposal (containing a governance action), **so that** I can later sign and submit to the chain. Command: ``transaction build``. - - |image-CLI24| - Build transaction for proposal vote (DRep, SPO, CCM) - `→ `__ + `→ `__ - **As** a DRep, SPO or CC member, **I want** to build a transaction that includes my vote on a particular governance action, **so that** I can later sign and submit to the chain. Command: ``transaction build``. - - |image-CLI25| - Build RAW transaction for proposal vote (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to build a transaction that includes a proposal (containing a governance action), **so that** I can later sign and submit to the chain. Command: ``transaction build-raw``. - - |image-CLI26| - Build RAW transaction for proposal vote (DRep/SPO/CCM) - `→ `__ + `→ `__ - **As** a DRep, SPO or CC member, **I want** to build a transaction that includes my vote on a particular governance action, **so that** I can later sign and submit to the chain. Command: ``transaction build-raw``. - - |image-CLI27| - Create stake registration certificate (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a conway cddl-compliant stake registration certificate. - - |image-CLI28| - Create stake deregistration certificate (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to create a conway cddl-compliant stake deregistration certificate to get my deposit back. - - |image-CLI29| - Delegate vote to DRep (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to delegate my votes to a DRep (registered or default), **so that** my stake is counted when the DRep votes. 
- - |image-CLI30| - Delegate stake to SPO and votes to DRep with a single certificate (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to delegate my stake to a stake pool AND my votes to a DRep (registered or default) with a single certificate. - - |image-CLI31| - Query governance state (ANY) - `→ `__ + `→ `__ - **As** any persona, **I want** to query the nodes for the currentGovernance state, **so that** I can inform my decisions. - - |image-CLI32| - Query committee state (CCM) - `→ `__ + `→ `__ - **As** a CC member, **I want** to query the Constitutional Committee state, **so that** I can find my expiration term and whether my hot key authorization certificate has been recorded on chain. - - |image-CLI33| - Query DRep state (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to query the DRep state, **so that** I can find detailed information about registered DReps. - - |image-CLI34| - Query DRep stake distribution (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder and DRep, **I want** to query the DRep stake distribution, **so that** I can find the weight (of the votes) of each DRep. - - |image-CLI35| - Expand query stake-address-info to show deposits and vote delegation (HOLDER) - `→ `__ + `→ `__ - **As** an Ada holder, **I want** to query my stake address information, **so that** I can learn to which pool and DRep I’m delegating to and the value in lovelace of my deposits for delegating and for submitting governance actions. - - |image-CLI36| - Query constitution. - `→ `__ + `→ `__ - **As** any persona, **I want** to query the on-chain constitution, **so that** I can know the url where it is stored and the document hash, **so that** I can verify authenticity. 
@@ -265,175 +263,175 @@ CIP1694 User Stories - |image-CIP1a| - Constitution - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the ledger to maintain a record of the hash value of the current constitution together with a URL hosting the off-chain document, **so that** I can verify the authenticity of the off-chain document. - - |image-CIP1b| - Hash value of the off-chain Constitution is recorded on-chain - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the ledger to maintain a record of the hash value of the current constitution together with a URL hosting the off-chain document, **so that** I can verify the authenticity of the off-chain document. - - |image-CIP2| - Node records Committee member key hashes, terms and status - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the key hash of active and expired Committee Members and their terms to be registered on-chain, **so that** the system can count their votes. - - |image-CIP3| - Authorization Certificate - `→ `__ + `→ `__ - **As** a Committee Member, **I want** to generate and submit a Cold to Hot Credential Authorization certificate, **so that** I can sign votes using the hot credential and keep the cold credential in safe storage. - - |image-CIP4| - Record cold credentials and authorization certificates on chain - `→ `__ + `→ `__ - **As** a committee member, **I want** the ledger to accurately maintain the record of key-hashes, terms, and cold to hot credentials authorization maps for active and expired members, **so that** only votes from active Committee members count. - - |image-CIP5| - Replacing the constitutional committee via a governance action - `→ `__ + `→ `__ - **As** a Governance Actor, **I want** to submit a governance action to replace all or part of the current constitutional committee, **so that** committee members who have lost the confidence of stakeholders can be removed from their role. 
- - |image-CIP6| - Size of the constitutional committee - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the minimal size of the Constitutional Committee to be a protocol parameter, **so that** it can be adjusted via a governance action. - - |image-CIP7| - Committee voting threshold (quorum) can be modified - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the committee quorum (the fraction of committee required to ratify a gov action) to be not fixed, **so that** it can be modified via a governance action. - - |image-CIP8| - Electing an empty committee - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to have the option of electing an empty committee, **so that** governance actions don’t need the votes of a Constitutional Committee to be ratified. - - |image-CIP9| - Constitutional committee members have a limited term - `→ `__ + `→ `__ - **As** a Stakeholder and as a Committee Member, **I want** each Committee Member to have an individual term, **so that** the system can have a rotation scheme. - - |image-CIP10| - Tracking committee member expirations - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the system to keep track of the expiration epoch of each committee member, **so that** the information is publicly available in the ledger and the community can plan ahead and agree on new CC member. - - |image-CIP11| - Automatically expire committee members that have completed their terms - `→ `__ + `→ `__ - **As** a Stakeholder and as a Committee Member, **I want** the system to automatically expire committee members that have reached their term, **so that** only votes from active committee members count towards ratification. - - |image-CIP12| - Resign as committee member - `→ `__ + `→ `__ - **As** a committee member, **I want** to be able to resign my responsibilities, **so that** I can stop my responsibilities with the Cardano Community while minimizing the effects on the system. 
- - |image-CIP13| - State of no-confidence - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to submit a governance action to depose the current Constitutional Committee and put the system in a no-confidence state, **so that** the community must elect a new Constitutional Committee. - - |image-CIP14| - Constitutional Committee below committeeMinSize - `→ `__ + `→ `__ - **As** a Stakeholder, I want, when the number of non-expired committee members falls below the minimal size of the committee, only update-committee and no-confidence governance actions can be ratified. - - |image-CIP15| - Proposal policy - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the option for the constitution to be accompanied by a script, **so that** governance actions proposing parameter changes or treasury withdrawals that violate accepted limits are automatically restricted. - - |image-CIP16| - Delegate votes to a registered Delegate Representatives - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to delegate voting rights to a registered Delegate Representative (DRep), **so that** I can participate in the governance of the system backing up votes with my stake. - - |image-CIP17| - Delegate to always abstain - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to delegate my stake to the predefined option 'Abstain', **so that** my stake is marked as not participating in governance. - - |image-CIP18| - Delegate to no-confidence - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to delegate my stake to the predefined DRep 'No Confidence', **so that** my stake is counted as a 'Yes' vote on every 'No Confidence' action and a 'No' vote on every other action. 
- - |image-CIP19| - Inactive DReps - `→ `__ + `→ `__ - **As** an Ada holder, **I want** DReps to be considered inactive if they don’t vote for ``drepActivity``-many epochs, **so that** their delegated stake does not count towards the active voting stake, this to avoid leaving the system in a state where no governance action can pass. - - |image-CIP20| - DRep credentials - `→ `__ + `→ `__ - **As** a DRep, **I want** to be identified by a credential that can be a verification key (Ed25519) or a Native or Plutus Script, **so that** I can register and vote on governance actions with a signing key or with the evaluation of a script logic. - - |image-CIP21| - DRep registration certificate - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate and submit a registration certificate, **so that** the system recognizes my credentials and counts my votes on governance actions proportionally to the voting stake delegated to me. - - |image-CIP22| - Vote delegation certificate - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** to generate a vote delegation certificate, enabling me to delegate my voting rights to either a default or a registered DRep. - - |image-CIP23| - DRep retirement certificate - `→ `__ + `→ `__ - **As** a DRep, **I want** to generate and submit a retirement certificate, **so that** the system and stakeholders know that I’m no longer voting on governance actions and that stakeholders should re-delegate. - - |image-CIP24| - DRep retirement certificate is applied immediately after being accepted on-chain - `→ `__ + `→ `__ - **As** a DRep, **I want** my retirement certificate to be applied immediately upon acceptance on-chain, with the DRep deposit returned in the same transaction, ensuring no waiting time. - - |image-CIP25| - per-DRep stake distribution - `→ `__ + `→ `__ - **As** an Ada Holder, **I want** the system to calculate the stake distribution per DRep, **so that** each DRep's vote is weighted according to the actual stake delegated to them. 
This per-DRep stake distribution should use the stake snapshot from the last epoch boundary. - - |image-CIP26| - Bootstrapping phase - `→ `__ + `→ `__ - - - |image-CIP27| - Block rewards withdrawals for stake credentials that are not delegating to a DRep - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that when bootstrapping phase ends, the system blocks rewards withdrawals for stake credentials that are not delegating to a DRep. - - |image-CIP28| - Types of governance actions - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the governance system to allow 7 different types of governance actions: 1. Motion of no-confidence A motion to create a state of no-confidence in the current Constitutional Committee @@ -447,19 +445,19 @@ CIP1694 User Stories - |image-CIP29| - Governance action initiation - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** any stakeholder to be able to submit a governance action without restrictions, beyond those necessary for a transaction of this type to be considered valid. - - |image-CIP30| - Governance action initiation - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** Governance Actors to be required to provide a deposit in lovelace, **so that** I can prevent the network from being spammed with meaningless governance actions. This deposit should be returned once the action is either ratified or expired. - - |image-CIP31a| - Contents of governance actions - `→ `__ + `→ `__ - **As** a Governance Actor, **I want** every governance action to contain the following elements: - a deposit amount @@ -470,7 +468,7 @@ CIP1694 User Stories - |image-CIP31b| - New committee/threshold GA additional data - `→ `__ + `→ `__ - **As** a Governance actor creating a New Committee governance action, **I want** to specify the following additional data: - The set of verification key hash digests for members to be removed. 
@@ -480,73 +478,73 @@ CIP1694 User Stories - |image-CIP31c| - Update the constitution GA additional data - `→ `__ + `→ `__ - **As** a Governance actor creating a Update to the constitution GA, **I want** to include an anchor to the Constitution and an optional script hash of the proposal policy. - - |image-CIP31d| - Hardfork initiation GA additional data - `→ `__ + `→ `__ - **As** a Governance actor creating a hardfork initiation governance action, **I want** to include the new (greater) major protocol version. - - |image-CIP31e| - Protocol parameter changes GA additional data - `→ `__ + `→ `__ - **As** a Governance actor creating a protocol parameter change GA, **I want** to include the parameter to change and their new values. - - |image-CIP31f| - Treasury withdrawal GA additional data - `→ `__ + `→ `__ - **As** a governance actor creating a treasury withdrawal GA, **I want** to include a map from stake credentials to a positive number of Lovelace. - - |image-CIP32| - Governance action maximum lifetime - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** governance actions submitted in a transaction and admitted to the chain to remain active for up to govActionLifetime epochs, **so that** these actions are checked for ratification at every epoch boundary within their govActionLifetime. If an action gathers enough 'yes' votes to meet the thresholds of the governing bodies, it is ratified; otherwise, if it fails to gather sufficient 'yes' votes during the active period, the proposal expires and is removed. - - |image-CIP33| - Enactment of ratified actions - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** ratified actions to be automatically enacted at the next epoch transition following their ratification. - - |image-CIP34| - Governance action deposit returns - `→ `__ + `→ `__ - **As** a Governance Actor, **I want** governance action deposits to be returned immediately after ratification or expiration. 
- - |image-CIP35| - Deposits count towards voting power (stake) - `→ `__ + `→ `__ - Governance action deposits are added to the deposit pot and count towards the stake of the reward address to which they will be returned, to ensure that the proposer can back their own action with their voting power. - - |image-CIP36| - Proposal policy - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** governance actions that attempt to change protocol parameters or involve treasury withdrawals to include the supplementary script from the constitution in the witness set, either directly or via reference inputs, whenever such a script exists. - - |image-CIP37| - Multiple protocol parameter updates - `→ `__ + `→ `__ - **As** a Governance Actor, **I want** a governance action to allow multiple protocol parameter changes at once. - - |image-CIP38| - Delay of ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the ratification of all other governance actions to be delayed until the first epoch following the enactment of a successful motion of no-confidence, the election of a new Constitutional Committee, a constitutional change, or a hard-fork. - - |image-CIP39| - Motion of no confidence, requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a Motion of no confidence governance action requires: - DRep votes to be >= than DrepVotingThreshold for NoConfidence as a percentage of active voting stake. @@ -555,7 +553,7 @@ CIP1694 User Stories - |image-CIP40| - New committee/threshold (normal state), requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a New committee/threshold (normal state) governance action requires: - DRep votes to be >= than DrepVotingThreshold for CommitteeNormalState as a percentage of active voting stake. 
@@ -564,7 +562,7 @@ CIP1694 User Stories - |image-CIP41| - New committee/threshold (state of no-confidence), requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a New committee/threshold (state of no-confidence) governance action requires: - DRep votes to be >= than DrepVotingThreshold dvtCommitteeNoConfidence as a percentage of active voting stake. @@ -573,7 +571,7 @@ CIP1694 User Stories - |image-CIP42| - Update to the Constitution or proposal policy, requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a Update to the Constitution or proposal policy governance action requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -582,7 +580,7 @@ CIP1694 User Stories - |image-CIP43| - Hard-fork initiation, requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a Hard-fork initiation governance action requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -592,7 +590,7 @@ CIP1694 User Stories - |image-CIP44| - Protocol parameter changes, network group - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a network group protocol parameter change requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -601,7 +599,7 @@ CIP1694 User Stories - |image-CIP45| - Protocol parameter changes, economic group - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a economic group protocol parameter change requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -610,7 +608,7 @@ CIP1694 User Stories - |image-CIP46| - Protocol parameter changes, technical group - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a technical group protocol parameter change requires: - A minimum of CommitteeThreshold members must approve 
the Governance action @@ -619,7 +617,7 @@ CIP1694 User Stories - |image-CIP47| - Protocol parameter changes, governance group - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a governance group protocol parameter change requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -628,7 +626,7 @@ CIP1694 User Stories - |image-CIP48| - Treasury withdrawal, requirements for ratification - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the ratification of a Treasury withdrawal governance action requires: - A minimum of CommitteeThreshold members must approve the Governance action @@ -637,7 +635,7 @@ CIP1694 User Stories - |image-CIP49| - The network group protocol parameters - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the network group consist of: - maximum block body size (maxBBSize) @@ -651,7 +649,7 @@ CIP1694 User Stories - |image-CIP50| - The economic group protocol parameters - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the economic group consist of: - minimum fee coefficient (minFeeA) @@ -667,7 +665,7 @@ CIP1694 User Stories - |image-CIP51| - The technical group protocol parameters - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the technical group consist of: - pool pledge influence (a0) @@ -679,7 +677,7 @@ CIP1694 User Stories - |image-CIP52| - The governance group protocol parameters - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** that the governance group consist of: - governance voting thresholds @@ -693,19 +691,19 @@ CIP1694 User Stories - |image-CIP53| - Thresholds for Info is set to 100% - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the two thresholds for the Info action be set to 100% since setting it any lower would result in not being able to poll above the threshold. 
- - |image-CIP54| - Preventing accidental clash of actions of the same type - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** all governance actions, except for Treasury withdrawals and Infos, to include the governance action ID of the most recently enacted action of the same type, **so that** accidental clashes between actions can be prevented. - - |image-CIP55| - Governance action enactment prioritization - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** actions that have been ratified in the current epoch to be prioritized for enactment in the following order: - Motion of no-confidence @@ -719,31 +717,31 @@ CIP1694 User Stories - |image-CIP56| - Governance action order of enactment - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** governance actions to be enacted in the order of their acceptance to the chain. - - |image-CIP57| - Governance actions automatic enactment - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** ratified actions to be automatically enacted at the next epoch boundary. - - |image-CIP58| - No duplicate committee members - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** each pair of credentials in a committee to be unique, ensuring no duplicate committee members. - - |image-CIP59| - Governance action ID - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the transaction ID and index of the transaction that submits the governance action to the chain to serve as the governance action ID, **so that** this ID shall be used for casting votes. 
- - |image-CIP60| - Vote transactions contents - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** each vote transaction to consist of the following elements: - a governance action ID @@ -755,79 +753,79 @@ CIP1694 User Stories - |image-CIP61| - SPO and DREP votes are proportional to the stake delegated to them - `→ `__ + `→ `__ - For SPOs and DReps, the number of votes cast ('Yes', 'No', or 'Abstain') shall be proportional to the amount of Lovelace delegated to them at the time the action is checked for ratification. - - |image-CIP62| - CC votes - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** each current committee member to have one vote. - - |image-CIP63| - Active voting stake - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the active voting stake to be the total registered stake minus the abstain votes stake (both credential DReps and AlwaysAbstain). - - |image-CIP64| - Unregistered stake behaves like Abstain vote - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** unregistered stake to be treated as an abstain vote, **so that** it should not count towards the active voting stake. - - |image-CIP65| - Registered stake that did not vote behaves like a 'No' vote - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** any registered stake that did not submit a vote, whether through its DRep or SPO, to be counted as a 'No' vote. - - |image-CIP66| - New Plutus script purpose for scripts - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** a new voting purpose for Plutus scripts. - - |image-CIP67| - Any new vote overrides any older vote for the same credential and role - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** new votes on a governance action to override any previous votes for the same credential and role, **so that** individuals could change their minds. 
- - |image-CIP68| - Voting ends when an action is ratified and transactions containing further votes are invalid - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the voting period to terminate immediately after an action is ratified or expires. - - |image-CIP69| - Governance state tracking governance action progress - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the governance state section of the ledger to track the progress of governance actions to include: capturing votes, tracking the expiration epoch, and other relevant information until the actions are either ratified or expired. - - |image-CIP70| - Remove MIR certificates - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** MIR certificates to be removed, **so that** the only way to withdraw funds from the treasury is through a ratified Treasury Withdrawal governance action. - - |image-CIP71| - Remove genesis certificates - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** genesis certificates to be removed. In Conway era these are no longer useful or required. - - |image-CIP72| - Changes to the existing ledger rules - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the ledger to adjust its rules to accommodate for the governance features, i.e. Delegations, Certificates, Proposals, Votes, Ratification, Enactment. - - |image-CIP73| - Changes to the local state-query protocol - `→ `__ + `→ `__ - **As** a Stakeholder, **I want** the ledger to adjust the local state query protocol to accommodate for new queries that provide insights about governance, at least: - Governance actions currently staged for enactment @@ -837,7 +835,7 @@ CIP1694 User Stories - |image-CIP74| - Ratification of Security related parameters - `→ `__ + `→ `__ - The security relevant protocol parameters require the approval of the three governing bodies. 
- maxBBSize @@ -854,97 +852,97 @@ CIP1694 User Stories - |image-CIP75| - Auditor review of current network parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the current state of the network parameters, **so that** I can ensure they align with the governance decisions. - - |image-CIP76| - Auditor review of current technical parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the current technical parameters, including consensus and cost models, **so that** I can ensure their compliance with the network parameters specified. - - |image-CIP77| - Auditor review of current economic parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the current economic parameters, including parameters affecting transaction fees, taxes, and staking rewards, **so that** I can assess their impact on the network's economy. - - |image-CIP78| - Auditor review of current governance parameters and voting thresholds - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the current governance parameters and voting thresholds for governance actions to fail or ratify, **so that** I can verify their appropriateness and adherence to governance rules, adherence to the constitution, and enforcement of voting thresholds. - - |image-CIP79| - Auditor review of current state of the treasury - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the current state of the treasury, including the total amount of Ada, **so that** I can assess the current balance and the system's financial health. - - |image-CIP80| - Auditor needs access to historical proposals affecting network parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to access and review the history of proposals related to network parameters, including their outcomes, **so that** I can track governance effectiveness over time. 
- - |image-CIP81| - Auditor needs access to historical proposals affecting technical parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to access and review the history of proposals related to technical parameters, including both ratified and failed proposals, **so that** I can understand technical evolution and parameter change impact. - - |image-CIP82| - Auditor needs access to historical proposals affecting economic parameters - `→ `__ + `→ `__ - **As** an Auditor, **I want** to access and review the history of proposals related to economic parameters, focusing on their ratification status, **so that** I can evaluate economic policy changes. - - |image-CIP83| - Auditor needs access to the historical record of all governance proposals and voting thresholds - `→ `__ + `→ `__ - **As** an Auditor, **I want** to access the history of changes to governance parameters, the proposals, and the voting thresholds, **so that** I can audit the changes made over time and verify compliance with governance rules, and evaluate the impact of these changes on governance actions' outcomes, with the primary purpose to verify voting thresholds were enforced. - - |image-CIP84| - Auditor needs access to the history of treasury withdrawals - `→ `__ + `→ `__ - **As** an Auditor, **I want** to audit the history of treasury withdrawals, including amounts, dates, and recipient wallet addresses, **so that** I can ensure transparency and accountability. - - |image-CIP85| - DRep Id is blake2b-224 of drep vkey - `→ `__ + `→ `__ - **As** a DRep, **I want** to verify that a proper DRep Id is generated, i.e. it should be the outcome of a blake2b-224 hash of the DRep verification key. - - |image-CIP86| - Change delegation - `→ `__ + `→ `__ - **As** a stakeholder, **I want** to change my voting delegation to a different DRep. After I have first delegated to a DRep, say DRep 1, and then change my delegation to another DRep 2, my vote delegation should be updated to DRep 2. 
- - |image-CIP87| - No multiple delegation - `→ `__ + `→ `__ - **As** a stakeholder, I should not be able to submit multiple voting delegations to different DReps. The voting rights should be delegated to a single DRep only, even if I submit multiple voting delegation certificates. - - |image-CIP88| - No delegation without stake registration - `→ `__ + `→ `__ - **As** a stakeholder, I should not be able to delegate my votes without registering my stake address first. - - |image-CIP89| - No retirement before registration - `→ `__ + `→ `__ - **As** a DRep, I should not be able to retire my DRep before registering it. - - |image-CIP90| - No multiple DRep registration - `→ `__ + `→ `__ - **As** a DRep, I should not be able to register my DRep multiple times using the same DRep credentials. @@ -964,271 +962,271 @@ Governance guardrails User Stories - |image-GR001| - Prevent an unconstitutional `txFeePerByte` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `txFeePerByte`. - - |image-GR002| - Prevent an unconstitutional `txFeeFixed` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `txFeeFixed`. - - |image-GR003| - Prevent an unconstitutional `monetaryExpansion` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `monetaryExpansion`. - - |image-GR004| - Prevent an unconstitutional `treasuryCut` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `treasuryCut`. 
- - |image-GR005| - Prevent an unconstitutional `minPoolCost` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `minPoolCost`. - - |image-GR006| - Prevent an unconstitutional `utxoCostPerByte` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `utxoCostPerByte`. - - |image-GR007a| - Prevent an unconstitutional `executionUnitPrices [priceMemory]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `executionUnitPrices[priceMemory]`. - - |image-GR007b| - Prevent an unconstitutional `executionUnitPrices [priceSteps]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `executionUnitPrices[priceSteps]`. - - |image-GR008| - Prevent an unconstitutional `maxBlockBodySize` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxBlockBodySize`. - - |image-GR009a| - Prevent an unconstitutional `maxTxExecutionUnits [memory]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxTxExecutionUnits[memory]`. - - |image-GR009b| - Prevent an unconstitutional `maxTxExecutionUnits [steps]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxTxExecutionUnits[steps]`. 
- - |image-GR010a| - Prevent an unconstitutional `maxBlockExecutionUnits [memory]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxBlockExecutionUnits[memory]`. - - |image-GR010b| - Prevent an unconstitutional `maxBlockExecutionUnits [steps]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxBlockExecutionUnits[steps]`. - - |image-GR011| - Prevent an unconstitutional `maxValueSize` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxValueSize`. - - |image-GR012| - Prevent an unconstitutional `collateralPercentage` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `collateralPercentage`. - - |image-GR013| - Prevent an unconstitutional `maxCollateralInputs` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxCollateralInputs`. - - |image-GR014a| - Prevent an unconstitutional `poolVotingThresholds [motionNoConfidence]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolVotingThresholds[motionNoConfidence]`. - - |image-GR014b| - Prevent an unconstitutional `poolVotingThresholds [committeeNormal]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolVotingThresholds[committeeNormal]`. 
- - |image-GR014c| - Prevent an unconstitutional `poolVotingThresholds [committeeNoConfidence]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolVotingThresholds[committeeNoConfidence]`. - - |image-GR014d| - Prevent an unconstitutional `poolVotingThresholds [hardForkInitiation]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolVotingThresholds[hardForkInitiation]`. - - |image-GR014e| - Prevent an unconstitutional `poolVotingThresholds [ppSecurityGroup]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolVotingThresholds[ppSecurityGroup]`. - - |image-GR015a| - Prevent an unconstitutional `dRepVotingThresholds [motionNoConfidence]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[motionNoConfidence]`. - - |image-GR015b| - Prevent an unconstitutional `dRepVotingThresholds [committeeNormal]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[committeeNormal]`. - - |image-GR015c| - Prevent an unconstitutional `dRepVotingThresholds [committeeNoConfidence]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[committeeNoConfidence]`. 
- - |image-GR015d| - Prevent an unconstitutional `dRepVotingThresholds [updateToConstitution]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[updateToConstitution]`. - - |image-GR015e| - Prevent an unconstitutional `dRepVotingThresholds [hardForkInitiation]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[hardForkInitiation]`. - - |image-GR015f| - Prevent an unconstitutional `dRepVotingThresholds [ppNetworkGroup]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[ppNetworkGroup]`. - - |image-GR015g| - Prevent an unconstitutional `dRepVotingThresholds [ppEconomicGroup]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[ppEconomicGroup]`. - - |image-GR015h| - Prevent an unconstitutional `dRepVotingThresholds [ppTechnicalGroup]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[ppTechnicalGroup]`. - - |image-GR015i| - Prevent an unconstitutional `dRepVotingThresholds [ppGovGroup]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[ppGovGroup]`. 
- - |image-GR015j| - Prevent an unconstitutional `dRepVotingThresholds [treasuryWithdrawal]` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepVotingThresholds[treasuryWithdrawal]`. - - |image-GR016| - Prevent an unconstitutional `committeeMinSize` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `committeeMinSize`. - - |image-GR017| - Prevent an unconstitutional `committeeMaxTermLimit` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `committeeMaxTermLimit`. - - |image-GR018| - Prevent an unconstitutional `govActionLifetime` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `govActionLifetime`. - - |image-GR019| - Prevent an unconstitutional `maxTxSize` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxTxSize`. - - |image-GR020| - Prevent an unconstitutional `govDeposit` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `govDeposit`. - - |image-GR021| - Prevent an unconstitutional `dRepDeposit` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepDeposit`. 
- - |image-GR022| - Prevent an unconstitutional `dRepActivity` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `dRepActivity`. - - |image-GR023| - Prevent an unconstitutional `minFeeRefScriptCoinsPerByte` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `minFeeRefScriptCoinsPerByte`. - - |image-GR024| - Prevent an unconstitutional `maxBlockHeaderSize` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `maxBlockHeaderSize`. - - |image-GR025| - Prevent an unconstitutional `stakeAddressDeposit` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `stakeAddressDeposit`. - - |image-GR026| - Prevent an unconstitutional `stakePoolDeposit` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `stakePoolDeposit`. - - |image-GR027| - Prevent an unconstitutional `poolRetireMaxEpoch` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolRetireMaxEpoch`. - - |image-GR028| - Prevent an unconstitutional `stakePoolTargetNum` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `stakePoolTargetNum`. 
- - |image-GR029| - Prevent an unconstitutional `poolPledgeInfluence` value - `→ `__ + `→ `__ - As an ADA holder, when submitting an update protocol parameters proposal, the governance guardrail should prevent an unconstitutional value for `poolPledgeInfluence`. @@ -1349,397 +1347,408 @@ DB Sync - Conway related tables - |image-off_chain_vote_fetch_error| - Errors while fetching or validating offchain Voting Anchor metadata. `→ `__ + - + + - |image-param_proposal| + - A table containing block chain parameter change proposals. + `→ `__ + - + + - |image-epoch_param| + - The accepted protocol parameters for an epoch. + `→ `__ .. |Success Badge| image:: https://img.shields.io/badge/success-green .. |Failure Badge| image:: https://img.shields.io/badge/failure-red .. |Partial Coverage Badge| image:: https://img.shields.io/badge/partial_coverage-yellow .. |Uncovered Badge| image:: https://img.shields.io/badge/uncovered-grey +.. |Unplanned Badge| image:: https://img.shields.io/badge/unplanned-silver .. |image-CLI1| image:: https://img.shields.io/badge/CLI001-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L497 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L493 .. |image-CLI2| image:: https://img.shields.io/badge/CLI002-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L358 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L362 .. 
|image-CLI3| image:: https://img.shields.io/badge/CLI003-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L344 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L354 .. |image-CLI4| image:: https://img.shields.io/badge/CLI004-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L344 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L354 .. |image-CLI5| image:: https://img.shields.io/badge/CLI005-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L344 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L354 .. |image-CLI6| image:: https://img.shields.io/badge/CLI006-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L344 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L354 .. 
|image-CLI7| image:: https://img.shields.io/badge/CLI007-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L517 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L527 .. |image-CLI8| image:: https://img.shields.io/badge/CLI008-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L338 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L396 .. |image-CLI9| image:: https://img.shields.io/badge/CLI009-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L338 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L396 .. |image-CLI10| image:: https://img.shields.io/badge/CLI010-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L338 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L396 .. |image-CLI11| image:: https://img.shields.io/badge/CLI011-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L408 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L465 .. 
|image-CLI12| image:: https://img.shields.io/badge/CLI012-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L331 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L389 .. |image-CLI13| image:: https://img.shields.io/badge/CLI013-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L381 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L385 .. |image-CLI14| image:: https://img.shields.io/badge/CLI014-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L455 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L465 .. |image-CLI15| image:: https://img.shields.io/badge/CLI015-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L97 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L137 .. 
|image-CLI16| image:: https://img.shields.io/badge/CLI016-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L80 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L89 .. |image-CLI17| image:: https://img.shields.io/badge/CLI017-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L687 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L689 .. |image-CLI18| image:: https://img.shields.io/badge/CLI018-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 .. |image-CLI19| image:: https://img.shields.io/badge/CLI019-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_hardfork.py#L93 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_hardfork.py#L93 .. 
|image-CLI20| image:: https://img.shields.io/badge/CLI020-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L545 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L541 .. |image-CLI21| image:: https://img.shields.io/badge/CLI021-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L146 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L154 .. |image-CLI22| image:: https://img.shields.io/badge/CLI022-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L259 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L286 .. |image-CLI23| image:: https://img.shields.io/badge/CLI023-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L101 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L110 .. |image-CLI24| image:: https://img.shields.io/badge/CLI024-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L192 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L200 .. 
|image-CLI25| image:: https://img.shields.io/badge/CLI025-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L475 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L494 .. |image-CLI26| image:: https://img.shields.io/badge/CLI026-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L564 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L584 .. |image-CLI27| image:: https://img.shields.io/badge/CLI027-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L930 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1078 .. |image-CLI28| image:: https://img.shields.io/badge/CLI028-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L973 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1126 .. 
|image-CLI29| image:: https://img.shields.io/badge/CLI029-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L939 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1087 .. |image-CLI30| image:: https://img.shields.io/badge/CLI030-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1095 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1281 .. |image-CLI31| image:: https://img.shields.io/badge/CLI031-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L126 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L135 .. |image-CLI32| image:: https://img.shields.io/badge/CLI032-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. |image-CLI33| image:: https://img.shields.io/badge/CLI033-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L371 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L429 .. 
|image-CLI34| image:: https://img.shields.io/badge/CLI034-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1021 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1176 .. |image-CLI35| image:: https://img.shields.io/badge/CLI035-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L988 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1141 .. |image-CLI36| image:: https://img.shields.io/badge/CLI036-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L524 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L520 .. |image-CIP1a| image:: https://img.shields.io/badge/CIP001a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L497 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L493 .. 
|image-CIP1b| image:: https://img.shields.io/badge/CIP001b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L497 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L493 .. |image-CIP2| image:: https://img.shields.io/badge/CIP002-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. |image-CIP3| image:: https://img.shields.io/badge/CIP003-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L344 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L354 .. |image-CIP4| image:: https://img.shields.io/badge/CIP004-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. 
|image-CIP5| image:: https://img.shields.io/badge/CIP005-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L567 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L577 .. |image-CIP6| image:: https://img.shields.io/badge/CIP006-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L687 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L689 .. |image-CIP7| image:: https://img.shields.io/badge/CIP007-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L227 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L230 .. |image-CIP8| image:: https://img.shields.io/badge/CIP008-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L1188 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L1184 .. 
|image-CIP9| image:: https://img.shields.io/badge/CIP009-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L877 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L889 .. |image-CIP10| image:: https://img.shields.io/badge/CIP010-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L877 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L889 .. |image-CIP11| image:: https://img.shields.io/badge/CIP011-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L833 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L845 .. |image-CIP12| image:: https://img.shields.io/badge/CIP012-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L517 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L527 .. 
|image-CIP13| image:: https://img.shields.io/badge/CIP013-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 .. |image-CIP14| image:: https://img.shields.io/badge/CIP014-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L361 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L347 .. |image-CIP15| image:: https://img.shields.io/badge/CIP015-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L196 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L196 .. |image-CIP16| image:: https://img.shields.io/badge/CIP016-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L927 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1075 .. 
|image-CIP17| image:: https://img.shields.io/badge/CIP017-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L927 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1075 .. |image-CIP18| image:: https://img.shields.io/badge/CIP018-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L927 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1075 .. |image-CIP19| image:: https://img.shields.io/badge/CIP019-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1805 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1992 .. |image-CIP20| image:: https://img.shields.io/badge/CIP020-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1015 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1170 .. |image-CIP21| image:: https://img.shields.io/badge/CIP021-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L338 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L396 .. 
|image-CIP22| image:: https://img.shields.io/badge/CIP022-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L939 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1087 .. |image-CIP23| image:: https://img.shields.io/badge/CIP023-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L408 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L465 .. |image-CIP24| image:: https://img.shields.io/badge/CIP024-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L432 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L489 .. |image-CIP25| image:: https://img.shields.io/badge/CIP025-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1021 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1176 .. |image-CIP26| image:: https://img.shields.io/badge/CIP026-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L211 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L214 .. 
|image-CIP27| image:: https://img.shields.io/badge/CIP027-grey :target: https://github.com/CIP027-404 .. |image-CIP28| image:: https://img.shields.io/badge/CIP028-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L381 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L385 .. |image-CIP29| image:: https://img.shields.io/badge/CIP029-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 .. |image-CIP30| image:: https://img.shields.io/badge/CIP030-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L105 .. |image-CIP31a| image:: https://img.shields.io/badge/CIP031a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L188 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L191 .. 
|image-CIP31b| image:: https://img.shields.io/badge/CIP031b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L455 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L465 .. |image-CIP31c| image:: https://img.shields.io/badge/CIP031c-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L381 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L385 .. |image-CIP31d| image:: https://img.shields.io/badge/CIP031d-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_hardfork.py#L93 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_hardfork.py#L93 .. |image-CIP31e| image:: https://img.shields.io/badge/CIP031e-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L687 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L689 .. 
|image-CIP31f| image:: https://img.shields.io/badge/CIP031f-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L97 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L137 .. |image-CIP32| image:: https://img.shields.io/badge/CIP032-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L266 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L257 .. |image-CIP33| image:: https://img.shields.io/badge/CIP033-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L330 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L354 .. |image-CIP34| image:: https://img.shields.io/badge/CIP034-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1261 -.. |image-CIP35| image:: https://img.shields.io/badge/CIP035-grey + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L974 +.. |image-CIP35| image:: https://img.shields.io/badge/CIP035-silver :target: https://github.com/CIP035-404 .. 
|image-CIP36| image:: https://img.shields.io/badge/CIP036-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1608 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1607 .. |image-CIP37| image:: https://img.shields.io/badge/CIP037-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1069 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1072 .. |image-CIP38| image:: https://img.shields.io/badge/CIP038-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L824 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L836 .. |image-CIP39| image:: https://img.shields.io/badge/CIP039-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L192 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L193 .. 
|image-CIP40| image:: https://img.shields.io/badge/CIP040-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. |image-CIP41| image:: https://img.shields.io/badge/CIP041-green :target: https://github.com/CIP41-404 .. |image-CIP42| image:: https://img.shields.io/badge/CIP042-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L429 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L435 .. |image-CIP43| image:: https://img.shields.io/badge/CIP043-yellow - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_hardfork.py#L178 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_hardfork.py#L179 .. |image-CIP44| image:: https://img.shields.io/badge/CIP044-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L680 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L682 .. 
|image-CIP45| image:: https://img.shields.io/badge/CIP045-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L680 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L682 .. |image-CIP46| image:: https://img.shields.io/badge/CIP046-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L680 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L682 .. |image-CIP47| image:: https://img.shields.io/badge/CIP047-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L680 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L682 .. |image-CIP48| image:: https://img.shields.io/badge/CIP048-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L278 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L325 .. 
|image-CIP49| image:: https://img.shields.io/badge/CIP049-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L277 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L279 .. |image-CIP50| image:: https://img.shields.io/badge/CIP050-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L277 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L279 .. |image-CIP51| image:: https://img.shields.io/badge/CIP051-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L277 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L279 .. |image-CIP52| image:: https://img.shields.io/badge/CIP052-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L277 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L279 .. 
|image-CIP53| image:: https://img.shields.io/badge/CIP053-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L146 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L154 .. |image-CIP54| image:: https://img.shields.io/badge/CIP054-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L687 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L689 .. |image-CIP55| image:: https://img.shields.io/badge/CIP055-grey :target: https://github.com/CIP055-404 .. |image-CIP56| image:: https://img.shields.io/badge/CIP056-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1120 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1123 .. |image-CIP57| image:: https://img.shields.io/badge/CIP057-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L266 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L257 .. 
|image-CIP58| image:: https://img.shields.io/badge/CIP058-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L455 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L465 .. |image-CIP59| image:: https://img.shields.io/badge/CIP059-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L146 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L154 .. |image-CIP60| image:: https://img.shields.io/badge/CIP060-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L680 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L682 .. |image-CIP61| image:: https://img.shields.io/badge/CIP061-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. |image-CIP62| image:: https://img.shields.io/badge/CIP062-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1093 -.. 
|image-CIP63| image:: https://img.shields.io/badge/CIP063-grey + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1096 +.. |image-CIP63| image:: https://img.shields.io/badge/CIP063-silver :target: https://github.com/CIP063-404 .. |image-CIP64| image:: https://img.shields.io/badge/CIP064-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L738 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L749 .. |image-CIP65| image:: https://img.shields.io/badge/CIP065-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L851 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L853 .. |image-CIP66| image:: https://img.shields.io/badge/CIP066-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L669 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L668 .. |image-CIP67| image:: https://img.shields.io/badge/CIP067-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L718 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L729 .. 
|image-CIP68| image:: https://img.shields.io/badge/CIP068-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1161 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1163 .. |image-CIP69| image:: https://img.shields.io/badge/CIP069-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L177 -.. |image-CIP70| image:: https://img.shields.io/badge/CIP070-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L693 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_no_confidence.py#L178 +.. |image-CIP70| image:: https://img.shields.io/badge/CIP070-grey + :target: https://github.com/CIP070-404 .. |image-CIP71| image:: https://img.shields.io/badge/CIP071-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_conway.py#L32 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_conway.py#L32 .. |image-CIP72| image:: https://img.shields.io/badge/CIP072-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L497 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L493 .. 
|image-CIP73| image:: https://img.shields.io/badge/CIP073-yellow - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_constitution.py#L497 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L493 .. |image-CIP74| image:: https://img.shields.io/badge/CIP074-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L898 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L900 .. |image-CIP75| image:: https://img.shields.io/badge/CIP075-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1349 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1353 .. |image-CIP76| image:: https://img.shields.io/badge/CIP076-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1349 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1353 .. 
|image-CIP77| image:: https://img.shields.io/badge/CIP077-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1349 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1353 .. |image-CIP78| image:: https://img.shields.io/badge/CIP078-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1349 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1353 .. |image-CIP79| image:: https://img.shields.io/badge/CIP079-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L346 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L370 .. |image-CIP80| image:: https://img.shields.io/badge/CIP080-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L898 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L900 .. 
|image-CIP81| image:: https://img.shields.io/badge/CIP081-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L898 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L900 .. |image-CIP82| image:: https://img.shields.io/badge/CIP082-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L898 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L900 .. |image-CIP83| image:: https://img.shields.io/badge/CIP083-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L898 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L900 .. |image-CIP84| image:: https://img.shields.io/badge/CIP084-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L362 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L386 .. 
|image-CIP85| image:: https://img.shields.io/badge/CIP085-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L272 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L330 .. |image-CIP86| image:: https://img.shields.io/badge/CIP086-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L1303 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1490 .. |image-CIP87| image:: https://img.shields.io/badge/CIP087-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L637 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L793 .. |image-CIP88| image:: https://img.shields.io/badge/CIP088-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L717 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L868 .. |image-CIP89| image:: https://img.shields.io/badge/CIP089-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L776 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L922 .. 
|image-CIP90| image:: https://img.shields.io/badge/CIP090-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L834 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L980 .. |image-GR001| image:: https://img.shields.io/badge/GR001-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L669 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L668 .. |image-GR002| image:: https://img.shields.io/badge/GR002-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L685 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L684 .. |image-GR003| image:: https://img.shields.io/badge/GR003-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L702 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L701 .. 
|image-GR004| image:: https://img.shields.io/badge/GR004-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L717 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L716 .. |image-GR005| image:: https://img.shields.io/badge/GR005-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L734 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L733 .. |image-GR006| image:: https://img.shields.io/badge/GR006-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L751 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L750 .. |image-GR007a| image:: https://img.shields.io/badge/GR007a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L781 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L780 .. 
|image-GR007b| image:: https://img.shields.io/badge/GR007b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L790 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L789 .. |image-GR008| image:: https://img.shields.io/badge/GR008-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L804 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L803 .. |image-GR009a| image:: https://img.shields.io/badge/GR009a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L836 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L835 .. |image-GR009b| image:: https://img.shields.io/badge/GR009b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L836 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L835 .. 
|image-GR010a| image:: https://img.shields.io/badge/GR010a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L904 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L903 .. |image-GR010b| image:: https://img.shields.io/badge/GR010b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L904 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L903 .. |image-GR011| image:: https://img.shields.io/badge/GR011-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L957 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L956 .. |image-GR012| image:: https://img.shields.io/badge/GR012-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L974 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L973 .. 
|image-GR013| image:: https://img.shields.io/badge/GR013-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L991 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L990 .. |image-GR014a| image:: https://img.shields.io/badge/GR014a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1039 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1038 .. |image-GR014b| image:: https://img.shields.io/badge/GR014b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1053 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1052 .. |image-GR014c| image:: https://img.shields.io/badge/GR014c-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1067 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1066 .. 
|image-GR014d| image:: https://img.shields.io/badge/GR014d-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1081 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1080 .. |image-GR014e| image:: https://img.shields.io/badge/GR014e-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1095 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1094 .. |image-GR015a| image:: https://img.shields.io/badge/GR015a-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1175 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1174 .. |image-GR015b| image:: https://img.shields.io/badge/GR015b-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1194 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1193 .. 
|image-GR015c| image:: https://img.shields.io/badge/GR015c-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1213 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1212 .. |image-GR015d| image:: https://img.shields.io/badge/GR015d-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1232 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1231 .. |image-GR015e| image:: https://img.shields.io/badge/GR015e-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1251 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1250 .. |image-GR015f| image:: https://img.shields.io/badge/GR015f-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1270 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1269 .. 
|image-GR015g| image:: https://img.shields.io/badge/GR015g-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1289 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1288 .. |image-GR015h| image:: https://img.shields.io/badge/GR015h-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1308 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1307 .. |image-GR015i| image:: https://img.shields.io/badge/GR015i-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1327 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1326 .. |image-GR015j| image:: https://img.shields.io/badge/GR015j-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1346 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1345 .. 
|image-GR016| image:: https://img.shields.io/badge/GR016-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1371 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1370 .. |image-GR017| image:: https://img.shields.io/badge/GR017-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1388 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1387 .. |image-GR018| image:: https://img.shields.io/badge/GR018-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1405 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1404 .. |image-GR019| image:: https://img.shields.io/badge/GR019-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1420 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1419 .. 
|image-GR020| image:: https://img.shields.io/badge/GR020-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1435 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1434 .. |image-GR021| image:: https://img.shields.io/badge/GR021-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1450 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1449 .. |image-GR022| image:: https://img.shields.io/badge/GR022-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1467 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1466 .. |image-GR023| image:: https://img.shields.io/badge/GR023-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1484 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1483 .. 
|image-GR024| image:: https://img.shields.io/badge/GR024-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1501 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1500 .. |image-GR025| image:: https://img.shields.io/badge/GR025-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1518 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1517 .. |image-GR026| image:: https://img.shields.io/badge/GR026-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1535 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1534 .. |image-GR027| image:: https://img.shields.io/badge/GR027-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1552 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1551 .. 
|image-GR028| image:: https://img.shields.io/badge/GR028-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1569 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1568 .. |image-GR029| image:: https://img.shields.io/badge/GR029-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1586 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_guardrails.py#L1585 .. |image-drep_hash| image:: https://img.shields.io/badge/drep_hash-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L386 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L444 .. |image-committee_hash| image:: https://img.shields.io/badge/committee_hash-green :target: https://github.com/committe_hash-404 -.. |image-delegation_vote| image:: https://img.shields.io/badge/delegation_vote-grey - :target: https://github.com/delegation_vote-404 +.. |image-delegation_vote| image:: https://img.shields.io/badge/delegation_vote-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1210 .. 
|image-committee_registration| image:: https://img.shields.io/badge/committee_registration-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L980 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L1005 .. |image-committee_de_registration| image:: https://img.shields.io/badge/committee_de_registration-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L980 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L1005 .. |image-drep_registration| image:: https://img.shields.io/badge/drep_registration-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_drep.py#L386 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L444 .. |image-voting_anchor| image:: https://img.shields.io/badge/voting_anchor-grey :target: https://github.com/voting_anchor-404 .. |image-gov_action_proposal| image:: https://img.shields.io/badge/gov_action_proposal-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L737 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L739 .. 
|image-treasury_withdrawal| image:: https://img.shields.io/badge/treasury_withdrawal-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L362 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py#L386 .. |image-committee| image:: https://img.shields.io/badge/committee-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L264 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L274 .. |image-committee_member| image:: https://img.shields.io/badge/committee_member-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_committee.py#L264 -.. |image-constitution| image:: https://img.shields.io/badge/constitution-grey - :target: https://github.com/constitution-404 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_committee.py#L274 +.. |image-constitution| image:: https://img.shields.io/badge/constitution-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_constitution.py#L552 .. |image-voting_procedure| image:: https://img.shields.io/badge/voting_procedure-green - :target: https://github.com/IntersectMBO/cardano-node-tests/blob/5b261a9a9bd20e7c39101ae224ab65011bf1bfe3/cardano_node_tests/tests/tests_conway/test_info.py#L267 -.. 
|image-drep_distr| image:: https://img.shields.io/badge/drep_distr-grey - :target: https://github.com/drep_distr-404 + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_info.py#L294 +.. |image-drep_distr| image:: https://img.shields.io/badge/drep_distr-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L1220 .. |image-off_chain_vote_data| image:: https://img.shields.io/badge/off_chain_vote_data-grey :target: https://github.com/off_chain_vote_data-404 .. |image-off_chain_vote_gov_action_data| image:: https://img.shields.io/badge/off_chain_vote_gov_action_data-grey @@ -1752,5 +1761,9 @@ DB Sync - Conway related tables :target: https://github.com/off_chain_vote_reference-404 .. |image-off_chain_vote_external_update| image:: https://img.shields.io/badge/off_chain_vote_external_update-grey :target: https://github.com/off_chain_vote_external_update-404 -.. |image-off_chain_vote_fetch_error| image:: https://img.shields.io/badge/off_chain_vote_fetch_error-grey - :target: https://github.com/off_chain_vote_fetch_error-404 +.. |image-off_chain_vote_fetch_error| image:: https://img.shields.io/badge/off_chain_vote_fetch_error-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_drep.py#L605 +.. |image-param_proposal| image:: https://img.shields.io/badge/param_proposal-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L940 +.. 
|image-epoch_param| image:: https://img.shields.io/badge/epoch_param-green + :target: https://github.com/IntersectMBO/cardano-node-tests/blob/2528154e4ce17b660e327ea965fac93ad0b67e01/cardano_node_tests/tests/tests_conway/test_pparam_update.py#L1247 From 37855866cbfbedaf4ca25a0c0c64bec5796a817d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 10 Oct 2024 11:51:05 +0200 Subject: [PATCH 014/168] feat: make test env setup reusable in another shells Running test env setup recreates the whole test env. Sometimes it is useful to activate the same test env in multiple shells. This change enables that. --- README.md | 6 ++++++ prepare_test_env.sh | 24 +++++++++++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 1d88581e2..ba086181c 100644 --- a/README.md +++ b/README.md @@ -74,6 +74,12 @@ Sometimes it is useful to run individual tests and keep the local cluster runnin ./dev_workdir/conway_fast/stop-cluster ``` +To reuse the existing testing environment in another nix shell, source the `.source` file that was generated during setup: + +```sh +source ./dev_workdir/.source +``` + ## Variables for configuring testrun Tests execution can be configured using env variables. diff --git a/prepare_test_env.sh b/prepare_test_env.sh index 70888e88e..957c9cdcb 100755 --- a/prepare_test_env.sh +++ b/prepare_test_env.sh @@ -52,10 +52,32 @@ if [ ! 
-d "$_scripts_dest" ]; then fi unset _scripts_dest +cat > "$WORKDIR/.source" <&2 +fi +source "$VIRTUAL_ENV/bin/activate" +PYTHONPATH="$(echo "\$VIRTUAL_ENV"/lib/python3*/site-packages):\$PYTHONPATH" +export PYTHONPATH +export CARDANO_NODE_SOCKET_PATH="$PWD/dev_workdir/state-cluster0/bft1.socket" +export TMPDIR="$PWD/dev_workdir/tmp" +export DEV_CLUSTER_RUNNING=1 +export CLUSTERS_COUNT=1 +export FORBID_RESTART=1 +export NO_ARTIFACTS=1 +export CLUSTER_ERA="$CLUSTER_ERA" +export COMMAND_ERA="${COMMAND_ERA:-""}" +EoF + echo echo -echo "----------------------------------------" +echo "------------------------" +echo "| Test Env Ready |" +echo "------------------------" echo echo "To start local testnet, run:" echo "$WORKDIR/${CLUSTER_ERA}_fast/start-cluster" echo +echo "To reuse the test env in another shell, source the env with:" +echo "source $WORKDIR/.source" +echo From 22b7117be4f0e6c31c9b992d540f48e035d9ddb2 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 17 Oct 2024 16:10:40 +0200 Subject: [PATCH 015/168] refactor(tests): use subtests for guardrail scenarios Refactor governance guardrail tests to use subtests for each scenario. This makes it possible to schedule all guardrails tests to a single pytest worker, and respin the cluster instance immediately after the tests are finished. 
- Removed individual test methods in TestGovernanceGuardrails class - Added get_subtests function to yield guardrail test scenarios - Updated test_guardrails method to run subtests --- .../tests/tests_conway/test_guardrails.py | 281 ++++++++---------- 1 file changed, 124 insertions(+), 157 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index 0e8c75a9a..4580de8d6 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -11,6 +11,7 @@ import allure import pytest +import pytest_subtests from cardano_clusterlib import clusterlib from cardano_node_tests.cluster_management import cluster_management @@ -48,7 +49,6 @@ def cluster_guardrails( Cleanup (== respin the cluster instance) after the tests are finished. """ cluster_obj = cluster_manager.get( - mark="guardrails", use_resources=cluster_management.Resources.ALL_POOLS, lock_resources=[cluster_management.Resources.COMMITTEE, cluster_management.Resources.DREPS], cleanup=True, @@ -66,13 +66,11 @@ def pool_user( ) -> clusterlib.PoolUser: """Create a pool user for "lock governance".""" cluster, __ = cluster_guardrails - key = helpers.get_current_line_str() name_template = common.get_test_id(cluster) return conway_common.get_registered_pool_user( cluster_manager=cluster_manager, name_template=name_template, cluster_obj=cluster, - caching_key=key, ) @@ -83,15 +81,10 @@ def payment_addr( ) -> clusterlib.AddressRecord: """Create new payment address.""" cluster, __ = cluster_guardrails - with cluster_manager.cache_fixture() as fixture_cache: - if fixture_cache.value: - return fixture_cache.value # type: ignore - - addr = clusterlib_utils.create_payment_addr_records( - f"payment_addr_{cluster_manager.cluster_instance_num}", - cluster_obj=cluster, - )[0] - fixture_cache.value = addr + addr = clusterlib_utils.create_payment_addr_records( + 
f"payment_addr_{cluster_manager.cluster_instance_num}", + cluster_obj=cluster, + )[0] # Fund source address clusterlib_utils.fund_from_faucet( @@ -648,22 +641,13 @@ def perform_predicates_check( ) -class TestGovernanceGuardrails: - """Test governance guardrails using plutus script constitution. - - * Enact a new constitution with a plutus script - * Propose parameter change for different guardrail checks - * Check that the guardrails are enforced - * Expecting plutus error in case of invalid proposals - * Expecting valid proposals to be accepted - * Data file used : data/defaultConstitution.json +def get_subtests() -> tp.Generator[tp.Callable, None, None]: # noqa: C901 + """Get the guardrails scenarios. + + The scenarios are executed as subtests in the `test_guardrails` test. """ - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_tx_fee_per_byte( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + def tx_fee_per_byte(cluster_with_constitution: ClusterWithConstitutionRecord): """Test txFeePerByte guardrails defined in the key "0" of default constitution.""" _url = helpers.get_vcs_link() [r.start(url=_url) for r in (reqc.gr001, reqc.cip066)] @@ -677,9 +661,9 @@ def test_guardrail_tx_fee_per_byte( ) [r.success() for r in (reqc.gr001, reqc.cip066)] - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_tx_fee_fixed(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield tx_fee_per_byte + + def tx_fee_fixed(cluster_with_constitution: ClusterWithConstitutionRecord): """Test txFeeFixed guardrails defined in the key "1" of default constitution.""" reqc.gr002.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -692,11 +676,9 @@ def test_guardrail_tx_fee_fixed(self, cluster_with_constitution: ClusterWithCons ) reqc.gr002.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_monetary_expansion( - self, cluster_with_constitution: 
ClusterWithConstitutionRecord - ): + yield tx_fee_fixed + + def monetary_expansion(cluster_with_constitution: ClusterWithConstitutionRecord): """Test monetaryExpansion guardrails defined in the key "10" of default constitution.""" reqc.gr003.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -709,9 +691,9 @@ def test_guardrail_monetary_expansion( ) reqc.gr003.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_treasury_cut(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield monetary_expansion + + def treasury_cut(cluster_with_constitution: ClusterWithConstitutionRecord): """Test treasuryCut guardrails defined in the key "11" of default constitution.""" reqc.gr004.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -724,11 +706,9 @@ def test_guardrail_treasury_cut(self, cluster_with_constitution: ClusterWithCons ) reqc.gr004.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_min_pool_cost( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield treasury_cut + + def min_pool_cost(cluster_with_constitution: ClusterWithConstitutionRecord): """Test minPoolCost guardrails defined in the key "16" of default constitution.""" reqc.gr005.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -741,11 +721,9 @@ def test_guardrail_min_pool_cost( ) reqc.gr005.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_utxo_cost_per_byte( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield min_pool_cost + + def utxo_cost_per_byte(cluster_with_constitution: ClusterWithConstitutionRecord): """Test utxoCostPerByte guardrails defined in the key "17" of default constitution.""" reqc.gr006.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -758,11 +736,9 @@ def test_guardrail_utxo_cost_per_byte( ) reqc.gr006.success() - @allure.link(helpers.get_vcs_link()) - 
@pytest.mark.testnets - def test_guardrail_execution_unit_prices( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield utxo_cost_per_byte + + def execution_unit_prices(cluster_with_constitution: ClusterWithConstitutionRecord): """Test executionUnitPrices guardrails defined in the key "19" of default constitution.""" ex_units_prices_memory_param = GuardrailTestParam( param_key="19[0]", @@ -794,11 +770,9 @@ def test_guardrail_execution_unit_prices( ) reqc.gr007b.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_block_body_size( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield execution_unit_prices + + def max_block_body_size(cluster_with_constitution: ClusterWithConstitutionRecord): """Test maxBlockBodySize guardrails defined in the key "2" of default constitution.""" reqc.gr008.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -811,11 +785,9 @@ def test_guardrail_max_block_body_size( ) reqc.gr008.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_tx_execution_units( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_block_body_size + + def max_tx_execution_units(cluster_with_constitution: ClusterWithConstitutionRecord): """ Test maxTxExecutionUnits guardrail defined in the key "20" of default constitution. @@ -879,11 +851,9 @@ def test_guardrail_max_tx_execution_units( ) [r.success() for r in (reqc.gr009a, reqc.gr009b)] - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_block_execution_units( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_tx_execution_units + + def max_block_execution_units(cluster_with_constitution: ClusterWithConstitutionRecord): """ Test maxBlockExecutionUnits guardrails defined in the key "21" of default constitution. 
@@ -947,11 +917,9 @@ def test_guardrail_max_block_execution_units( ) [r.success() for r in (reqc.gr010a, reqc.gr010b)] - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_value_size( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_block_execution_units + + def max_value_size(cluster_with_constitution: ClusterWithConstitutionRecord): """Test maxValueSize guardrails defined in the key "22" of default constitution.""" reqc.gr011.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -964,11 +932,9 @@ def test_guardrail_max_value_size( ) reqc.gr011.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_collateral_percentage( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_value_size + + def collateral_percentage(cluster_with_constitution: ClusterWithConstitutionRecord): """Test collateralPercentage guardrails defined in the key "23" of default constitution.""" reqc.gr012.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -981,11 +947,9 @@ def test_guardrail_collateral_percentage( ) reqc.gr012.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_collateral_inputs( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield collateral_percentage + + def max_collateral_inputs(cluster_with_constitution: ClusterWithConstitutionRecord): """Test maxCollateralInputs guardrails defined in the key "24" of default constitution.""" reqc.gr013.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -998,11 +962,9 @@ def test_guardrail_max_collateral_inputs( ) reqc.gr013.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_pool_voting_thresholds( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_collateral_inputs + + def pool_voting_thresholds(cluster_with_constitution: 
ClusterWithConstitutionRecord): """Test for poolVotingThresholds defined in the key "25" of default constitution.""" pool_motion_no_confidence_param = GuardrailTestParam( param_key="25[0]", @@ -1104,11 +1066,9 @@ def test_guardrail_pool_voting_thresholds( ) reqc.gr014e.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_drep_voting_thresholds( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield pool_voting_thresholds + + def drep_voting_thresholds(cluster_with_constitution: ClusterWithConstitutionRecord): """Test for dRepVotingThresholds defined in the key "26" of default constitution.""" drep_motion_no_confidence_param = GuardrailTestParam( param_key="26[0]", @@ -1360,10 +1320,9 @@ def test_guardrail_drep_voting_thresholds( ) reqc.gr015j.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_committee_min_size( - self, + yield drep_voting_thresholds + + def committee_min_size( cluster_with_constitution: ClusterWithConstitutionRecord, ): """Test committeeMinSize guardrails defined in the key "27" of default constitution.""" @@ -1378,11 +1337,9 @@ def test_guardrail_committee_min_size( ) reqc.gr016.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_committee_max_term_limit( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield committee_min_size + + def committee_max_term_limit(cluster_with_constitution: ClusterWithConstitutionRecord): """Test committeeMaxTermLimit guardrails defined in the key "28" of default constitution.""" reqc.gr017.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1395,11 +1352,9 @@ def test_guardrail_committee_max_term_limit( ) reqc.gr017.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_gov_action_lifetime( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield committee_max_term_limit + + 
def gov_action_lifetime(cluster_with_constitution: ClusterWithConstitutionRecord): """Test govActionLifetime guardrails defined in the key "29" of default constitution.""" reqc.gr018.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1412,9 +1367,9 @@ def test_guardrail_gov_action_lifetime( ) reqc.gr018.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_tx_size(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield gov_action_lifetime + + def max_tx_size(cluster_with_constitution: ClusterWithConstitutionRecord): """Test maxTxSize guardrails defined in the key "3" of default constitution.""" reqc.gr019.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1427,9 +1382,9 @@ def test_guardrail_max_tx_size(self, cluster_with_constitution: ClusterWithConst ) reqc.gr019.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_gov_deposit(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield max_tx_size + + def gov_deposit(cluster_with_constitution: ClusterWithConstitutionRecord): """Test govDeposit guardrails defined in the key "30" of default constitution.""" reqc.gr020.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1442,9 +1397,9 @@ def test_guardrail_gov_deposit(self, cluster_with_constitution: ClusterWithConst ) reqc.gr020.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_drep_deposit(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield gov_deposit + + def drep_deposit(cluster_with_constitution: ClusterWithConstitutionRecord): """Test dRepDeposit guardrails defined in the key "31" of default constitution.""" reqc.gr021.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1457,11 +1412,9 @@ def test_guardrail_drep_deposit(self, cluster_with_constitution: ClusterWithCons ) reqc.gr021.success() - @allure.link(helpers.get_vcs_link()) - 
@pytest.mark.testnets - def test_guardrail_drep_activity( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield drep_deposit + + def drep_activity(cluster_with_constitution: ClusterWithConstitutionRecord): """Test dRepActivity guardrails defined in the key "32" of default constitution.""" reqc.gr022.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1474,10 +1427,10 @@ def test_guardrail_drep_activity( ) reqc.gr022.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_min_fee_ref_script_coins_per_byte( - self, cluster_with_constitution: ClusterWithConstitutionRecord + yield drep_activity + + def min_fee_ref_script_coins_per_byte( + cluster_with_constitution: ClusterWithConstitutionRecord, ): """Test minFeeRefScriptCoinsPerByte defined in the key "33" of default constitution.""" reqc.gr023.start(url=helpers.get_vcs_link()) @@ -1491,11 +1444,9 @@ def test_guardrail_min_fee_ref_script_coins_per_byte( ) reqc.gr023.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_max_block_header_size( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield min_fee_ref_script_coins_per_byte + + def max_block_header_size(cluster_with_constitution: ClusterWithConstitutionRecord): """Test maxBlockHeaderSize guardrails defined in the key "4" of default constitution.""" reqc.gr024.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1508,11 +1459,9 @@ def test_guardrail_max_block_header_size( ) reqc.gr024.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_stake_address_deposit( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield max_block_header_size + + def stake_address_deposit(cluster_with_constitution: ClusterWithConstitutionRecord): """Test stakeAddressDeposit guardrails defined in the key "5" of default constitution.""" reqc.gr025.start(url=helpers.get_vcs_link()) 
perform_predicates_check( @@ -1525,11 +1474,9 @@ def test_guardrail_stake_address_deposit( ) reqc.gr025.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_stake_pool_deposit( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield stake_address_deposit + + def stake_pool_deposit(cluster_with_constitution: ClusterWithConstitutionRecord): """Test stakePoolDeposit guardrails defined in the key "6" of default constitution.""" reqc.gr026.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1542,11 +1489,9 @@ def test_guardrail_stake_pool_deposit( ) reqc.gr026.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_pool_retire_max_epoch( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield stake_pool_deposit + + def pool_retire_max_epoch(cluster_with_constitution: ClusterWithConstitutionRecord): """Test poolRetireMaxEpoch guardrails defined in the key "7" of default constitution.""" reqc.gr027.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1559,11 +1504,9 @@ def test_guardrail_pool_retire_max_epoch( ) reqc.gr027.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_stake_pool_target_num( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield pool_retire_max_epoch + + def stake_pool_target_num(cluster_with_constitution: ClusterWithConstitutionRecord): """Test stakePoolTargetNum guardrails defined in the key "8" of default constitution.""" reqc.gr028.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1576,11 +1519,9 @@ def test_guardrail_stake_pool_target_num( ) reqc.gr028.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_pool_pledge_influence( - self, cluster_with_constitution: ClusterWithConstitutionRecord - ): + yield stake_pool_target_num + + def pool_pledge_influence(cluster_with_constitution: 
ClusterWithConstitutionRecord): """Test poolPledgeInfluence guardrails defined in the key "9" of default constitution.""" reqc.gr029.start(url=helpers.get_vcs_link()) perform_predicates_check( @@ -1595,9 +1536,9 @@ def test_guardrail_pool_pledge_influence( ) reqc.gr029.success() - @allure.link(helpers.get_vcs_link()) - @pytest.mark.testnets - def test_guardrail_cost_models(self, cluster_with_constitution: ClusterWithConstitutionRecord): + yield pool_pledge_influence + + def cost_models(cluster_with_constitution: ClusterWithConstitutionRecord): """Test costModels guardrails defined in the key "18" of default constitution.""" # Sample cost model data file data_dir = pl.Path(__file__).parent.parent / "data" @@ -1617,3 +1558,29 @@ def test_guardrail_cost_models(self, cluster_with_constitution: ClusterWithConst cluster_with_constitution=cluster_with_constitution, proposals=valid_proposals ) [r.success() for r in (reqc.cip028, reqc.cip036)] + + yield cost_models + + +class TestGovernanceGuardrails: + @allure.link(helpers.get_vcs_link()) + @pytest.mark.testnets + def test_guardrails( + self, + cluster_with_constitution: ClusterWithConstitutionRecord, + subtests: pytest_subtests.SubTests, + ): + """Test governance guardrails using plutus script constitution. 
+ + * Enact a new constitution with a plutus script + * Propose parameter change for different guardrail checks + * Check that the guardrails are enforced + * Expecting plutus error in case of invalid proposals + * Expecting valid proposals to be accepted + * Data file used : data/defaultConstitution.json + """ + common.get_test_id(cluster_with_constitution.cluster) + + for subt in get_subtests(): + with subtests.test(scenario=subt.__name__): + subt(cluster_with_constitution) From 0413a50081ca164e7a996eee688b3cf26ef5a090 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 17 Oct 2024 17:42:12 +0200 Subject: [PATCH 016/168] test: fix incorrect file template name --- cardano_node_tests/tests/test_addr_registration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/tests/test_addr_registration.py b/cardano_node_tests/tests/test_addr_registration.py index c918038d7..88972deb6 100644 --- a/cardano_node_tests/tests/test_addr_registration.py +++ b/cardano_node_tests/tests/test_addr_registration.py @@ -159,13 +159,13 @@ def _build_dereg() -> clusterlib.TxRawOutput: tx_signed = cluster.g_transaction.sign_tx( tx_body_file=tx_raw_output_dereg.out_file, signing_key_files=tx_files_dereg.signing_key_files, - tx_name=f"{temp_template}_dereg", # TODO: should be reg_dereg + tx_name=f"{temp_template}_dereg", ) cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output_dereg.txins) else: tx_raw_output_dereg = cluster.g_transaction.send_tx( src_address=user_payment.address, - tx_name=f"{temp_template}_reg_dereg", + tx_name=f"{temp_template}_dereg", tx_files=tx_files_dereg, ) From 697e8557bc9558a7a9696730fd574ad36752458c Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 17 Oct 2024 19:36:05 +0200 Subject: [PATCH 017/168] fix typo --- framework_tests/test_subtests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework_tests/test_subtests.py b/framework_tests/test_subtests.py index cb4d43b49..2d236f361 
100644 --- a/framework_tests/test_subtests.py +++ b/framework_tests/test_subtests.py @@ -12,7 +12,7 @@ def test_outcomes( # In subtest, don't return any other outcome than success or failure. # Allure doesn't work well with subtests. It will use outcome of the first non-successful # subtest as the overall test outcome. - # Therefore skiped / xfailed subtests could mask subtest failures. As a workaround, + # Therefore skipped / xfailed subtests could mask subtest failures. As a workaround, # record the outcome of the subtest and use it as the outcome of the main test. def _subtest(num: int) -> None: if num > 200: From b73893aad96fd92f7a49cf4cf80983eac3cbbfff Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 18 Oct 2024 12:31:02 +0200 Subject: [PATCH 018/168] feat(tests): mark test_guardrails as long running Added @pytest.mark.long to the test_guardrails method in TestGovernanceGuardrails class to indicate that it is a long-running test. This helps in scheduling tests based on their execution time. 
--- cardano_node_tests/tests/tests_conway/test_guardrails.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index 4580de8d6..c201bb86e 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -1564,6 +1564,7 @@ def cost_models(cluster_with_constitution: ClusterWithConstitutionRecord): class TestGovernanceGuardrails: @allure.link(helpers.get_vcs_link()) + @pytest.mark.long @pytest.mark.testnets def test_guardrails( self, From 2ae7f17bc57bf3a4c7d86a3289b50eb0ff9aa952 Mon Sep 17 00:00:00 2001 From: Artur Wieczorek Date: Fri, 18 Oct 2024 14:43:48 +0200 Subject: [PATCH 019/168] Update CODEOWNERS --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2fc323307..134b63bb4 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -17,3 +17,6 @@ cardano_node_tests/utils/dbsync_* @mkoura @saratomaz @ArturWieczorek .github/workflows/db_sync_* @ArturWieczorek .github/workflows/node_sync_* @ArturWieczorek .github/workflows/smash_* @ArturWieczorek + +.buildkite/db_sync_* @ArturWieczorek +.buildkite/node_sync_* @ArturWieczorek From ca0f5ff32e8c57c723afba775b947931efa35384 Mon Sep 17 00:00:00 2001 From: saratomaz Date: Fri, 18 Oct 2024 20:49:33 +0100 Subject: [PATCH 020/168] Add SECURITY.md file --- SECURITY.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..30f4dc639 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,18 @@ +# Security Policy + +## Reporting a Vulnerability + +Please report (suspected) security vulnerabilities to [security@intersectmbo.org](mailto:security@intersectmbo.org). You will receive a +response from us within 48 hours. 
If the issue is confirmed, we will release a patch as soon +as possible. + +Please provide a clear and concise description of the vulnerability, including: + +* the affected version(s) of Open-Source-Office, +* steps that can be followed to exercise the vulnerability, +* any workarounds or mitigations + +If you have developed any code or utilities that can help demonstrate the suspected +vulnerability, please mention them in your email but ***DO NOT*** attempt to include them as +attachments as this may cause your Email to be blocked by spam filters. +See the security file in the [Cardano engineering handbook](https://github.com/input-output-hk/cardano-engineering-handbook/blob/main/SECURITY.md). From 30900d5fe5deb4f0ed62fea6b012b5e6f9fbb50f Mon Sep 17 00:00:00 2001 From: saratomaz Date: Sat, 19 Oct 2024 15:09:20 +0100 Subject: [PATCH 021/168] Fix expected error msg in test_delegatee_not_registered --- cardano_node_tests/tests/test_delegation.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index 77c85c95f..d562d9858 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -1169,4 +1169,7 @@ def test_delegatee_not_registered( tx_files=tx_files, ) err_msg = str(excinfo.value) - assert "DelegateeNotRegisteredDELEG" in err_msg, err_msg + assert ( + "DelegateeNotRegisteredDELEG" in err_msg + or "DelegateeStakePoolNotRegisteredDELEG" in err_msg + ), err_msg From 80755277319b66167475413de558c421eaaaa5cd Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 21 Oct 2024 10:54:28 +0200 Subject: [PATCH 022/168] tests: Add node version info to error message --- cardano_node_tests/tests/test_delegation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index d562d9858..5049676d3 100644 --- 
a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -1170,6 +1170,6 @@ def test_delegatee_not_registered( ) err_msg = str(excinfo.value) assert ( - "DelegateeNotRegisteredDELEG" in err_msg + "DelegateeNotRegisteredDELEG" in err_msg # Before cardano-node 10.0.0 or "DelegateeStakePoolNotRegisteredDELEG" in err_msg ), err_msg From 1aa4c24ca6a7b854d7cd78d7f1255fd2771eb142 Mon Sep 17 00:00:00 2001 From: saratomaz Date: Sat, 19 Oct 2024 17:07:20 +0100 Subject: [PATCH 023/168] Fix error message in test_native_tokens.py related to max Tx size --- cardano_node_tests/tests/test_native_tokens.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/cardano_node_tests/tests/test_native_tokens.py b/cardano_node_tests/tests/test_native_tokens.py index bc6335cea..b499e2173 100644 --- a/cardano_node_tests/tests/test_native_tokens.py +++ b/cardano_node_tests/tests/test_native_tokens.py @@ -777,15 +777,21 @@ def _mint_tokens() -> clusterlib.TxRawOutput: amount=300_000_000, ) + max_tx_size = cluster.g_query.get_protocol_params().get("maxTxSize") + try: # Disable logging of "Not enough funds to make the transaction" logging.disable(logging.ERROR) + with pytest.raises((clusterlib.CLIError, submit_api.SubmitApiError)) as excinfo: _mint_tokens() err_msg = str(excinfo.value) assert ( + # On older cardano-node releases "OutputTooBigUTxO" in err_msg # For `build-raw` command or "balance of the transaction is negative" in err_msg # For `build` command + # On cardano-node 10.0.0+ + or re.search(rf"MaxTxSizeUTxO \d+ {max_tx_size}", err_msg) ), "Unexpected error message" finally: logging.disable(logging.NOTSET) @@ -922,15 +928,21 @@ def _mint_tokens() -> clusterlib.TxRawOutput: amount=300_000_000, ) + max_tx_size = cluster.g_query.get_protocol_params().get("maxTxSize") + try: # Disable logging of "Not enough funds to make the transaction" logging.disable(logging.ERROR) + with pytest.raises((clusterlib.CLIError, submit_api.SubmitApiError)) 
as excinfo: _mint_tokens() err_msg = str(excinfo.value) assert ( + # On older cardano-node releases "OutputTooBigUTxO" in err_msg # For `build-raw` command or "balance of the transaction is negative" in err_msg # For `build` command + # On cardano-node 10.0.0+ + or re.search(rf"MaxTxSizeUTxO \d+ {max_tx_size}", err_msg) ), "Unexpected error message" finally: logging.disable(logging.NOTSET) From f1f757b24764573d0420fb8750f2a3dd28787bd9 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 21 Oct 2024 17:13:02 +0200 Subject: [PATCH 024/168] fix(tests): handle CLI issue 942 in various tests - Added handling for CLI issue 942 in `TestRegisterAddr` and `TestDelegateAddr` classes. - Updated `script_dreps_lg` to use workaround for CLI issue 942. - Introduced `CLI_WITH_ISSUE_942` constant to manage version-specific behavior. - Ensured tests finish appropriately when encountering `ValueNotConservedUTxO` error. --- cardano_node_tests/tests/issues.py | 6 ++++++ .../tests/test_addr_registration.py | 7 ++++++- cardano_node_tests/tests/test_delegation.py | 8 +++++++- .../tests/tests_conway/test_constitution.py | 2 +- .../tests/tests_plutus/test_delegation.py | 15 +++++++++------ 5 files changed, 29 insertions(+), 9 deletions(-) diff --git a/cardano_node_tests/tests/issues.py b/cardano_node_tests/tests/issues.py index dcb90c04e..ac15cc6a9 100644 --- a/cardano_node_tests/tests/issues.py +++ b/cardano_node_tests/tests/issues.py @@ -86,6 +86,12 @@ fixed_in="9.5.0.0", # Fixed in some release after 9.4.1.0 message="Negative pparam proposal values overflow to positive.", ) +cli_942 = blockers.GH( + issue=942, + repo="IntersectMBO/cardano-cli", + fixed_in="10.0.0.1", # Fixed in some release after 10.0.0.0 + message="build command doesn't balance key deposit.", +) consensus_973 = blockers.GH( issue=973, diff --git a/cardano_node_tests/tests/test_addr_registration.py b/cardano_node_tests/tests/test_addr_registration.py index 88972deb6..9a2b0ecb9 100644 --- 
a/cardano_node_tests/tests/test_addr_registration.py +++ b/cardano_node_tests/tests/test_addr_registration.py @@ -161,7 +161,12 @@ def _build_dereg() -> clusterlib.TxRawOutput: signing_key_files=tx_files_dereg.signing_key_files, tx_name=f"{temp_template}_dereg", ) - cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output_dereg.txins) + try: + cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output_dereg.txins) + except clusterlib.CLIError as exc: + if "ValueNotConservedUTxO" in str(exc): + issues.cli_942.finish_test() + raise else: tx_raw_output_dereg = cluster.g_transaction.send_tx( src_address=user_payment.address, diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index 5049676d3..53a41b323 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -11,6 +11,7 @@ from cardano_node_tests.cluster_management import resources_management from cardano_node_tests.tests import common from cardano_node_tests.tests import delegation +from cardano_node_tests.tests import issues from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import dbsync_utils from cardano_node_tests.utils import helpers @@ -819,7 +820,12 @@ def _build_deleg_dereg() -> clusterlib.TxRawOutput: signing_key_files=tx_files.signing_key_files, tx_name=f"{temp_template}_deleg_dereg", ) - cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output_deleg.txins) + try: + cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output_deleg.txins) + except clusterlib.CLIError as exc: + if "ValueNotConservedUTxO" in str(exc): + issues.cli_942.finish_test() + raise else: tx_raw_output_deleg = cluster.g_transaction.send_tx( src_address=user_payment.address, diff --git a/cardano_node_tests/tests/tests_conway/test_constitution.py b/cardano_node_tests/tests/tests_conway/test_constitution.py index 87ce7d366..2498207e3 100644 --- 
a/cardano_node_tests/tests/tests_conway/test_constitution.py +++ b/cardano_node_tests/tests/tests_conway/test_constitution.py @@ -238,7 +238,7 @@ def _dereg_stake() -> None: cluster_obj=cluster, name_template=f"{temp_template}_dereg", src_address=pool_users[0].payment.address, - use_build_cmd=True, + use_build_cmd=False, # Workaround for CLI issue 942 tx_files=tx_files, withdrawals=withdrawals, deposit=-sum(s.delegation_deposit for __, s in pool_users_info), diff --git a/cardano_node_tests/tests/tests_plutus/test_delegation.py b/cardano_node_tests/tests/tests_plutus/test_delegation.py index dcdd87da7..c8d34be8c 100644 --- a/cardano_node_tests/tests/tests_plutus/test_delegation.py +++ b/cardano_node_tests/tests/tests_plutus/test_delegation.py @@ -13,6 +13,7 @@ import allure import pytest from cardano_clusterlib import clusterlib +from packaging import version from cardano_node_tests.cluster_management import cluster_management from cardano_node_tests.cluster_management import resources_management @@ -29,6 +30,8 @@ LOGGER = logging.getLogger(__name__) +CLI_WITH_ISSUE_942 = version.parse("10.0.0.0") + pytestmark = [ common.SKIPIF_PLUTUS_UNUSABLE, pytest.mark.plutus, @@ -630,7 +633,7 @@ def test_register_deregister( pool_user=pool_user, redeemer_file=plutus_common.REDEEMER_42, reference_script_utxos=reference_script_utxos, - use_build_cmd=use_build_cmd, + use_build_cmd=use_build_cmd and VERSIONS.cli != CLI_WITH_ISSUE_942, ) if reward_error: @@ -839,12 +842,12 @@ def test_delegate_deregister( # noqa: C901 pool_user=pool_user, redeemer_file=plutus_common.REDEEMER_42, reference_script_utxos=reference_script_utxos, - use_build_cmd=use_build_cmd, + use_build_cmd=use_build_cmd and VERSIONS.cli != CLI_WITH_ISSUE_942, ) except clusterlib.CLIError as exc: - if "(MissingRedeemers" not in str(exc): - raise - issues.cli_299.finish_test() + if "(MissingRedeemers" in str(exc): + issues.cli_299.finish_test() + raise if reward_error: raise AssertionError(reward_error) @@ -1069,7 
+1072,7 @@ def test_register_delegate_deregister( pool_user=pool_user, redeemer_file=plutus_common.REDEEMER_42, reference_script_utxos=reference_script_utxos, - use_build_cmd=use_build_cmd, + use_build_cmd=use_build_cmd and VERSIONS.cli != CLI_WITH_ISSUE_942, ) if reward_error: From fa95bee488888f8d82540459d3cebddde0b6e46d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 21 Oct 2024 17:52:02 +0200 Subject: [PATCH 025/168] fix(tests): handle CLIError for different CLI versions Updated test_spend_negative_build.py to handle CLIError differently based on the CLI version. Added a version check to assert the error message conditionally for CLI versions >= 10.0.0.0. --- .../tests/tests_plutus/test_spend_negative_build.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py index d5a777ae8..bbd81db5a 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py @@ -11,6 +11,7 @@ import hypothesis.strategies as st import pytest from cardano_clusterlib import clusterlib +from packaging import version from cardano_node_tests.cluster_management import cluster_management from cardano_node_tests.tests import common @@ -18,6 +19,7 @@ from cardano_node_tests.tests.tests_plutus import spend_build from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import helpers +from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) @@ -216,7 +218,8 @@ def test_collateral_w_tokens( tokens_collateral=tokens_rec, ) - with pytest.raises(clusterlib.CLIError) as excinfo: + exc_str = "" + try: spend_build._build_spend_locked_txin( temp_template=temp_template, cluster_obj=cluster, @@ -227,9 +230,13 @@ def test_collateral_w_tokens( plutus_op=plutus_op, amount=2_000_000, ) + 
except clusterlib.CLIError as exc: + exc_str = str(exc) - err_str = str(excinfo.value) - assert "CollateralContainsNonADA" in err_str, err_str + if VERSIONS.cli >= version.parse("10.0.0.0"): + assert not exc_str, exc_str + else: + assert "CollateralContainsNonADA" in exc_str, exc_str # check expected fees expected_fee_fund = 173597 From 33778576f2de10147edb273dfa632cec4e7f791e Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 22 Oct 2024 17:10:06 +0200 Subject: [PATCH 026/168] feat(tests): add test to return collateral with tokens With cardano-cli 10.0.0.0, tokens on collateral UTxO are allowed even when return collateral txout is not specified. --- .../test_spend_collateral_build.py | 64 +++++++++++++------ 1 file changed, 46 insertions(+), 18 deletions(-) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py index d56ed7afc..dcaffce2d 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py @@ -6,6 +6,7 @@ import allure import pytest from cardano_clusterlib import clusterlib +from packaging import version from cardano_node_tests.cluster_management import cluster_management from cardano_node_tests.tests import common @@ -232,11 +233,28 @@ def test_with_total_return_collateral( tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) @allure.link(helpers.get_vcs_link()) + @pytest.mark.parametrize( + "use_return_collateral", + ( + True, + pytest.param( + False, + marks=pytest.mark.skipif( + VERSIONS.cli < version.parse("10.0.0.0"), + reason="not supported in cardano-cli < 10.0.0.0", + ), + ), + ), + ids=("using_return_collateral", "without_return_collateral"), + ) @pytest.mark.smoke @pytest.mark.testnets @pytest.mark.dbsync def test_collateral_with_tokens( - self, cluster: clusterlib.ClusterLib, payment_addrs: 
tp.List[clusterlib.AddressRecord] + self, + cluster: clusterlib.ClusterLib, + payment_addrs: tp.List[clusterlib.AddressRecord], + use_return_collateral: bool, ): """Test failing script using collaterals with tokens. @@ -256,14 +274,20 @@ def test_collateral_with_tokens( assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - redeem_cost = plutus_common.compute_cost( - execution_cost=plutus_op.execution_cost, - protocol_params=cluster.g_query.get_protocol_params(), - ) - token_amount = 100 - amount_for_collateral = redeem_cost.collateral * 4 - return_collateral_amount = amount_for_collateral - redeem_cost.collateral + + if use_return_collateral: + redeem_cost = plutus_common.compute_cost( + execution_cost=plutus_op.execution_cost, + protocol_params=cluster.g_query.get_protocol_params(), + ) + total_collateral_amount = redeem_cost.collateral + amount_for_collateral = total_collateral_amount * 4 + return_collateral_amount = amount_for_collateral - total_collateral_amount + else: + total_collateral_amount = None + amount_for_collateral = None + return_collateral_amount = 0 # Create the token token_rand = clusterlib.get_rand_str(5) @@ -291,15 +315,19 @@ def test_collateral_with_tokens( # Spend the "locked" UTxO - txouts_return_collateral = [ - clusterlib.TxOut( - address=dst_addr.address, - amount=return_collateral_amount, - ), - clusterlib.TxOut( - address=dst_addr.address, amount=token_amount, coin=tokens_rec[0].coin - ), - ] + txouts_return_collateral = ( + [ + clusterlib.TxOut( + address=dst_addr.address, + amount=return_collateral_amount, + ), + clusterlib.TxOut( + address=dst_addr.address, amount=token_amount, coin=tokens_rec[0].coin + ), + ] + if return_collateral_amount + else [] + ) try: tx_output_redeem = self._build_spend_locked_txin( @@ -310,7 +338,7 @@ def test_collateral_with_tokens( script_utxos=script_utxos, collateral_utxos=collateral_utxos, plutus_op=plutus_op, - total_collateral_amount=redeem_cost.collateral, + 
total_collateral_amount=total_collateral_amount, return_collateral_txouts=txouts_return_collateral, ) except clusterlib.CLIError as exc: From 468a6472cc2a6abb76d2cb6097f51e5cedfd0aae Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 23 Oct 2024 11:36:10 +0200 Subject: [PATCH 027/168] feat: update delegation of stake addresses to include vote delegation Updated the cluster scripts and test files to delegate to `always_abstain` DRep. This is needed in protocol version 10 to make withdrawals work. --- .../cluster_scripts/conway_fast/start-cluster | 11 +++++++--- cardano_node_tests/tests/delegation.py | 6 ++++-- cardano_node_tests/tests/test_delegation.py | 21 ++++++++++++------- .../tests/test_staking_rewards.py | 3 ++- 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 8491ae938..6197bb9e1 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -345,17 +345,20 @@ for i in $(seq 1 "$NUM_POOLS"); do cardano_cli_log conway stake-address key-gen \ --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" + # payment address cardano_cli_log conway address build \ --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" + # stake address cardano_cli_log conway stake-address build \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" + # stake address registration cert cardano_cli_log conway stake-address 
registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ @@ -363,15 +366,17 @@ for i in $(seq 1 "$NUM_POOLS"); do --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" # stake reward address registration cert - cardano_cli_log conway stake-address registration-certificate \ + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --always-abstain \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - # stake address delegation certs - cardano_cli_log conway stake-address stake-delegation-certificate \ + # owner stake address delegation certs + cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --always-abstain \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" POOL_NAME="TestPool$i" diff --git a/cardano_node_tests/tests/delegation.py b/cardano_node_tests/tests/delegation.py index e6ed6a260..15179ec56 100644 --- a/cardano_node_tests/tests/delegation.py +++ b/cardano_node_tests/tests/delegation.py @@ -170,6 +170,7 @@ def delegate_stake_addr( deleg_kwargs: tp.Dict[str, tp.Any] = { "addr_name": f"{temp_template}_addr0", "stake_vkey_file": pool_user.stake.vkey_file, + "always_abstain": True, } if pool_id: deleg_kwargs["stake_pool_id"] = pool_id @@ -177,7 +178,7 @@ def delegate_stake_addr( deleg_kwargs["cold_vkey_file"] = cold_vkey pool_id = cluster_obj.g_stake_pool.get_stake_pool_id(cold_vkey) - stake_addr_deleg_cert_file = cluster_obj.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster_obj.g_stake_address.gen_stake_and_vote_delegation_cert( **deleg_kwargs ) @@ -222,6 +223,7 @@ def delegate_stake_addr( # 
check that the stake address was delegated stake_addr_info = cluster_obj.g_query.get_stake_addr_info(pool_user.stake.address) assert stake_addr_info.delegation, f"Stake address was not delegated yet: {stake_addr_info}" - assert pool_id == stake_addr_info.delegation, "Stake address delegated to wrong pool" + assert stake_addr_info.delegation == pool_id, "Stake address delegated to wrong pool" + assert stake_addr_info.vote_delegation == "alwaysAbstain" return DelegationOut(pool_user=pool_user, pool_id=pool_id, tx_raw_output=tx_raw_output) diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index 53a41b323..3a6343c98 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -325,10 +325,11 @@ def _get_pool_id(idx: int) -> str: # Create delegation certificates to different pool for each stake address stake_addr_deleg_cert_files = [ - cluster.g_stake_address.gen_stake_addr_delegation_cert( + cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_{i}_addr", stake_vkey_file=d[0].stake.vkey_file, stake_pool_id=d[1], + always_abstain=True, ) for i, d in enumerate(delegation_map) ] @@ -783,11 +784,12 @@ def test_addr_delegation_deregistration( src_registered_balance = cluster.g_query.get_address_balance(user_payment.address) # create stake address delegation cert - stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=stake_vkey_file, stake_address=stake_address, stake_pool_id=pool_id, + always_abstain=True, ) clusterlib_utils.wait_for_epoch_interval( @@ -874,10 +876,11 @@ def test_delegation_cert_with_wrong_key( # create stake address delegation cert, use wrong stake vkey with pytest.raises(clusterlib.CLIError) as excinfo: - 
cluster.g_stake_address.gen_stake_addr_delegation_cert( + cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=pool_users_cluster_and_pool[0].payment.vkey_file, stake_pool_id=pool_id, + always_abstain=True, ) err_msg = str(excinfo.value) assert ( @@ -926,10 +929,11 @@ def test_delegate_addr_with_wrong_key( ).address, f"Stake address is not registered: {user_registered.stake.address}" # create stake address delegation cert - stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, stake_pool_id=pool_id, + always_abstain=True, ) # delegate stake address, use wrong payment skey @@ -969,10 +973,11 @@ def test_delegate_unknown_addr( user_payment = pool_users_cluster_and_pool[0].payment # create stake address delegation cert - stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, stake_pool_id=pool_id, + always_abstain=True, ) # delegate unknown stake address @@ -1075,10 +1080,11 @@ def test_delegate_deregistered_addr( ).address, f"Stake address is registered: {user_registered.stake.address}" # create stake address delegation cert - stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, stake_pool_id=pool_id, + always_abstain=True, ) # delegate deregistered stake address @@ -1156,10 +1162,11 @@ def test_delegatee_not_registered( node_cold = 
cluster.g_node.gen_cold_key_pair_and_counter(node_name=f"{temp_template}_pool") # create stake address delegation cert - stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, cold_vkey_file=node_cold.vkey_file, + always_abstain=True, ) # delegate stake address diff --git a/cardano_node_tests/tests/test_staking_rewards.py b/cardano_node_tests/tests/test_staking_rewards.py index 86e4d21a9..a8f73c033 100644 --- a/cardano_node_tests/tests/test_staking_rewards.py +++ b/cardano_node_tests/tests/test_staking_rewards.py @@ -819,10 +819,11 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: # delegate pool rewards address to pool node_cold = pool_rec["cold_key_pair"] - reward_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_addr_delegation_cert( + reward_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=pool_reward.stake.vkey_file, cold_vkey_file=node_cold.vkey_file, + always_abstain=True, ) tx_files = clusterlib.TxFiles( certificate_files=[ From c2290b9e327056d63318f6a621a850ac7c4e7c7c Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 10:48:54 +0200 Subject: [PATCH 028/168] feat(workflows): update regression workflows for conway era - Added conway 9 and conway 10 options to cluster era in regression-dbsync.yaml - Set default cluster era to conway 9 in regression-dbsync.yaml - Added conway option to tx era in regression-dbsync.yaml - Removed older eras (alonzo, mary, allegra, shelley) from tx era in both regression-dbsync.yaml and regression.yaml - Added conway only option to markexpr in regression-dbsync.yaml --- .github/workflows/regression-dbsync.yaml | 11 +++++------ .github/workflows/regression.yaml | 5 ----- 2 files changed, 5 insertions(+), 11 
deletions(-) diff --git a/.github/workflows/regression-dbsync.yaml b/.github/workflows/regression-dbsync.yaml index 441acdf2b..fa641af63 100644 --- a/.github/workflows/regression-dbsync.yaml +++ b/.github/workflows/regression-dbsync.yaml @@ -13,18 +13,16 @@ on: type: choice description: "Cluster era" options: - - babbage - default: babbage + - conway 9 + - conway 10 + default: conway 9 tx_era: type: choice description: "Tx era" options: - default + - conway - babbage - - alonzo - - mary - - allegra - - shelley default: default markexpr: type: choice @@ -36,6 +34,7 @@ on: - plutus - plutus and smoke - not long + - conway only - dbsync and smoke - dbsync and plutus - dbsync and not long diff --git a/.github/workflows/regression.yaml b/.github/workflows/regression.yaml index d0c349737..6c6fbbc82 100644 --- a/.github/workflows/regression.yaml +++ b/.github/workflows/regression.yaml @@ -12,7 +12,6 @@ on: options: - conway 9 - conway 10 - - babbage default: conway 9 tx_era: type: choice @@ -21,10 +20,6 @@ on: - default - conway - babbage - - alonzo - - mary - - allegra - - shelley default: default markexpr: type: choice From cc5ad08b21fd0e8903a06618d013d201983e5367 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 11:02:49 +0200 Subject: [PATCH 029/168] feat(testnets): remove babbage testnet scripts Babbage is no longer used on any of the testnets, so we can remove support for starting local testnet in Babbage era from the framework. 
--- Makefile | 1 - .../cluster_scripts/babbage/byron-params.json | 23 - .../cluster_scripts/babbage/cardano-node-bft1 | 36 - .../babbage/dbsync-config.yaml | 115 -- .../babbage/empty-genesis.conway.spec.json | 3 - .../babbage/genesis.alonzo.spec.json | 365 ------ .../babbage/genesis.conway.spec.json | 292 ----- .../cluster_scripts/babbage/genesis.spec.json | 44 - .../cluster_scripts/babbage/postgres-setup.sh | 23 - .../babbage/release-genesis.alonzo.spec.json | 371 ------ .../babbage/release-genesis.conway.spec.json | 39 - .../release_8_9-genesis.conway.spec.json | 40 - .../babbage/run-cardano-dbsync | 14 - .../babbage/run-cardano-submit-api | 19 - .../cluster_scripts/babbage/start-cluster | 1118 ----------------- .../cluster_scripts/babbage/stop-cluster | 34 - .../babbage/submit-api-config.json | 111 -- .../babbage/template-cardano-node-pool | 34 - .../babbage/template-config.json | 180 --- .../babbage_fast/byron-params.json | 23 - .../babbage_fast/cardano-node-bft1 | 31 - .../babbage_fast/dbsync-config.yaml | 115 -- .../empty-genesis.conway.spec.json | 3 - .../babbage_fast/genesis.alonzo.spec.json | 365 ------ .../babbage_fast/genesis.conway.spec.json | 292 ----- .../babbage_fast/genesis.spec.json | 44 - .../babbage_fast/postgres-setup.sh | 23 - .../release-genesis.alonzo.spec.json | 371 ------ .../release-genesis.conway.spec.json | 39 - .../release_8_9-genesis.conway.spec.json | 40 - .../babbage_fast/run-cardano-dbsync | 14 - .../babbage_fast/run-cardano-submit-api | 19 - .../babbage_fast/start-cluster | 639 ---------- .../cluster_scripts/babbage_fast/stop-cluster | 34 - .../babbage_fast/submit-api-config.json | 111 -- .../babbage_fast/template-cardano-node-pool | 34 - .../babbage_fast/template-config.json | 184 --- cardano_node_tests/utils/configuration.py | 2 +- prepare_test_env.sh | 5 +- 39 files changed, 2 insertions(+), 5248 deletions(-) delete mode 100644 cardano_node_tests/cluster_scripts/babbage/byron-params.json delete mode 100644 
cardano_node_tests/cluster_scripts/babbage/cardano-node-bft1 delete mode 100644 cardano_node_tests/cluster_scripts/babbage/dbsync-config.yaml delete mode 100644 cardano_node_tests/cluster_scripts/babbage/empty-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/genesis.alonzo.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/genesis.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/postgres-setup.sh delete mode 100644 cardano_node_tests/cluster_scripts/babbage/release-genesis.alonzo.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/release-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/release_8_9-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/run-cardano-dbsync delete mode 100644 cardano_node_tests/cluster_scripts/babbage/run-cardano-submit-api delete mode 100644 cardano_node_tests/cluster_scripts/babbage/start-cluster delete mode 100644 cardano_node_tests/cluster_scripts/babbage/stop-cluster delete mode 100644 cardano_node_tests/cluster_scripts/babbage/submit-api-config.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage/template-cardano-node-pool delete mode 100644 cardano_node_tests/cluster_scripts/babbage/template-config.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/byron-params.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/cardano-node-bft1 delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/dbsync-config.yaml delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/empty-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/genesis.alonzo.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/genesis.conway.spec.json delete mode 100644 
cardano_node_tests/cluster_scripts/babbage_fast/genesis.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/postgres-setup.sh delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.alonzo.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/release_8_9-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-dbsync delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-submit-api delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/start-cluster delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/stop-cluster delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/submit-api-config.json delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/template-cardano-node-pool delete mode 100644 cardano_node_tests/cluster_scripts/babbage_fast/template-config.json diff --git a/Makefile b/Makefile index bcdc7b561..f579bbf49 100644 --- a/Makefile +++ b/Makefile @@ -93,7 +93,6 @@ tests: .dirs .run_tests # run tests that are supposed to run on PR level .PHONY: testpr testpr: export TESTPR=1 -testpr: export SCRIPTS_DIRNAME := $(or $(SCRIPTS_DIRNAME),babbage_fast) testpr: export CLUSTERS_COUNT := $(or $(CLUSTERS_COUNT),5) testpr: TEST_THREADS := $(or $(TEST_THREADS),20) testpr: MARKEXPR := $(or $(MARKEXPR),-m "smoke") diff --git a/cardano_node_tests/cluster_scripts/babbage/byron-params.json b/cardano_node_tests/cluster_scripts/babbage/byron-params.json deleted file mode 100644 index d424e4d50..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/byron-params.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "heavyDelThd": "300000000000", - "maxBlockSize": "2000000", - "maxTxSize": "4096", - "maxHeaderSize": "2000000", - "maxProposalSize": "700", - "mpcThd": "20000000000000", - 
"scriptVersion": 0, - "slotDuration": "2000", - "softforkRule": { - "initThd": "900000000000000", - "minThd": "600000000000000", - "thdDecrement": "50000000000000" - }, - "txFeePolicy": { - "multiplier": "43946000000", - "summand": "155381000000000" - }, - "unlockStakeEpoch": "18446744073709551615", - "updateImplicit": "10000", - "updateProposalThd": "100000000000000", - "updateVoteThd": "1000000000000" -} diff --git a/cardano_node_tests/cluster_scripts/babbage/cardano-node-bft1 b/cardano_node_tests/cluster_scripts/babbage/cardano-node-bft1 deleted file mode 100644 index b9fa142cc..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/cardano-node-bft1 +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env bash - -if [ -e ./state-cluster%%INSTANCE_NUM%%/utxo_backend ]; then - UTXO_BACKEND="$(<./state-cluster%%INSTANCE_NUM%%/utxo_backend)" -fi - -case "${UTXO_BACKEND:-""}" in - mem) - UTXO_BACKEND_ARGS=("--utxos-in-memory") - ;; - disk) - UTXO_BACKEND_ARGS=("--utxos-on-disk") - ;; - *) - UTXO_BACKEND_ARGS=() - ;; -esac - -echo "Starting cardano-node run: cardano-node run" - echo "--config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json" - echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1" - echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json" - echo "--host-addr 127.0.0.1" - echo "--port %%NODE_PORT_BASE%%" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" - echo "--signing-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.key" - echo "--delegation-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.json" - echo "--shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/vrf.skey" - echo "--shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/kes.skey" - echo "--shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - -echo "..or, once again, in a single line:" -echo "cardano-node run --config 
./state-cluster%%INSTANCE_NUM%%/config-bft1.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1 --topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json --host-addr 127.0.0.1 --port %%NODE_PORT_BASE%% --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --signing-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.key --delegation-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.json --shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - - -exec cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1 --topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json --host-addr 127.0.0.1 --port %%NODE_PORT_BASE%% --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --signing-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.key --delegation-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/byron-deleg.json --shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-bft1/op.cert "${UTXO_BACKEND_ARGS[@]}" "$@" diff --git a/cardano_node_tests/cluster_scripts/babbage/dbsync-config.yaml b/cardano_node_tests/cluster_scripts/babbage/dbsync-config.yaml deleted file mode 100644 index f6724feb4..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/dbsync-config.yaml +++ /dev/null @@ -1,115 +0,0 @@ -# Explorer DB Node configuration - -NetworkName: localnet - -EnableLogMetrics: False -EnableLogging: True - -# The default port is 8080 -# PrometheusPort: 8080 - -# The config file for the node we are connecting to. 
If this is not the correct -# config, it will likely lead to db-sync throwing up weird error messages from -# the consensus layer. -# The path to the node config file is relative to this config file. -NodeConfigFile: config-bft1.json - -# ------------------------------------------------------------------------------ -# Logging configuration follows. - -# global filter; messages must have at least this severity to pass: -minSeverity: Info - -# global file rotation settings: -rotation: - rpLogLimitBytes: 5000000 - rpKeepFilesNum: 10 - rpMaxAgeHours: 24 - -# these backends are initialized: -setupBackends: - - AggregationBK - - KatipBK - # - EditorBK - # - EKGViewBK - -# if not indicated otherwise, then messages are passed to these backends: -defaultBackends: - - KatipBK - -# if wanted, the GUI is listening on this port: -# hasGUI: 12787 - -# if wanted, the EKG interface is listening on this port: -# hasEKG: 12788 - -# here we set up outputs of logging in 'katip': -setupScribes: - - scKind: StdoutSK - scName: stdout - scFormat: ScText - scRotation: null - -# if not indicated otherwise, then log output is directed to this: -defaultScribes: - - - StdoutSK - - stdout - -# more options which can be passed as key-value pairs: -options: - cfokey: - value: "Release-1.0.0" - mapSubtrace: - benchmark: - contents: - - GhcRtsStats - - MonotonicClock - subtrace: ObservableTrace - '#ekgview': - contents: - - - tag: Contains - contents: 'cardano.epoch-validation.benchmark' - - - tag: Contains - contents: .monoclock.basic. - - - tag: Contains - contents: 'cardano.epoch-validation.benchmark' - - - tag: Contains - contents: diff.RTS.cpuNs.timed. - - - tag: StartsWith - contents: '#ekgview.#aggregation.cardano.epoch-validation.benchmark' - - - tag: Contains - contents: diff.RTS.gcNum.timed. - subtrace: FilterTrace - 'cardano.epoch-validation.utxo-stats': - # Change the `subtrace` value to `Neutral` in order to log - # `UTxO`-related messages during epoch validation. 
- subtrace: NoTrace - '#messagecounters.aggregation': - subtrace: NoTrace - '#messagecounters.ekgview': - subtrace: NoTrace - '#messagecounters.switchboard': - subtrace: NoTrace - '#messagecounters.katip': - subtrace: NoTrace - '#messagecounters.monitoring': - subtrace: NoTrace - 'cardano.#messagecounters.aggregation': - subtrace: NoTrace - 'cardano.#messagecounters.ekgview': - subtrace: NoTrace - 'cardano.#messagecounters.switchboard': - subtrace: NoTrace - 'cardano.#messagecounters.katip': - subtrace: NoTrace - 'cardano.#messagecounters.monitoring': - subtrace: NoTrace - mapBackends: - cardano.epoch-validation.benchmark: - - AggregationBK - '#aggregation.cardano.epoch-validation.benchmark': - - EKGViewBK - mapSeverity: - db-sync-node.Subscription: Error - db-sync-node.Mux: Error - db-sync-node: Info diff --git a/cardano_node_tests/cluster_scripts/babbage/empty-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage/empty-genesis.conway.spec.json deleted file mode 100644 index 4525ef4a5..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/empty-genesis.conway.spec.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "genDelegs": {} -} diff --git a/cardano_node_tests/cluster_scripts/babbage/genesis.alonzo.spec.json b/cardano_node_tests/cluster_scripts/babbage/genesis.alonzo.spec.json deleted file mode 100644 index abcaab932..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/genesis.alonzo.spec.json +++ /dev/null @@ -1,365 +0,0 @@ -{ - "collateralPercentage": 150, - "costModels": { - "PlutusV1": [ - 205665, - 812, - 1, - 1, - 1000, - 571, - 0, - 1, - 1000, - 24177, - 4, - 1, - 1000, - 32, - 117366, - 10475, - 4, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 100, - 100, - 23000, - 100, - 19537, - 32, - 175354, - 32, - 46417, - 4, - 221973, - 511, - 0, - 1, - 89141, - 32, - 497525, - 14068, - 4, - 2, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1000, - 28662, - 4, - 2, - 245000, - 216773, - 62, - 1, - 
1060367, - 12586, - 1, - 208512, - 421, - 1, - 187000, - 1000, - 52998, - 1, - 80436, - 32, - 43249, - 32, - 1000, - 32, - 80556, - 1, - 57667, - 4, - 1000, - 10, - 197145, - 156, - 1, - 197145, - 156, - 1, - 204924, - 473, - 1, - 208896, - 511, - 1, - 52467, - 32, - 64832, - 32, - 65493, - 32, - 22558, - 32, - 16563, - 32, - 76511, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 69522, - 11687, - 0, - 1, - 60091, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 196500, - 453240, - 220, - 0, - 1, - 1, - 806990, - 30482, - 4, - 1927926, - 82523, - 4, - 265318, - 0, - 4, - 0, - 85931, - 32, - 205665, - 812, - 1, - 1, - 41182, - 32, - 212342, - 32, - 31220, - 32, - 32696, - 32, - 43357, - 32, - 32247, - 32, - 38314, - 32, - 9462713, - 1021, - 10 - ], - "PlutusV2": [ - 205665, - 812, - 1, - 1, - 1000, - 571, - 0, - 1, - 1000, - 24177, - 4, - 1, - 1000, - 32, - 117366, - 10475, - 4, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 100, - 100, - 23000, - 100, - 19537, - 32, - 175354, - 32, - 46417, - 4, - 221973, - 511, - 0, - 1, - 89141, - 32, - 497525, - 14068, - 4, - 2, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1000, - 28662, - 4, - 2, - 245000, - 216773, - 62, - 1, - 1060367, - 12586, - 1, - 208512, - 421, - 1, - 187000, - 1000, - 52998, - 1, - 80436, - 32, - 43249, - 32, - 1000, - 32, - 80556, - 1, - 57667, - 4, - 1000, - 10, - 197145, - 156, - 1, - 197145, - 156, - 1, - 204924, - 473, - 1, - 208896, - 511, - 1, - 52467, - 32, - 64832, - 32, - 65493, - 32, - 22558, - 32, - 16563, - 32, - 76511, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 69522, - 11687, - 0, - 1, - 60091, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1159724, - 392670, - 0, - 2, - 806990, - 30482, - 4, - 1927926, - 82523, - 4, - 265318, - 0, - 4, - 0, - 85931, - 32, - 205665, - 812, - 1, - 1, - 41182, - 32, - 212342, - 32, - 31220, - 32, - 32696, - 32, - 43357, - 32, - 32247, - 32, - 38314, - 
32, - 35892428, - 10, - 9462713, - 1021, - 10, - 38887044, - 32947, - 10 - ] - }, - "executionPrices": { - "prMem": 0.0577, - "prSteps": 7.21e-05 - }, - "lovelacePerUTxOWord": 34482, - "maxBlockExUnits": { - "exUnitsMem": 62000000, - "exUnitsSteps": 40000000000 - }, - "maxCollateralInputs": 3, - "maxTxExUnits": { - "exUnitsMem": 14000000, - "exUnitsSteps": 10000000000 - }, - "maxValueSize": 5000 -} diff --git a/cardano_node_tests/cluster_scripts/babbage/genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage/genesis.conway.spec.json deleted file mode 100644 index 17b09d995..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/genesis.conway.spec.json +++ /dev/null @@ -1,292 +0,0 @@ -{ - "poolVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "hardForkInitiation": 0.51, - "ppSecurityGroup": 0.51 - }, - "dRepVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "updateToConstitution": 0.51, - "hardForkInitiation": 0.51, - "ppNetworkGroup": 0.51, - "ppEconomicGroup": 0.51, - "ppTechnicalGroup": 0.51, - "ppGovGroup": 0.51, - "treasuryWithdrawal": 0.51 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 11000, - "govActionLifetime": 2, - "govActionDeposit": 100000000, - "dRepDeposit": 2000000, - "dRepActivity": 100, - "minFeeRefScriptCostPerByte": 0, - "plutusV3CostModel": [ - 100788, - 420, - 1, - 1, - 1000, - 173, - 0, - 1, - 1000, - 59957, - 4, - 1, - 11183, - 32, - 201305, - 8356, - 4, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 100, - 100, - 16000, - 100, - 94375, - 32, - 132994, - 32, - 61462, - 4, - 72010, - 178, - 0, - 1, - 22151, - 32, - 91189, - 769, - 4, - 2, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 1, - 1000, - 42921, - 4, - 2, - 24548, - 29498, - 38, - 1, - 898148, - 27279, - 1, - 51775, - 558, - 1, - 39184, - 1000, - 60594, 
- 1, - 141895, - 32, - 83150, - 32, - 15299, - 32, - 76049, - 1, - 13169, - 4, - 22100, - 10, - 28999, - 74, - 1, - 28999, - 74, - 1, - 43285, - 552, - 1, - 44749, - 541, - 1, - 33852, - 32, - 68246, - 32, - 72362, - 32, - 7243, - 32, - 7391, - 32, - 11546, - 32, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 90434, - 519, - 0, - 1, - 74433, - 32, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 1, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 955506, - 213312, - 0, - 2, - 270652, - 22588, - 4, - 1457325, - 64566, - 4, - 20467, - 1, - 4, - 0, - 141992, - 32, - 100788, - 420, - 1, - 1, - 81663, - 32, - 59498, - 32, - 20142, - 32, - 24588, - 32, - 20744, - 32, - 25933, - 32, - 24623, - 32, - 43053543, - 10, - 53384111, - 14333, - 10, - 43574283, - 26308, - 10, - 16000, - 100, - 16000, - 100, - 962335, - 18, - 2780678, - 6, - 442008, - 1, - 52538055, - 3756, - 18, - 267929, - 18, - 76433006, - 8868, - 18, - 52948122, - 18, - 1995836, - 36, - 3227919, - 12, - 901022, - 1, - 166917843, - 4307, - 36, - 284546, - 36, - 158221314, - 26549, - 36, - 74698472, - 36, - 333849714, - 1, - 254006273, - 72, - 2174038, - 72, - 2261318, - 64571, - 4, - 207616, - 8310, - 4, - 1293828, - 28716, - 63, - 0, - 1, - 1006041, - 43623, - 251, - 0, - 1 - ], - "constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - }, - "threshold": 0.0 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage/genesis.spec.json b/cardano_node_tests/cluster_scripts/babbage/genesis.spec.json deleted file mode 100644 index d294f3fac..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/genesis.spec.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "activeSlotsCoeff": 0.1, - "protocolParams": { - "poolDeposit": 500000000, - "protocolVersion": { - "minor": 0, - "major": 2 - }, - "minUTxOValue": 1, - 
"decentralisationParam": 0.8, - "maxTxSize": 16384, - "minPoolCost": 0, - "minFeeA": 44, - "maxBlockBodySize": 65536, - "minFeeB": 155381, - "eMax": 18, - "extraEntropy": { - "tag": "NeutralNonce" - }, - "maxBlockHeaderSize": 1100, - "keyDeposit": 400000, - "nOpt": 500, - "rho": 0.0022, - "tau": 0.05, - "a0": 0.3 - }, - "protocolMagicId": 42, - "genDelegs": {}, - "updateQuorum": 1, - "networkId": "Testnet", - "initialFunds": {}, - "maxLovelaceSupply": 45000000000000000, - "networkMagic": 42, - "epochLength": 1000, - "staking": { - "pools": {}, - "stake": {} - }, - "systemStart": "2020-07-08T02:39:16.033076859Z", - "slotsPerKESPeriod": 129600, - "slotLength": 0.2, - "maxKESEvolutions": 64, - "securityParam": 10 -} diff --git a/cardano_node_tests/cluster_scripts/babbage/postgres-setup.sh b/cardano_node_tests/cluster_scripts/babbage/postgres-setup.sh deleted file mode 100644 index 2dc08d0c8..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/postgres-setup.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! 
/usr/bin/env -S nix develop --accept-flake-config github:IntersectMBO/cardano-node-tests#postgres -i -k CARDANO_NODE_SOCKET_PATH -k PGHOST -k PGPORT -k PGUSER --no-write-lock-file -c bash -# shellcheck shell=bash - -set -euo pipefail - -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -INSTANCE_NUM="${STATE_CLUSTER#*state-cluster}" -DATABASE_NAME="dbsync${INSTANCE_NUM}" - -PGPASSFILE="$STATE_CLUSTER/pgpass" -export PGHOST="${PGHOST:-localhost}" -export PGPORT="${PGPORT:-5432}" -export PGUSER="${PGUSER:-postgres}" - -echo "Deleting db $DATABASE_NAME" -psql -d "$DATABASE_NAME" -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = current_database() AND pid <> pg_backend_pid();" > /dev/null 2>&1 || : -dropdb --if-exists "$DATABASE_NAME" > /dev/null -echo "Setting up db $DATABASE_NAME" -createdb -T template0 --owner="$PGUSER" --encoding=UTF8 "$DATABASE_NAME" - -echo "${PGHOST}:${PGPORT}:${DATABASE_NAME}:${PGUSER}:secret" > "$PGPASSFILE" -chmod 600 "$PGPASSFILE" diff --git a/cardano_node_tests/cluster_scripts/babbage/release-genesis.alonzo.spec.json b/cardano_node_tests/cluster_scripts/babbage/release-genesis.alonzo.spec.json deleted file mode 100644 index 3970561a6..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/release-genesis.alonzo.spec.json +++ /dev/null @@ -1,371 +0,0 @@ -{ - "lovelacePerUTxOWord": 34482, - "executionPrices": { - "prSteps": { - "numerator": 721, - "denominator": 10000000 - }, - "prMem": { - "numerator": 577, - "denominator": 10000 - } - }, - "maxTxExUnits": { - "exUnitsMem": 14000000, - "exUnitsSteps": 10000000000 - }, - "maxBlockExUnits": { - "exUnitsMem": 62000000, - "exUnitsSteps": 40000000000 - }, - "maxValueSize": 5000, - "collateralPercentage": 150, - "maxCollateralInputs": 3, - "costModels": { - "PlutusV1": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - "addInteger-memory-arguments-intercept": 1, - 
"addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - "chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - "divideInteger-cpu-arguments-model-arguments-intercept": 453240, 
- "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - "equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - "equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, - "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, - "lessThanInteger-cpu-arguments-intercept": 208896, - 
"lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - "modInteger-cpu-arguments-model-arguments-slope": 220, - "modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - "sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - 
"sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - "subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - "tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10 - }, - "PlutusV2": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - "addInteger-memory-arguments-intercept": 1, - "addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 
23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - "chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - "divideInteger-cpu-arguments-model-arguments-intercept": 453240, - "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - "equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - 
"equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, - "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, - "lessThanInteger-cpu-arguments-intercept": 208896, - "lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - "modInteger-cpu-arguments-model-arguments-slope": 220, - 
"modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "serialiseData-cpu-arguments-intercept": 1159724, - "serialiseData-cpu-arguments-slope": 392670, - "serialiseData-memory-arguments-intercept": 0, - "serialiseData-memory-arguments-slope": 2, - "sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - "sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - "sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - "subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - 
"tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEcdsaSecp256k1Signature-cpu-arguments": 35892428, - "verifyEcdsaSecp256k1Signature-memory-arguments": 10, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10, - "verifySchnorrSecp256k1Signature-cpu-arguments-intercept": 38887044, - "verifySchnorrSecp256k1Signature-cpu-arguments-slope": 32947, - "verifySchnorrSecp256k1Signature-memory-arguments": 10 - } - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage/release-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage/release-genesis.conway.spec.json deleted file mode 100644 index 59ed2c5f4..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/release-genesis.conway.spec.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "poolVotingThresholds": { - "pvtCommitteeNormal": 0, - "pvtCommitteeNoConfidence": 0, - "pvtHardForkInitiation": 0, - "pvtMotionNoConfidence": 0 - }, - "dRepVotingThresholds": { - "dvtMotionNoConfidence": 0, - "dvtCommitteeNormal": 0, - "dvtCommitteeNoConfidence": 0, - "dvtUpdateToConstitution": 0, - "dvtHardForkInitiation": 0, - "dvtPPNetworkGroup": 0, - "dvtPPEconomicGroup": 0, - "dvtPPTechnicalGroup": 0, - "dvtPPGovGroup": 0, - "dvtTreasuryWithdrawal": 0 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 0, - "govActionLifetime": 0, - "govActionDeposit": 0, - "dRepDeposit": 0, - "dRepActivity": 0, - "constitution": { - "anchor": { - "url": "", - "dataHash": 
"0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - "keyHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 1, - "scriptHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 2 - }, - "quorum": 0.5 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage/release_8_9-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage/release_8_9-genesis.conway.spec.json deleted file mode 100644 index 12d461512..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/release_8_9-genesis.conway.spec.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "poolVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "hardForkInitiation": 0.51, - "ppSecurityGroup": 0.51 - }, - "dRepVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "updateToConstitution": 0.51, - "hardForkInitiation": 0.51, - "ppNetworkGroup": 0.51, - "ppEconomicGroup": 0.51, - "ppTechnicalGroup": 0.51, - "ppGovGroup": 0.51, - "treasuryWithdrawal": 0.51 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 11000, - "govActionLifetime": 2, - "govActionDeposit": 100000000, - "dRepDeposit": 2000000, - "dRepActivity": 100, - "minFeeRefScriptCostPerByte": 0, - "constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - }, - "quorum": 0.0, - "threshold": 0.0 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage/run-cardano-dbsync b/cardano_node_tests/cluster_scripts/babbage/run-cardano-dbsync deleted file mode 100644 index e094e1906..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/run-cardano-dbsync +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -set -uo pipefail - -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" 
-STATE_CLUSTER_NAME="${STATE_CLUSTER##*/}" - -export PGPASSFILE="$STATE_CLUSTER/pgpass" -export PGHOST="${PGHOST:-localhost}" -export PGPORT="${PGPORT:-5432}" -export PGUSER="${PGUSER:-postgres}" - -exec "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" --config "./$STATE_CLUSTER_NAME/dbsync-config.yaml" --socket-path "$CARDANO_NODE_SOCKET_PATH" --state-dir "./$STATE_CLUSTER_NAME/db-sync" --schema-dir "$DBSYNC_REPO/schema" diff --git a/cardano_node_tests/cluster_scripts/babbage/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/babbage/run-cardano-submit-api deleted file mode 100644 index 66fac02ae..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/run-cardano-submit-api +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" - -# TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 -metrics_port="$(cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; if [[ "$i" == *Invalid* ]]; then echo ""; else echo "--metrics-port %%METRICS_SUBMIT_API_PORT%%"; fi; })" - -echo "Starting cardano-submit-api: cardano-submit-api" - echo "--config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" - echo "--listen-address 127.0.0.1" - echo "--port %%SUBMIT_API_PORT%%" - echo "$metrics_port" - echo --testnet-magic "$testnet_magic" -echo "..or, once again, in a single line:" -echo cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% "$metrics_port" --testnet-magic "$testnet_magic" - -# shellcheck disable=SC2086 -exec cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% $metrics_port --testnet-magic 
"$testnet_magic" diff --git a/cardano_node_tests/cluster_scripts/babbage/start-cluster b/cardano_node_tests/cluster_scripts/babbage/start-cluster deleted file mode 100644 index 8091da974..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/start-cluster +++ /dev/null @@ -1,1118 +0,0 @@ -#!/usr/bin/env bash - -# controlling environment variables: -# DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository -# ENABLE_P2P - if set, local cluster will use P2P -# MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others -# UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset -# DRY_RUN - if set, will not start the cluster - -set -euo pipefail -sets="$-" - -SCRIPT_DIR="$(readlink -m "${0%/*}")" -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -STATE_CLUSTER_NAME="${STATE_CLUSTER##*/}" - -INSTANCE_NUM="%%INSTANCE_NUM%%" -if [[ "$SOCKET_PATH" != *"/state-cluster${INSTANCE_NUM}/"* ]]; then - echo "CARDANO_NODE_SOCKET_PATH must be set to a path containing 'state-cluster${INSTANCE_NUM}', line $LINENO" >&2 - exit 1 -fi - -NUM_BFT_NODES=1 -NUM_POOLS=%%NUM_POOLS%% -PROPOSAL_DELAY=5 -POOL_PLEDGE=1000000000000 - -FEE=5000000 - -SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" -NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" -MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" - -if [ -n "${MIXED_P2P:-""}" ]; then - export ENABLE_P2P=1 -fi - -if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then - echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" >&2 - exit 1 -fi - -if [ "$NUM_POOLS" -lt 3 ]; then - echo "NUM_POOLS must be at least 3" >&2 - exit 1 -fi - -cardano_cli_log() { - echo cardano-cli "$@" >> "$STATE_CLUSTER/start_cluster_cmds.log" - - for _ in {1..3}; do - set +e - out="$(cardano-cli "$@" 2>&1)" - retval="$?" 
- set -"$sets" - - case "$out" in - *"resource vanished"*) - printf "Retrying \`cardano-cli %s\`. Failure:\n%s\n" "$*" "$out" >&2 - sleep 1 - ;; - *) - if [ -n "$out" ]; then echo "$out"; fi - break - ;; - esac - done - - return "$retval" -} - -get_epoch() { - cardano_cli_log query tip --testnet-magic "$NETWORK_MAGIC" | jq -r '.epoch' -} - -get_slot() { - future_offset="${1:-0}" - cardano_cli_log query tip --testnet-magic "$NETWORK_MAGIC" | jq -r ".slot + $future_offset" -} - -get_era() { - cardano_cli_log query tip --testnet-magic "$NETWORK_MAGIC" | jq -r ".era" -} - -wait_for_era() { - for _ in {1..10}; do - era="$(get_era)" - if [ "$era" = "$1" ]; then - return - fi - sleep 2 - done - - echo "Unexpected era '$era' instead of '$1'" >&2 - exit 1 -} - -wait_for_epoch() { - for _ in {1..10}; do - epoch="$(get_epoch)" - if [ "$epoch" -ge "$1" ]; then - return - fi - sleep 2 - done - - echo "Unexpected epoch '$epoch' instead of '$1'" >&2 - exit 1 -} - -enable_submit_api() { - command -v cardano-submit-api >/dev/null 2>&1 || return 1 - - # TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 - # If the metrics port is not available, we can start the `cardano-submit-api` only on the first - # cluster instance. - [[ "$CARDANO_NODE_SOCKET_PATH" == */cluster0/* ]] && return 0 - if cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; [[ "$i" == *Invalid* ]]; }; then - return 1 - fi - - return 0 -} - -ENABLE_SUBMIT_API="$(enable_submit_api && echo 1 || echo 0)" - -if [ -e "$SCRIPT_DIR/shell_env" ]; then - # shellcheck disable=SC1090,SC1091 - source "$SCRIPT_DIR/shell_env" -fi - - -CERT_ERA_ARG=("--shelley-era") -if { cardano-cli stake-address registration-certificate --shelley-era 2>&1; true; } |\ - { read -r i; [[ "$i" == *Invalid* ]]; }; then - CERT_ERA_ARG=() -fi - -rm -rf "$STATE_CLUSTER" -mkdir -p "$STATE_CLUSTER"/{shelley,webserver,db-sync} -cd "$STATE_CLUSTER/.." 
- -cp "$SCRIPT_DIR"/cardano-node-* "$STATE_CLUSTER" -cp "$SCRIPT_DIR/run-cardano-submit-api" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/byron-params.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/dbsync-config.yaml" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" -cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/shelley/" - -if [ -n "${ENABLE_P2P:-""}" ]; then - # use P2P topology files - for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do - tfname="${tconf##*/p2p-}" - cp "$tconf" "${STATE_CLUSTER}/${tfname}" - done -else - cp "$SCRIPT_DIR"/topology-*.json "$STATE_CLUSTER" -fi - -case "${UTXO_BACKEND:=""}" in - "" | mem | disk) - echo "$UTXO_BACKEND" > "$STATE_CLUSTER/utxo_backend" - ;; - *) - echo "Unknown \`UTXO_BACKEND\`: '$UTXO_BACKEND', line $LINENO" >&2 - exit 1 - ;; -esac - -# enable db-sync service -if [ -n "${DBSYNC_REPO:-""}" ]; then - [ -e "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" ] || \ - { echo "The \`$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert - - # create clean database - if [ -z "${DRY_RUN:-""}" ]; then - "$SCRIPT_DIR/postgres-setup.sh" - fi - - cat >> "$STATE_CLUSTER/supervisor.conf" <> "$STATE_CLUSTER/supervisor.conf" < "$STATE_CLUSTER/cluster_start_time" - -cardano_cli_log byron genesis genesis \ - --protocol-magic "$NETWORK_MAGIC" \ - --k "$SECURITY_PARAM" \ - --n-poor-addresses 0 \ - --n-delegate-addresses "$NUM_BFT_NODES" \ - --total-balance "$MAX_SUPPLY" \ - --delegate-share 1 \ - --avvm-entry-count 0 \ - --avvm-entry-balance 0 \ - --protocol-parameters-file "$STATE_CLUSTER/byron-params.json" \ - --genesis-output-dir "$STATE_CLUSTER/byron" \ - --start-time "$START_TIME" - -mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" - -gen_genesis() { - cardano_cli_log genesis create \ - --genesis-dir "$STATE_CLUSTER/shelley" \ - --testnet-magic "$NETWORK_MAGIC" \ - --gen-genesis-keys "$NUM_BFT_NODES" \ - 
--start-time "$START_TIME_SHELLEY" \ - --gen-utxo-keys 1 -} - -gen_genesis && genesis_created=1 || genesis_created=0 - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/master-genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/release_8_9-genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/release_8_9-genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/release-genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/release-genesis.conway.spec.json" - mv "$STATE_CLUSTER/shelley/empty-genesis.conway.spec.json" \ - "$STATE_CLUSTER/shelley/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/shelley/genesis.alonzo.spec.json" \ - "$STATE_CLUSTER/shelley/master-genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/shelley/release-genesis.alonzo.spec.json" \ - "$STATE_CLUSTER/shelley/genesis.alonzo.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -jq -r ' - .initialFunds = {}' \ - < "$STATE_CLUSTER/shelley/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json_jq" -cat 
"$STATE_CLUSTER/shelley/genesis.json_jq" > "$STATE_CLUSTER/shelley/genesis.json" -rm -f "$STATE_CLUSTER/shelley/genesis.json_jq" - -BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ - "$STATE_CLUSTER/byron/genesis.json")" -SHELLEY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.json")" -ALONZO_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.alonzo.json")" - -CONWAY_GENESIS_HASH="" -EXP_PROTOCOLS_KEY="TestEnableDevelopmentNetworkProtocols" -EXP_PROTOCOLS_VAL="false" - -# conway genesis is not present on node < 1.36.0, so this can be used for checking node version -if [ -e "$STATE_CLUSTER/shelley/genesis.conway.json" ]; then - CONWAY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" - EXP_PROTOCOLS_KEY="ExperimentalProtocolsEnabled" -fi - -for conf in "$SCRIPT_DIR"/config-*.json; do - fname="${conf##*/}" - jq \ - --arg byron_hash "$BYRON_GENESIS_HASH" \ - --arg shelley_hash "$SHELLEY_GENESIS_HASH" \ - --arg alonzo_hash "$ALONZO_GENESIS_HASH" \ - '.ByronGenesisHash = $byron_hash - | .ShelleyGenesisHash = $shelley_hash - | .AlonzoGenesisHash = $alonzo_hash' \ - "$conf" > "$STATE_CLUSTER/$fname" - - if [ -n "$CONWAY_GENESIS_HASH" ]; then - jq \ - --arg conway_hash "$CONWAY_GENESIS_HASH" \ - --arg exp_protocols_key "$EXP_PROTOCOLS_KEY" \ - '.ConwayGenesisFile = "shelley/genesis.conway.json" - | .ConwayGenesisHash = $conway_hash' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" - fi - - # enable P2P - if [ -n "${ENABLE_P2P:-""}" ]; then - # experimental protocols are needed for P2P on node < 1.36.0 - if [ -z "$CONWAY_GENESIS_HASH" ]; then - EXP_PROTOCOLS_VAL="true" - fi - - # setup mix of P2P and legacy - if [ -n "${MIXED_P2P:-""}" ]; then - if [ "$fname" = "config-bft1.json" ]; then - # 
use legacy topology file for bft1 - cp -f "$SCRIPT_DIR"/topology-bft1.json "$STATE_CLUSTER" - continue - fi - - # use legacy topology files for odd numbered pools - pool_num="${fname##*-pool}" - pool_num="${pool_num%.json}" - if [ "$((pool_num % 2))" != 0 ]; then - cp -f "$SCRIPT_DIR/topology-pool${pool_num}.json" "$STATE_CLUSTER" - continue - fi - fi - - jq \ - --arg exp_protocols_key "$EXP_PROTOCOLS_KEY" \ - --argjson exp_protocols_val "$EXP_PROTOCOLS_VAL" \ - '.EnableP2P = true - | .MaxConcurrencyBulkSync = 2 - | .MaxConcurrencyDeadline = 4 - | .TargetNumberOfRootPeers = 100 - | .TargetNumberOfKnownPeers = 100 - | .TargetNumberOfEstablishedPeers = 50 - | .TargetNumberOfActivePeers = 20 - | .[$exp_protocols_key] = $exp_protocols_val - | .TraceBlockFetchClient = true - | .TraceChainSyncClient = true' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" - fi -done - -for i in $(seq 1 $NUM_BFT_NODES); do - mkdir -p "$STATE_CLUSTER/nodes/node-bft$i" - ln -s "../../shelley/delegate-keys/delegate$i.vrf.skey" "$STATE_CLUSTER/nodes/node-bft$i/vrf.skey" - ln -s "../../shelley/delegate-keys/delegate$i.vrf.vkey" "$STATE_CLUSTER/nodes/node-bft$i/vrf.vkey" - - cardano_cli_log node key-gen-KES \ - --verification-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.skey" - - cardano_cli_log node issue-op-cert \ - --kes-period 0 \ - --cold-signing-key-file "$STATE_CLUSTER/shelley/delegate-keys/delegate$i.skey" \ - --kes-verification-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.vkey" \ - --operational-certificate-issue-counter-file \ - "$STATE_CLUSTER/shelley/delegate-keys/delegate$i.counter" \ - --out-file "$STATE_CLUSTER/nodes/node-bft$i/op.cert" - - INDEX="$(printf "%03d" $((i - 1)))" - - cardano_cli_log byron key keygen \ - --secret "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" - - cardano_cli_log byron key 
signing-key-address \ - --byron-formats \ - --testnet-magic "$NETWORK_MAGIC" \ - --secret "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" > "$STATE_CLUSTER/byron/address-$INDEX" - - # write Genesis addresses to files - cardano_cli_log byron key signing-key-address \ - --byron-formats \ - --testnet-magic "$NETWORK_MAGIC" \ - --secret "$STATE_CLUSTER/byron/genesis-keys.$INDEX.key" \ - > "$STATE_CLUSTER/byron/genesis-address-$INDEX" - - ln -s "../../byron/delegate-keys.$INDEX.key" "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.key" - ln -s "../../byron/delegation-cert.$INDEX.json" "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.json" - - # create Byron address that moves funds out of the genesis UTxO into a regular address - cardano_cli_log byron transaction issue-genesis-utxo-expenditure \ - --genesis-json "$STATE_CLUSTER/byron/genesis.json" \ - --testnet-magic "$NETWORK_MAGIC" \ - --byron-formats \ - --tx "$STATE_CLUSTER/byron/tx$i.tx" \ - --wallet-key "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.key" \ - --rich-addr-from "$(head -n 1 "$STATE_CLUSTER/byron/genesis-address-$INDEX")" \ - --txout "(\"$(head -n 1 "$STATE_CLUSTER/byron/address-$INDEX")\", $FUNDS_PER_BYRON_ADDRESS)" - - # convert to Shelley addresses and keys - cardano_cli_log key convert-byron-key \ - --byron-signing-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" \ - --out-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.skey" \ - --byron-payment-key-type - - cardano_cli_log key verification-key \ - --signing-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.skey" \ - --verification-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.vkey" - - cardano_cli_log address build \ - --testnet-magic "$NETWORK_MAGIC" \ - --payment-verification-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.vkey" \ - > "$STATE_CLUSTER/byron/address-$INDEX-converted" - - BFT_PORT=$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" )) - echo "$BFT_PORT" > 
"$STATE_CLUSTER/nodes/node-bft$i/port" -done - -for i in $(seq 1 "$NUM_POOLS"); do - mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" - echo "Generating Pool $i Secrets" - - # pool owner addresses and keys - cardano_cli_log address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" - cardano_cli_log stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" - # payment address - cardano_cli_log address build \ - --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" - # stake address - cardano_cli_log stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" - # stake address registration cert - cardano_cli_log stake-address registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" - - # stake reward keys - cardano_cli_log stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" - # stake reward address registration cert - cardano_cli_log stake-address registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - - # pool keys - cardano_cli_log node key-gen \ - --cold-verification-key-file 
"$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --cold-signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" - cardano_cli_log node key-gen-KES \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/kes.skey" - cardano_cli_log node key-gen-VRF \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.skey" - - # stake address delegation certs - cardano_cli_log stake-address delegation-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" - - # pool opcert - cardano_cli_log node issue-op-cert \ - --kes-period 0 \ - --cold-signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - --kes-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" \ - --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/op.cert" - - POOL_NAME="TestPool$i" - POOL_DESC="Test Pool $i" - POOL_TICKER="TP$i" - - cat > "$STATE_CLUSTER/webserver/pool$i.html" < - - -$POOL_NAME - - -name: $POOL_NAME
-description: $POOL_DESC
-ticker: $POOL_TICKER
- - -EoF - - echo "Generating Pool $i Metadata" - jq -n \ - --arg name "$POOL_NAME" \ - --arg description "$POOL_DESC" \ - --arg ticker "$POOL_TICKER" \ - --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool$i.html" \ - '{"name": $name, "description": $description, "ticker": $ticker, "homepage": $homepage}' \ - > "$STATE_CLUSTER/webserver/pool$i.json" - - METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - METADATA_HASH=$(cardano_cli_log stake-pool metadata-hash --pool-metadata-file \ - "$STATE_CLUSTER/webserver/pool$i.json") - POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) - echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" - echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" - - cardano_cli_log stake-pool registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ - --pool-pledge "$POOL_PLEDGE" \ - --pool-margin 0.35 \ - --pool-cost 600 \ - --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --metadata-url "$METADATA_URL" \ - --metadata-hash "$METADATA_HASH" \ - --pool-relay-port "$POOL_PORT" \ - --pool-relay-ipv4 "127.0.0.1" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/register.cert" -done - -mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" -mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" -rmdir "$STATE_CLUSTER/shelley/utxo-keys" - -# create scripts for cluster starting / stopping -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% 
restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "$STATE_CLUSTER/supervisorctl" - -cat > "$STATE_CLUSTER/supervisord_start" < "$STATE_CLUSTER/supervisord_stop" <&2; exit 1; } # assert - -# give nodes time to establish connections with each other -sleep 5 - -echo "Moving funds out of Byron genesis" -for i in $(seq 1 $NUM_BFT_NODES); do - cardano_cli_log byron transaction submit-tx \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx "$STATE_CLUSTER/byron/tx$i.tx" -done - -# length of Byron era is hardcoded to 200 sec (and we already slept for at least 5 sec) -echo "Waiting 195 sec for Shelley era to start" -sleep 195 - - -# -# In Shelley era -# - -# variables used in Shelley-based eras -GENESIS_VERIFICATION=() -for vkey in "$STATE_CLUSTER"/shelley/genesis-keys/genesis?.vkey; do - GENESIS_VERIFICATION+=("--genesis-verification-key-file" "$vkey") -done - -GENESIS_SIGNING=() -for skey in "$STATE_CLUSTER"/shelley/genesis-keys/genesis?.skey; do - GENESIS_SIGNING+=("--signing-key-file" "$skey") -done - -DELEGATE_SIGNING=() -for skey in "$STATE_CLUSTER"/shelley/delegate-keys/delegate?.skey; do - DELEGATE_SIGNING+=("--signing-key-file" "$skey") -done - -FAUCET_ADDR="$(<"$STATE_CLUSTER"/byron/address-000-converted)" -FAUCET_SKEY="$STATE_CLUSTER/byron/payment-keys.000-converted.skey" - - -wait_for_era "Shelley" - -# start db-sync -if [ -n "${DBSYNC_REPO:-""}" ]; then - echo "Starting db-sync" - supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start dbsync -fi - -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to transfer to Allegra, transferring funds to pool owners, registering pools and delegations" - -ALLEGRA_HF_PROPOSAL_FILE="$STATE_CLUSTER/shelley/update-proposal-allegra.proposal" -ALLEGRA_TX_BASE="$STATE_CLUSTER/shelley/transfer-register-delegate" - -cardano_cli_log governance create-update-proposal \ - --out-file "$ALLEGRA_HF_PROPOSAL_FILE" \ - --epoch 
"$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --protocol-major-version 3 \ - --protocol-minor-version 0 - -# Transfer funds, register pools and delegations, submit update proposal, all in one big transaction: - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -DEPOSITS="$(jq '.protocolParams.poolDeposit + (2 * .protocolParams.keyDeposit)' \ - < "$STATE_CLUSTER/shelley/genesis.json")" -NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" -STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE))" - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" - -TTL="$(get_slot 1000)" - -POOL_ARGS=() -for i in $(seq 1 "$NUM_POOLS"); do - POOL_ARGS+=( \ - "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ - ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do - POOL_SIGNING+=( \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - ) -done - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE - NEEDED_AMOUNT))" - -cardano_cli_log transaction build-raw \ - --shelley-era \ - --ttl "$TTL" \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - 
"${POOL_ARGS[@]}" \ - --update-proposal-file "$ALLEGRA_HF_PROPOSAL_FILE" \ - --out-file "${ALLEGRA_TX_BASE}-tx.txbody" - -cardano_cli_log transaction sign \ - "${POOL_SIGNING[@]}" \ - "${GENESIS_SIGNING[@]}" \ - "${DELEGATE_SIGNING[@]}" \ - --signing-key-file "$FAUCET_SKEY" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${ALLEGRA_TX_BASE}-tx.txbody" \ - --out-file "${ALLEGRA_TX_BASE}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${ALLEGRA_TX_BASE}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -EPOCH_SEC="$(jq '.epochLength * .slotLength' < "$STATE_CLUSTER/shelley/genesis.json")" -NEXT_EPOCH_SLEEP="$((EPOCH_SEC - PROPOSAL_DELAY))" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Allegra era to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Allegra era -# - - -wait_for_era "Allegra" - -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to transfer to Mary, set d = 0" - -MARY_HF_PROPOSAL="$STATE_CLUSTER/shelley/update-proposal-mary" - -cardano_cli_log governance create-update-proposal \ - --out-file "${MARY_HF_PROPOSAL}.proposal" \ - --epoch "$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --decentralization-parameter 0 \ - --protocol-major-version 4 \ - --protocol-minor-version 0 - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" - -cardano_cli_log transaction build-raw \ - --allegra-era \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --update-proposal-file 
"${MARY_HF_PROPOSAL}.proposal" \ - --out-file "${MARY_HF_PROPOSAL}-tx.txbody" - -cardano_cli_log transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${DELEGATE_SIGNING[@]}" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${MARY_HF_PROPOSAL}-tx.txbody" \ - --out-file "${MARY_HF_PROPOSAL}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${MARY_HF_PROPOSAL}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Mary era to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Mary era -# - - -wait_for_era "Mary" - -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to transfer to Alonzo" - -ALONZO_UPDATE_PROPOSAL="$STATE_CLUSTER/shelley/update-proposal-alonzo" - -cardano_cli_log governance create-update-proposal \ - --out-file "${ALONZO_UPDATE_PROPOSAL}.proposal" \ - --epoch "$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --protocol-major-version 5 \ - --protocol-minor-version 0 - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" - -cardano_cli_log transaction build-raw \ - --mary-era \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --update-proposal-file "${ALONZO_UPDATE_PROPOSAL}.proposal" \ - --out-file "${ALONZO_UPDATE_PROPOSAL}-tx.txbody" - -cardano_cli_log transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${DELEGATE_SIGNING[@]}" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${ALONZO_UPDATE_PROPOSAL}-tx.txbody" \ 
- --out-file "${ALONZO_UPDATE_PROPOSAL}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${ALONZO_UPDATE_PROPOSAL}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Alonzo era to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Alonzo era -# - - -wait_for_era "Alonzo" - -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to update to PV6" - -ALONZO_UPDATE_PROPOSAL_PV6="$STATE_CLUSTER/shelley/update-proposal-alonzo-pv6" - -# protocol version + dapps parameters update -cardano_cli_log governance create-update-proposal \ - --out-file "${ALONZO_UPDATE_PROPOSAL_PV6}.proposal" \ - --epoch "$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --protocol-major-version 6 \ - --protocol-minor-version 0 - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" - -cardano_cli_log transaction build-raw \ - --alonzo-era \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --update-proposal-file "${ALONZO_UPDATE_PROPOSAL_PV6}.proposal" \ - --out-file "${ALONZO_UPDATE_PROPOSAL_PV6}-tx.txbody" - -cardano_cli_log transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${DELEGATE_SIGNING[@]}" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${ALONZO_UPDATE_PROPOSAL_PV6}-tx.txbody" \ - --out-file "${ALONZO_UPDATE_PROPOSAL_PV6}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${ALONZO_UPDATE_PROPOSAL_PV6}-tx.tx" \ - --testnet-magic 
"$NETWORK_MAGIC" - - -ALONZO_EPOCH="$(get_epoch)" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Alonzo era with PV6 to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Alonzo era with PV6 -# - -wait_for_epoch "$((ALONZO_EPOCH + 1))" - -# start cardano-submit-api -if [ "$ENABLE_SUBMIT_API" -eq 1 ]; then - echo "Starting cardano-submit-api" - supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start submit_api -fi - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" - -[ "$PROTOCOL_VERSION" = 6 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert - -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to update to Babbage" - -BABBAGE_UPDATE_PROPOSAL="$STATE_CLUSTER/shelley/update-proposal-babbage" - -# protocol version + dapps parameters update -cardano_cli_log governance create-update-proposal \ - --out-file "${BABBAGE_UPDATE_PROPOSAL}.proposal" \ - --epoch "$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --protocol-major-version 7 \ - --protocol-minor-version 0 - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" - -cardano_cli_log transaction build-raw \ - --alonzo-era \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --update-proposal-file "${BABBAGE_UPDATE_PROPOSAL}.proposal" \ 
- --out-file "${BABBAGE_UPDATE_PROPOSAL}-tx.txbody" - -cardano_cli_log transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${DELEGATE_SIGNING[@]}" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${BABBAGE_UPDATE_PROPOSAL}-tx.txbody" \ - --out-file "${BABBAGE_UPDATE_PROPOSAL}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${BABBAGE_UPDATE_PROPOSAL}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Babbage era to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Babbage era -# - -wait_for_era "Babbage" - -# update to PV8 -sleep "$PROPOSAL_DELAY" -echo "Submitting update proposal to update to PV8" - -BABBAGE_UPDATE_PROPOSAL_PV8="$STATE_CLUSTER/shelley/update-proposal-babbage-pv8" - -# protocol version 8 -cardano_cli_log governance create-update-proposal \ - --out-file "${BABBAGE_UPDATE_PROPOSAL_PV8}.proposal" \ - --epoch "$(get_epoch)" \ - "${GENESIS_VERIFICATION[@]}" \ - --protocol-major-version 8 \ - --protocol-minor-version 0 - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" - -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" - -cardano_cli_log transaction build-raw \ - --babbage-era \ - --fee "$FEE" \ - "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --update-proposal-file "${BABBAGE_UPDATE_PROPOSAL_PV8}.proposal" \ - --out-file "${BABBAGE_UPDATE_PROPOSAL_PV8}-tx.txbody" - -cardano_cli_log transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${DELEGATE_SIGNING[@]}" \ - --testnet-magic 
"$NETWORK_MAGIC" \ - --tx-body-file "${BABBAGE_UPDATE_PROPOSAL_PV8}-tx.txbody" \ - --out-file "${BABBAGE_UPDATE_PROPOSAL_PV8}-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "${BABBAGE_UPDATE_PROPOSAL_PV8}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -BABBAGE_EPOCH="$(get_epoch)" - -echo "Waiting $NEXT_EPOCH_SLEEP sec for Babbage era with PV8 to start" -sleep "$NEXT_EPOCH_SLEEP" - - -# -# In Babbage era with PV8 -# - -wait_for_epoch "$((BABBAGE_EPOCH + 1))" - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" - -[ "$PROTOCOL_VERSION" = 8 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert - -echo "Cluster started. Run \`$SCRIPT_DIR/stop-cluster\` to stop" diff --git a/cardano_node_tests/cluster_scripts/babbage/stop-cluster b/cardano_node_tests/cluster_scripts/babbage/stop-cluster deleted file mode 100644 index 42fad2d1e..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/stop-cluster +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -set -uo pipefail - -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -PID_FILE="${STATE_CLUSTER}/supervisord.pid" - -INSTANCE_NUM="%%INSTANCE_NUM%%" -if [[ "$SOCKET_PATH" != *"/state-cluster${INSTANCE_NUM}/"* ]]; then - echo "CARDANO_NODE_SOCKET_PATH must be set to a path containing 'state-cluster${INSTANCE_NUM}', line $LINENO" >&2 - exit 1 -fi - -supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% stop all - -if [ ! -f "$PID_FILE" ]; then - echo "Cluster is not running!" - exit 0 -fi - -PID="$(<"$PID_FILE")" -for _ in {1..5}; do - if ! kill "$PID"; then - break - fi - sleep 1 - if [ ! -f "$PID_FILE" ]; then - break - fi -done - -rm -f "$PID_FILE" -echo "Cluster terminated!" 
diff --git a/cardano_node_tests/cluster_scripts/babbage/submit-api-config.json b/cardano_node_tests/cluster_scripts/babbage/submit-api-config.json deleted file mode 100644 index 36dd121ce..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/submit-api-config.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "EnableLogMetrics": false, - "EnableLogging": true, - "RequiresNetworkMagic": "RequiresMagic", - "defaultBackends": [ - "KatipBK" - ], - "defaultScribes": [ - [ - "StdoutSK", - "stdout" - ] - ], - "minSeverity": "Info", - "options": { - "cfokey": { - "value": "Release-1.0.0" - }, - "mapBackends": {}, - "mapSeverity": { - "db-sync-node": "Info", - "db-sync-node.Mux": "Error", - "db-sync-node.Subscription": "Error" - }, - "mapSubtrace": { - "#ekgview": { - "contents": [ - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": ".monoclock.basic.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": "diff.RTS.cpuNs.timed.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "#ekgview.#aggregation.cardano.epoch-validation.benchmark", - "tag": "StartsWith" - }, - [ - { - "contents": "diff.RTS.gcNum.timed.", - "tag": "Contains" - } - ] - ] - ], - "subtrace": "FilterTrace" - }, - "#messagecounters.aggregation": { - "subtrace": "NoTrace" - }, - "#messagecounters.ekgview": { - "subtrace": "NoTrace" - }, - "#messagecounters.katip": { - "subtrace": "NoTrace" - }, - "#messagecounters.monitoring": { - "subtrace": "NoTrace" - }, - "#messagecounters.switchboard": { - "subtrace": "NoTrace" - }, - "benchmark": { - "contents": [ - "GhcRtsStats", - "MonotonicClock" - ], - "subtrace": "ObservableTrace" - }, - "cardano.epoch-validation.utxo-stats": { - "subtrace": "NoTrace" - } - } - }, - "rotation": { - "rpKeepFilesNum": 10, - "rpLogLimitBytes": 5000000, - "rpMaxAgeHours": 24 - }, - "setupBackends": [ - "AggregationBK", - "KatipBK" - ], - 
"setupScribes": [ - { - "scFormat": "ScText", - "scKind": "StdoutSK", - "scName": "stdout", - "scRotation": null - } - ] -} diff --git a/cardano_node_tests/cluster_scripts/babbage/template-cardano-node-pool b/cardano_node_tests/cluster_scripts/babbage/template-cardano-node-pool deleted file mode 100644 index 66404c64c..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/template-cardano-node-pool +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -if [ -e ./state-cluster%%INSTANCE_NUM%%/utxo_backend ]; then - UTXO_BACKEND="$(<./state-cluster%%INSTANCE_NUM%%/utxo_backend)" -fi - -case "${UTXO_BACKEND:-""}" in - mem) - UTXO_BACKEND_ARGS=("--utxos-in-memory") - ;; - disk) - UTXO_BACKEND_ARGS=("--utxos-on-disk") - ;; - *) - UTXO_BACKEND_ARGS=() - ;; -esac - -echo "Starting cardano-node run: cardano-node run" - echo "--config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json" - echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%%" - echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json" - echo "--host-addr 127.0.0.1" - echo "--port %%NODE_PORT%%" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket" - echo "--shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey" - echo "--shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey" - echo "--shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - -echo "..or, once again, in a single line:" -echo "cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%% --topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json --host-addr 127.0.0.1 --port %%NODE_PORT%% --socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket --shelley-vrf-key 
./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - - -exec cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%% --topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json --host-addr 127.0.0.1 --port %%NODE_PORT%% --socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket --shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert "${UTXO_BACKEND_ARGS[@]}" "$@" diff --git a/cardano_node_tests/cluster_scripts/babbage/template-config.json b/cardano_node_tests/cluster_scripts/babbage/template-config.json deleted file mode 100644 index 49eec74cd..000000000 --- a/cardano_node_tests/cluster_scripts/babbage/template-config.json +++ /dev/null @@ -1,180 +0,0 @@ -{ - "ApplicationName": "cardano-sl", - "ApplicationVersion": 1, - "ByronGenesisFile": "byron/genesis.json", - "LastKnownBlockVersion-Alt": 0, - "LastKnownBlockVersion-Major": 2, - "LastKnownBlockVersion-Minor": 0, - "PBftSignatureThreshold": 1, - "Protocol": "Cardano", - "RequiresNetworkMagic": "RequiresMagic", - "ShelleyGenesisFile": "shelley/genesis.json", - "AlonzoGenesisFile": "shelley/genesis.alonzo.json", - "TraceBlockFetchClient": false, - "TraceBlockFetchDecisions": false, - "TraceBlockFetchProtocol": false, - "TraceBlockFetchProtocolSerialised": false, - "TraceBlockFetchServer": false, - "TraceChainDb": true, - "TraceChainSyncBlockServer": false, - "TraceChainSyncClient": false, - "TraceChainSyncHeaderServer": false, - 
"TraceChainSyncProtocol": false, - "TraceDNSResolver": true, - "TraceDNSSubscription": true, - "TraceErrorPolicy": true, - "TraceForge": true, - "TraceHandshake": true, - "TraceIpSubscription": true, - "TraceLocalChainSyncProtocol": false, - "TraceLocalErrorPolicy": true, - "TraceLocalHandshake": false, - "TraceLocalTxSubmissionProtocol": false, - "TraceLocalTxSubmissionServer": false, - "TraceMempool": true, - "TraceMux": true, - "TraceConnectionManager": true, - "TraceConnectionManagerTransitions": true, - "TracePeerSelection": true, - "TracePeerSelectionActions": true, - "TraceDebugPeerSelection": true, - "TraceTxInbound": false, - "TraceTxOutbound": false, - "TraceTxSubmissionProtocol": false, - "TraceInboundGovernor": true, - "TraceServer": true, - "TraceInboundGovernorCounters": true, - "TraceInboundGovernorTransitions": true, - "DebugPeerSelectionInitiator": true, - "DebugPeerSelectionInitiatorResponder": true, - "TracingVerbosity": "NormalVerbosity", - "TurnOnLogMetrics": true, - "TurnOnLogging": true, - "defaultBackends": [ - "KatipBK" - ], - "defaultScribes": [ - [ - "StdoutSK", - "stdout" - ] - ], - "hasEKG": %%EKG_PORT%%, - "hasPrometheus": [ - "127.0.0.1", - %%PROMETHEUS_PORT%% - ], - "minSeverity": "Info", - "options": { - "mapSeverity": { - "cardano.node.ConnectionManager": "Debug", - "cardano.node.ConnectionManagerTransition": "Debug", - "cardano.node.PeerSelection": "Info", - "cardano.node.DebugPeerSelection": "Debug", - "cardano.node.PeerSelectionActions": "Debug", - "cardano.node.Handshake": "Debug", - "cardano.node.Mux": "Info", - "cardano.node.ChainSyncProtocol": "Error", - "cardano.node.InboundGovernor": "Debug", - "cardano.node.resources": "Notice", - "cardano.node.InboundGovernor": "Debug", - "cardano.node.ConnectionManagerTransitions": "Debug" - }, - "mapBackends": { - "cardano.node-metrics": [ - "EKGViewBK" - ], - "cardano.node.BlockFetchDecision.peers": [ - "EKGViewBK" - ], - "cardano.node.ChainDB.metrics": [ - "EKGViewBK" - ], - 
"cardano.node.Forge.metrics": [ - "EKGViewBK" - ], - "cardano.node.metrics": [ - "EKGViewBK" - ], - "cardano.node.resources": [ - "EKGViewBK" - ] - }, - "mapSubtrace": { - "#ekgview": { - "contents": [ - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": ".monoclock.basic.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": "diff.RTS.cpuNs.timed.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "#ekgview.#aggregation.cardano.epoch-validation.benchmark", - "tag": "StartsWith" - }, - [ - { - "contents": "diff.RTS.gcNum.timed.", - "tag": "Contains" - } - ] - ] - ], - "subtrace": "FilterTrace" - }, - "benchmark": { - "contents": [ - "GhcRtsStats", - "MonotonicClock" - ], - "subtrace": "ObservableTrace" - }, - "cardano.epoch-validation.utxo-stats": { - "subtrace": "NoTrace" - }, - "cardano.node-metrics": { - "subtrace": "Neutral" - }, - "cardano.node.metrics": { - "subtrace": "Neutral" - } - } - }, - "rotation": { - "rpKeepFilesNum": 10, - "rpLogLimitBytes": 5000000, - "rpMaxAgeHours": 24 - }, - "setupBackends": [ - "KatipBK" - ], - "setupScribes": [ - { - "scFormat": "ScText", - "scKind": "StdoutSK", - "scName": "stdout", - "scRotation": null - } - ], - "TestShelleyHardForkAtEpoch": 1 -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/byron-params.json b/cardano_node_tests/cluster_scripts/babbage_fast/byron-params.json deleted file mode 100644 index d424e4d50..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/byron-params.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "heavyDelThd": "300000000000", - "maxBlockSize": "2000000", - "maxTxSize": "4096", - "maxHeaderSize": "2000000", - "maxProposalSize": "700", - "mpcThd": "20000000000000", - "scriptVersion": 0, - "slotDuration": "2000", - "softforkRule": { - "initThd": "900000000000000", - "minThd": "600000000000000", - "thdDecrement": "50000000000000" 
- }, - "txFeePolicy": { - "multiplier": "43946000000", - "summand": "155381000000000" - }, - "unlockStakeEpoch": "18446744073709551615", - "updateImplicit": "10000", - "updateProposalThd": "100000000000000", - "updateVoteThd": "1000000000000" -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/cardano-node-bft1 b/cardano_node_tests/cluster_scripts/babbage_fast/cardano-node-bft1 deleted file mode 100644 index 05b2ee118..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/cardano-node-bft1 +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash - -if [ -e ./state-cluster%%INSTANCE_NUM%%/utxo_backend ]; then - UTXO_BACKEND="$(<./state-cluster%%INSTANCE_NUM%%/utxo_backend)" -fi - -case "${UTXO_BACKEND:-""}" in - mem) - UTXO_BACKEND_ARGS=("--utxos-in-memory") - ;; - disk) - UTXO_BACKEND_ARGS=("--utxos-on-disk") - ;; - *) - UTXO_BACKEND_ARGS=() - ;; -esac - -echo "Starting cardano-node run: cardano-node run" - echo "--config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json" - echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1" - echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json" - echo "--host-addr 127.0.0.1" - echo "--port %%NODE_PORT_BASE%%" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket ${UTXO_BACKEND_ARGS[*]} $*" - -echo "..or, once again, in a single line:" -echo "cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1 --topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json --host-addr 127.0.0.1 --port %%NODE_PORT_BASE%% --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket ${UTXO_BACKEND_ARGS[*]} $*" - - -exec cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1 --topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json --host-addr 127.0.0.1 --port %%NODE_PORT_BASE%% --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket 
"${UTXO_BACKEND_ARGS[@]}" "$@" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/dbsync-config.yaml b/cardano_node_tests/cluster_scripts/babbage_fast/dbsync-config.yaml deleted file mode 100644 index f6724feb4..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/dbsync-config.yaml +++ /dev/null @@ -1,115 +0,0 @@ -# Explorer DB Node configuration - -NetworkName: localnet - -EnableLogMetrics: False -EnableLogging: True - -# The default port is 8080 -# PrometheusPort: 8080 - -# The config file for the node we are connecting to. If this is not the correct -# config, it will likely lead to db-sync throwing up weird error messages from -# the consensus layer. -# The path to the node config file is relative to this config file. -NodeConfigFile: config-bft1.json - -# ------------------------------------------------------------------------------ -# Logging configuration follows. - -# global filter; messages must have at least this severity to pass: -minSeverity: Info - -# global file rotation settings: -rotation: - rpLogLimitBytes: 5000000 - rpKeepFilesNum: 10 - rpMaxAgeHours: 24 - -# these backends are initialized: -setupBackends: - - AggregationBK - - KatipBK - # - EditorBK - # - EKGViewBK - -# if not indicated otherwise, then messages are passed to these backends: -defaultBackends: - - KatipBK - -# if wanted, the GUI is listening on this port: -# hasGUI: 12787 - -# if wanted, the EKG interface is listening on this port: -# hasEKG: 12788 - -# here we set up outputs of logging in 'katip': -setupScribes: - - scKind: StdoutSK - scName: stdout - scFormat: ScText - scRotation: null - -# if not indicated otherwise, then log output is directed to this: -defaultScribes: - - - StdoutSK - - stdout - -# more options which can be passed as key-value pairs: -options: - cfokey: - value: "Release-1.0.0" - mapSubtrace: - benchmark: - contents: - - GhcRtsStats - - MonotonicClock - subtrace: ObservableTrace - '#ekgview': - contents: - - - tag: Contains - contents: 
'cardano.epoch-validation.benchmark' - - - tag: Contains - contents: .monoclock.basic. - - - tag: Contains - contents: 'cardano.epoch-validation.benchmark' - - - tag: Contains - contents: diff.RTS.cpuNs.timed. - - - tag: StartsWith - contents: '#ekgview.#aggregation.cardano.epoch-validation.benchmark' - - - tag: Contains - contents: diff.RTS.gcNum.timed. - subtrace: FilterTrace - 'cardano.epoch-validation.utxo-stats': - # Change the `subtrace` value to `Neutral` in order to log - # `UTxO`-related messages during epoch validation. - subtrace: NoTrace - '#messagecounters.aggregation': - subtrace: NoTrace - '#messagecounters.ekgview': - subtrace: NoTrace - '#messagecounters.switchboard': - subtrace: NoTrace - '#messagecounters.katip': - subtrace: NoTrace - '#messagecounters.monitoring': - subtrace: NoTrace - 'cardano.#messagecounters.aggregation': - subtrace: NoTrace - 'cardano.#messagecounters.ekgview': - subtrace: NoTrace - 'cardano.#messagecounters.switchboard': - subtrace: NoTrace - 'cardano.#messagecounters.katip': - subtrace: NoTrace - 'cardano.#messagecounters.monitoring': - subtrace: NoTrace - mapBackends: - cardano.epoch-validation.benchmark: - - AggregationBK - '#aggregation.cardano.epoch-validation.benchmark': - - EKGViewBK - mapSeverity: - db-sync-node.Subscription: Error - db-sync-node.Mux: Error - db-sync-node: Info diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/empty-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/empty-genesis.conway.spec.json deleted file mode 100644 index 4525ef4a5..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/empty-genesis.conway.spec.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "genDelegs": {} -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.alonzo.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/genesis.alonzo.spec.json deleted file mode 100644 index abcaab932..000000000 --- 
a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.alonzo.spec.json +++ /dev/null @@ -1,365 +0,0 @@ -{ - "collateralPercentage": 150, - "costModels": { - "PlutusV1": [ - 205665, - 812, - 1, - 1, - 1000, - 571, - 0, - 1, - 1000, - 24177, - 4, - 1, - 1000, - 32, - 117366, - 10475, - 4, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 100, - 100, - 23000, - 100, - 19537, - 32, - 175354, - 32, - 46417, - 4, - 221973, - 511, - 0, - 1, - 89141, - 32, - 497525, - 14068, - 4, - 2, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1000, - 28662, - 4, - 2, - 245000, - 216773, - 62, - 1, - 1060367, - 12586, - 1, - 208512, - 421, - 1, - 187000, - 1000, - 52998, - 1, - 80436, - 32, - 43249, - 32, - 1000, - 32, - 80556, - 1, - 57667, - 4, - 1000, - 10, - 197145, - 156, - 1, - 197145, - 156, - 1, - 204924, - 473, - 1, - 208896, - 511, - 1, - 52467, - 32, - 64832, - 32, - 65493, - 32, - 22558, - 32, - 16563, - 32, - 76511, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 69522, - 11687, - 0, - 1, - 60091, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 196500, - 453240, - 220, - 0, - 1, - 1, - 806990, - 30482, - 4, - 1927926, - 82523, - 4, - 265318, - 0, - 4, - 0, - 85931, - 32, - 205665, - 812, - 1, - 1, - 41182, - 32, - 212342, - 32, - 31220, - 32, - 32696, - 32, - 43357, - 32, - 32247, - 32, - 38314, - 32, - 9462713, - 1021, - 10 - ], - "PlutusV2": [ - 205665, - 812, - 1, - 1, - 1000, - 571, - 0, - 1, - 1000, - 24177, - 4, - 1, - 1000, - 32, - 117366, - 10475, - 4, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 23000, - 100, - 100, - 100, - 23000, - 100, - 19537, - 32, - 175354, - 32, - 46417, - 4, - 221973, - 511, - 0, - 1, - 89141, - 32, - 497525, - 14068, - 4, - 2, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1000, - 28662, - 4, - 2, - 245000, - 216773, - 62, - 1, - 1060367, - 12586, - 1, - 208512, - 421, - 1, - 187000, - 1000, - 52998, - 1, - 80436, - 32, - 43249, - 32, - 1000, - 32, - 
80556, - 1, - 57667, - 4, - 1000, - 10, - 197145, - 156, - 1, - 197145, - 156, - 1, - 204924, - 473, - 1, - 208896, - 511, - 1, - 52467, - 32, - 64832, - 32, - 65493, - 32, - 22558, - 32, - 16563, - 32, - 76511, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 69522, - 11687, - 0, - 1, - 60091, - 32, - 196500, - 453240, - 220, - 0, - 1, - 1, - 196500, - 453240, - 220, - 0, - 1, - 1, - 1159724, - 392670, - 0, - 2, - 806990, - 30482, - 4, - 1927926, - 82523, - 4, - 265318, - 0, - 4, - 0, - 85931, - 32, - 205665, - 812, - 1, - 1, - 41182, - 32, - 212342, - 32, - 31220, - 32, - 32696, - 32, - 43357, - 32, - 32247, - 32, - 38314, - 32, - 35892428, - 10, - 9462713, - 1021, - 10, - 38887044, - 32947, - 10 - ] - }, - "executionPrices": { - "prMem": 0.0577, - "prSteps": 7.21e-05 - }, - "lovelacePerUTxOWord": 34482, - "maxBlockExUnits": { - "exUnitsMem": 62000000, - "exUnitsSteps": 40000000000 - }, - "maxCollateralInputs": 3, - "maxTxExUnits": { - "exUnitsMem": 14000000, - "exUnitsSteps": 10000000000 - }, - "maxValueSize": 5000 -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/genesis.conway.spec.json deleted file mode 100644 index 17b09d995..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.conway.spec.json +++ /dev/null @@ -1,292 +0,0 @@ -{ - "poolVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "hardForkInitiation": 0.51, - "ppSecurityGroup": 0.51 - }, - "dRepVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "updateToConstitution": 0.51, - "hardForkInitiation": 0.51, - "ppNetworkGroup": 0.51, - "ppEconomicGroup": 0.51, - "ppTechnicalGroup": 0.51, - "ppGovGroup": 0.51, - "treasuryWithdrawal": 0.51 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 11000, - "govActionLifetime": 2, - "govActionDeposit": 100000000, - "dRepDeposit": 
2000000, - "dRepActivity": 100, - "minFeeRefScriptCostPerByte": 0, - "plutusV3CostModel": [ - 100788, - 420, - 1, - 1, - 1000, - 173, - 0, - 1, - 1000, - 59957, - 4, - 1, - 11183, - 32, - 201305, - 8356, - 4, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 16000, - 100, - 100, - 100, - 16000, - 100, - 94375, - 32, - 132994, - 32, - 61462, - 4, - 72010, - 178, - 0, - 1, - 22151, - 32, - 91189, - 769, - 4, - 2, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 1, - 1000, - 42921, - 4, - 2, - 24548, - 29498, - 38, - 1, - 898148, - 27279, - 1, - 51775, - 558, - 1, - 39184, - 1000, - 60594, - 1, - 141895, - 32, - 83150, - 32, - 15299, - 32, - 76049, - 1, - 13169, - 4, - 22100, - 10, - 28999, - 74, - 1, - 28999, - 74, - 1, - 43285, - 552, - 1, - 44749, - 541, - 1, - 33852, - 32, - 68246, - 32, - 72362, - 32, - 7243, - 32, - 7391, - 32, - 11546, - 32, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 90434, - 519, - 0, - 1, - 74433, - 32, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 1, - 85848, - 123203, - 7305, - -900, - 1716, - 549, - 57, - 85848, - 0, - 1, - 955506, - 213312, - 0, - 2, - 270652, - 22588, - 4, - 1457325, - 64566, - 4, - 20467, - 1, - 4, - 0, - 141992, - 32, - 100788, - 420, - 1, - 1, - 81663, - 32, - 59498, - 32, - 20142, - 32, - 24588, - 32, - 20744, - 32, - 25933, - 32, - 24623, - 32, - 43053543, - 10, - 53384111, - 14333, - 10, - 43574283, - 26308, - 10, - 16000, - 100, - 16000, - 100, - 962335, - 18, - 2780678, - 6, - 442008, - 1, - 52538055, - 3756, - 18, - 267929, - 18, - 76433006, - 8868, - 18, - 52948122, - 18, - 1995836, - 36, - 3227919, - 12, - 901022, - 1, - 166917843, - 4307, - 36, - 284546, - 36, - 158221314, - 26549, - 36, - 74698472, - 36, - 333849714, - 1, - 254006273, - 72, - 2174038, - 72, - 2261318, - 64571, - 4, - 207616, - 8310, - 4, - 1293828, - 28716, - 63, - 0, - 1, - 1006041, - 43623, - 251, - 0, - 1 - ], - 
"constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - }, - "threshold": 0.0 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/genesis.spec.json deleted file mode 100644 index e81afcc93..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/genesis.spec.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "activeSlotsCoeff": 0.1, - "protocolParams": { - "poolDeposit": 500000000, - "protocolVersion": { - "minor": 0, - "major": 8 - }, - "minUTxOValue": 1, - "decentralisationParam": 0.8, - "maxTxSize": 16384, - "minPoolCost": 0, - "minFeeA": 44, - "maxBlockBodySize": 65536, - "minFeeB": 155381, - "eMax": 18, - "extraEntropy": { - "tag": "NeutralNonce" - }, - "maxBlockHeaderSize": 1100, - "keyDeposit": 400000, - "nOpt": 500, - "rho": 0.0022, - "tau": 0.05, - "a0": 0.3 - }, - "protocolMagicId": 42, - "genDelegs": {}, - "updateQuorum": 1, - "networkId": "Testnet", - "initialFunds": {}, - "maxLovelaceSupply": 45000000000000000, - "networkMagic": 42, - "epochLength": 1000, - "staking": { - "pools": {}, - "stake": {} - }, - "systemStart": "2020-07-08T02:39:16.033076859Z", - "slotsPerKESPeriod": 129600, - "slotLength": 0.2, - "maxKESEvolutions": 64, - "securityParam": 10 -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/postgres-setup.sh b/cardano_node_tests/cluster_scripts/babbage_fast/postgres-setup.sh deleted file mode 100644 index 2dc08d0c8..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/postgres-setup.sh +++ /dev/null @@ -1,23 +0,0 @@ -#! 
/usr/bin/env -S nix develop --accept-flake-config github:IntersectMBO/cardano-node-tests#postgres -i -k CARDANO_NODE_SOCKET_PATH -k PGHOST -k PGPORT -k PGUSER --no-write-lock-file -c bash -# shellcheck shell=bash - -set -euo pipefail - -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -INSTANCE_NUM="${STATE_CLUSTER#*state-cluster}" -DATABASE_NAME="dbsync${INSTANCE_NUM}" - -PGPASSFILE="$STATE_CLUSTER/pgpass" -export PGHOST="${PGHOST:-localhost}" -export PGPORT="${PGPORT:-5432}" -export PGUSER="${PGUSER:-postgres}" - -echo "Deleting db $DATABASE_NAME" -psql -d "$DATABASE_NAME" -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = current_database() AND pid <> pg_backend_pid();" > /dev/null 2>&1 || : -dropdb --if-exists "$DATABASE_NAME" > /dev/null -echo "Setting up db $DATABASE_NAME" -createdb -T template0 --owner="$PGUSER" --encoding=UTF8 "$DATABASE_NAME" - -echo "${PGHOST}:${PGPORT}:${DATABASE_NAME}:${PGUSER}:secret" > "$PGPASSFILE" -chmod 600 "$PGPASSFILE" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.alonzo.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.alonzo.spec.json deleted file mode 100644 index 3970561a6..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.alonzo.spec.json +++ /dev/null @@ -1,371 +0,0 @@ -{ - "lovelacePerUTxOWord": 34482, - "executionPrices": { - "prSteps": { - "numerator": 721, - "denominator": 10000000 - }, - "prMem": { - "numerator": 577, - "denominator": 10000 - } - }, - "maxTxExUnits": { - "exUnitsMem": 14000000, - "exUnitsSteps": 10000000000 - }, - "maxBlockExUnits": { - "exUnitsMem": 62000000, - "exUnitsSteps": 40000000000 - }, - "maxValueSize": 5000, - "collateralPercentage": 150, - "maxCollateralInputs": 3, - "costModels": { - "PlutusV1": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - 
"addInteger-memory-arguments-intercept": 1, - "addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - "chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - 
"divideInteger-cpu-arguments-model-arguments-intercept": 453240, - "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - "equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - "equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, - "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, 
- "lessThanInteger-cpu-arguments-intercept": 208896, - "lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - "modInteger-cpu-arguments-model-arguments-slope": 220, - "modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - 
"sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - "sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - "subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - "tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10 - }, - "PlutusV2": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - "addInteger-memory-arguments-intercept": 1, - "addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 
23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - "chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - "divideInteger-cpu-arguments-model-arguments-intercept": 453240, - "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - 
"equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - "equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, - "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, - "lessThanInteger-cpu-arguments-intercept": 208896, - "lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - 
"modInteger-cpu-arguments-model-arguments-slope": 220, - "modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "serialiseData-cpu-arguments-intercept": 1159724, - "serialiseData-cpu-arguments-slope": 392670, - "serialiseData-memory-arguments-intercept": 0, - "serialiseData-memory-arguments-slope": 2, - "sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - "sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - "sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - 
"subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - "tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEcdsaSecp256k1Signature-cpu-arguments": 35892428, - "verifyEcdsaSecp256k1Signature-memory-arguments": 10, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10, - "verifySchnorrSecp256k1Signature-cpu-arguments-intercept": 38887044, - "verifySchnorrSecp256k1Signature-cpu-arguments-slope": 32947, - "verifySchnorrSecp256k1Signature-memory-arguments": 10 - } - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.conway.spec.json deleted file mode 100644 index 59ed2c5f4..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/release-genesis.conway.spec.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "poolVotingThresholds": { - "pvtCommitteeNormal": 0, - "pvtCommitteeNoConfidence": 0, - "pvtHardForkInitiation": 0, - "pvtMotionNoConfidence": 0 - }, - "dRepVotingThresholds": { - "dvtMotionNoConfidence": 0, - "dvtCommitteeNormal": 0, - "dvtCommitteeNoConfidence": 0, - "dvtUpdateToConstitution": 0, - "dvtHardForkInitiation": 0, - "dvtPPNetworkGroup": 0, - "dvtPPEconomicGroup": 0, - "dvtPPTechnicalGroup": 0, - "dvtPPGovGroup": 0, - "dvtTreasuryWithdrawal": 0 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 0, - "govActionLifetime": 0, - "govActionDeposit": 0, - "dRepDeposit": 0, - "dRepActivity": 0, - 
"constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - "keyHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 1, - "scriptHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 2 - }, - "quorum": 0.5 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/release_8_9-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/babbage_fast/release_8_9-genesis.conway.spec.json deleted file mode 100644 index 12d461512..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/release_8_9-genesis.conway.spec.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "poolVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "hardForkInitiation": 0.51, - "ppSecurityGroup": 0.51 - }, - "dRepVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "updateToConstitution": 0.51, - "hardForkInitiation": 0.51, - "ppNetworkGroup": 0.51, - "ppEconomicGroup": 0.51, - "ppTechnicalGroup": 0.51, - "ppGovGroup": 0.51, - "treasuryWithdrawal": 0.51 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 11000, - "govActionLifetime": 2, - "govActionDeposit": 100000000, - "dRepDeposit": 2000000, - "dRepActivity": 100, - "minFeeRefScriptCostPerByte": 0, - "constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - }, - "quorum": 0.0, - "threshold": 0.0 - } -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-dbsync b/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-dbsync deleted file mode 100644 index e094e1906..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-dbsync +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -set -uo pipefail - -SOCKET_PATH="$(readlink -m 
"$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -STATE_CLUSTER_NAME="${STATE_CLUSTER##*/}" - -export PGPASSFILE="$STATE_CLUSTER/pgpass" -export PGHOST="${PGHOST:-localhost}" -export PGPORT="${PGPORT:-5432}" -export PGUSER="${PGUSER:-postgres}" - -exec "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" --config "./$STATE_CLUSTER_NAME/dbsync-config.yaml" --socket-path "$CARDANO_NODE_SOCKET_PATH" --state-dir "./$STATE_CLUSTER_NAME/db-sync" --schema-dir "$DBSYNC_REPO/schema" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-submit-api deleted file mode 100644 index 66fac02ae..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/run-cardano-submit-api +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" - -# TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 -metrics_port="$(cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; if [[ "$i" == *Invalid* ]]; then echo ""; else echo "--metrics-port %%METRICS_SUBMIT_API_PORT%%"; fi; })" - -echo "Starting cardano-submit-api: cardano-submit-api" - echo "--config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" - echo "--listen-address 127.0.0.1" - echo "--port %%SUBMIT_API_PORT%%" - echo "$metrics_port" - echo --testnet-magic "$testnet_magic" -echo "..or, once again, in a single line:" -echo cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% "$metrics_port" --testnet-magic "$testnet_magic" - -# shellcheck disable=SC2086 -exec cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket 
--listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% $metrics_port --testnet-magic "$testnet_magic" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/start-cluster b/cardano_node_tests/cluster_scripts/babbage_fast/start-cluster deleted file mode 100644 index 88876d025..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/start-cluster +++ /dev/null @@ -1,639 +0,0 @@ -#!/usr/bin/env bash - -# controlling environment variables: -# DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository -# ENABLE_P2P - if set, local cluster will use P2P -# MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others -# UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset -# DRY_RUN - if set, will not start the cluster - -set -euo pipefail -sets="$-" - -SCRIPT_DIR="$(readlink -m "${0%/*}")" -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -STATE_CLUSTER_NAME="${STATE_CLUSTER##*/}" - -INSTANCE_NUM="%%INSTANCE_NUM%%" -if [[ "$SOCKET_PATH" != *"/state-cluster${INSTANCE_NUM}/"* ]]; then - echo "CARDANO_NODE_SOCKET_PATH must be set to a path containing 'state-cluster${INSTANCE_NUM}', line $LINENO" >&2 - exit 1 -fi - -NUM_BFT_NODES=1 -NUM_POOLS=%%NUM_POOLS%% -SETTLE_DELAY=20 -POOL_PLEDGE=1000000000000 -BYRON_INIT_SUPPLY=10020000000 - -SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" -NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" -MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" - -# There is some weird calculation going on, and the deleg supply needs to have a minimum value, -# that is somehow based on non-delegated supply. 
-DELEG_MAGIC_VALUE=3340000000000000 -DELEG_SUPPLY="$((POOL_PLEDGE * NUM_POOLS + DELEG_MAGIC_VALUE))" -NONDELEG_SUPPLY="$(( (MAX_SUPPLY - DELEG_SUPPLY) * 8 / 10))" - -if [ -n "${MIXED_P2P:-""}" ]; then - export ENABLE_P2P=1 -fi - -if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then - echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" >&2 - exit 1 -fi - -if [ "$NUM_POOLS" -lt 3 ]; then - echo "NUM_POOLS must be at least 3" >&2 - exit 1 -fi - -cardano_cli_log() { - echo cardano-cli "$@" >> "$STATE_CLUSTER/start_cluster_cmds.log" - - for _ in {1..3}; do - set +e - out="$(cardano-cli "$@" 2>&1)" - retval="$?" - set -"$sets" - - case "$out" in - *"resource vanished"*) - printf "Retrying \`cardano-cli %s\`. Failure:\n%s\n" "$*" "$out" >&2 - sleep 1 - ;; - *) - if [ -n "$out" ]; then echo "$out"; fi - break - ;; - esac - done - - return "$retval" -} - -enable_submit_api() { - command -v cardano-submit-api >/dev/null 2>&1 || return 1 - - # TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 - # If the metrics port is not available, we can start the `cardano-submit-api` only on the first - # cluster instance. - [[ "$CARDANO_NODE_SOCKET_PATH" == */cluster0/* ]] && return 0 - if cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; [[ "$i" == *Invalid* ]]; }; then - return 1 - fi - - return 0 -} - -ENABLE_SUBMIT_API="$(enable_submit_api && echo 1 || echo 0)" - -if [ -e "$SCRIPT_DIR/shell_env" ]; then - # shellcheck disable=SC1090,SC1091 - source "$SCRIPT_DIR/shell_env" -fi - - -CERT_ERA_ARG=("--babbage-era") -if { cardano-cli stake-address registration-certificate --babbage-era 2>&1; true; } |\ - { read -r i; [[ "$i" == *Invalid* ]]; }; then - CERT_ERA_ARG=() -fi - -rm -rf "$STATE_CLUSTER" -mkdir -p "$STATE_CLUSTER"/{shelley,webserver,db-sync,create_staked} -cd "$STATE_CLUSTER/.." 
- -cp "$SCRIPT_DIR"/cardano-node-* "$STATE_CLUSTER" -cp "$SCRIPT_DIR/run-cardano-submit-api" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/byron-params.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/dbsync-config.yaml" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" -cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/create_staked/" - -if [ -n "${ENABLE_P2P:-""}" ]; then - # use P2P topology files - for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do - tfname="${tconf##*/p2p-}" - cp "$tconf" "${STATE_CLUSTER}/${tfname}" - done -else - cp "$SCRIPT_DIR"/topology-*.json "$STATE_CLUSTER" -fi - -case "${UTXO_BACKEND:=""}" in - "" | mem | disk) - echo "$UTXO_BACKEND" > "$STATE_CLUSTER/utxo_backend" - ;; - *) - echo "Unknown \`UTXO_BACKEND\`: '$UTXO_BACKEND', line $LINENO" >&2 - exit 1 - ;; -esac - -# enable db-sync service -if [ -n "${DBSYNC_REPO:-""}" ]; then - [ -e "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" ] || \ - { echo "The \`$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert - - # create clean database - if [ -z "${DRY_RUN:-""}" ]; then - "$SCRIPT_DIR/postgres-setup.sh" - fi - - cat >> "$STATE_CLUSTER/supervisor.conf" <> "$STATE_CLUSTER/supervisor.conf" < "$STATE_CLUSTER/cluster_start_time" - -cardano_cli_log byron genesis genesis \ - --protocol-magic "$NETWORK_MAGIC" \ - --k "$SECURITY_PARAM" \ - --n-poor-addresses 0 \ - --n-delegate-addresses "$NUM_POOLS" \ - --total-balance "$BYRON_INIT_SUPPLY" \ - --delegate-share 1 \ - --avvm-entry-count 0 \ - --avvm-entry-balance 0 \ - --protocol-parameters-file "$STATE_CLUSTER/byron-params.json" \ - --genesis-output-dir "$STATE_CLUSTER/byron" \ - --start-time "$START_TIME" - -mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" - -gen_genesis() { - cardano_cli_log genesis create-staked \ - --genesis-dir "$STATE_CLUSTER/create_staked" \ - --testnet-magic "$NETWORK_MAGIC" \ - --gen-pools 
"$NUM_POOLS" \ - --gen-utxo-keys 1 \ - --supply "$NONDELEG_SUPPLY" \ - --gen-stake-delegs "$NUM_POOLS" \ - --supply-delegated "$DELEG_SUPPLY" \ - --start-time "$START_TIME_SHELLEY" -} - -gen_genesis && genesis_created=1 || genesis_created=0 - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/master-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/release_8_9-genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/release_8_9-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/release-genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/release-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/empty-genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.alonzo.spec.json" \ - "$STATE_CLUSTER/create_staked/master-genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/create_staked/release-genesis.alonzo.spec.json" \ - 
"$STATE_CLUSTER/create_staked/genesis.alonzo.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -mv "$STATE_CLUSTER/create_staked/delegate-keys" "$STATE_CLUSTER/shelley/delegate-keys" -mv "$STATE_CLUSTER/create_staked/genesis-keys" "$STATE_CLUSTER/shelley/genesis-keys" -jq \ - --argjson max_supply "$MAX_SUPPLY" \ - '.maxLovelaceSupply = $max_supply' \ - "$STATE_CLUSTER/create_staked/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json" -rm -f "$STATE_CLUSTER/create_staked/genesis.json" -mv "$STATE_CLUSTER"/create_staked/genesis*.json "$STATE_CLUSTER/shelley/" - -mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" -mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" -cardano_cli_log address build --payment-verification-key-file \ - "$STATE_CLUSTER/shelley/genesis-utxo.vkey" \ - --out-file "$STATE_CLUSTER/shelley/genesis-utxo.addr" \ - --testnet-magic "$NETWORK_MAGIC" - -mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/stake-delegator-keys" - -BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ - "$STATE_CLUSTER/byron/genesis.json")" -SHELLEY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.json")" -ALONZO_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.alonzo.json")" - -CONWAY_GENESIS_HASH="" -EXP_PROTOCOLS_KEY="TestEnableDevelopmentNetworkProtocols" - -# conway genesis is not present on node < 1.36.0 -if [ -e "$STATE_CLUSTER/shelley/genesis.conway.json" ]; then - CONWAY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" - EXP_PROTOCOLS_KEY="ExperimentalProtocolsEnabled" -fi - -for conf in "$SCRIPT_DIR"/config-*.json; do - fname="${conf##*/}" - jq \ - --arg byron_hash "$BYRON_GENESIS_HASH" \ - --arg shelley_hash "$SHELLEY_GENESIS_HASH" \ - --arg 
alonzo_hash "$ALONZO_GENESIS_HASH" \ - --arg exp_protocols_key "$EXP_PROTOCOLS_KEY" \ - '.ByronGenesisHash = $byron_hash - | .ShelleyGenesisHash = $shelley_hash - | .AlonzoGenesisHash = $alonzo_hash - | .[$exp_protocols_key] = true' \ - "$conf" > "$STATE_CLUSTER/$fname" - - if [ -n "$CONWAY_GENESIS_HASH" ]; then - CONWAY_GENESIS_HASH="${CONWAY_GENESIS_HASH:-"$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")"}" - jq \ - --arg conway_hash "$CONWAY_GENESIS_HASH" \ - '.ConwayGenesisFile = "shelley/genesis.conway.json" | .ConwayGenesisHash = $conway_hash' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" - fi - - # enable P2P - if [ -n "${ENABLE_P2P:-""}" ]; then - - # setup mix of P2P and legacy - if [ -n "${MIXED_P2P:-""}" ]; then - if [ "$fname" = "config-bft1.json" ]; then - # use legacy topology file for bft1 - cp -f "$SCRIPT_DIR"/topology-bft1.json "$STATE_CLUSTER" - continue - fi - - # use legacy topology files for odd numbered pools - pool_num="${fname##*-pool}" - pool_num="${pool_num%.json}" - if [ "$((pool_num % 2))" != 0 ]; then - cp -f "$SCRIPT_DIR/topology-pool${pool_num}.json" "$STATE_CLUSTER" - continue - fi - fi - - jq \ - '.EnableP2P = true - | .MaxConcurrencyBulkSync = 2 - | .MaxConcurrencyDeadline = 4 - | .TargetNumberOfRootPeers = 100 - | .TargetNumberOfKnownPeers = 100 - | .TargetNumberOfEstablishedPeers = 50 - | .TargetNumberOfActivePeers = 20 - | .TraceBlockFetchClient = true - | .TraceChainSyncClient = true' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" - fi -done - -for i in $(seq 1 $NUM_BFT_NODES); do - mkdir -p "$STATE_CLUSTER/nodes/node-bft$i" - BFT_PORT=$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" )) - echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" -done - -for i 
in $(seq 1 "$NUM_POOLS"); do - mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" - - mv "$STATE_CLUSTER/create_staked/pools/kes$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/kes.skey" - mv "$STATE_CLUSTER/create_staked/pools/kes$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" - - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.cert" "$STATE_CLUSTER/nodes/node-pool$i/op.cert" - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.counter" "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" - - # stake reward keys - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" - - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.skey" - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" - - echo "Generating Pool $i Secrets" - - # pool owner addresses and keys - cardano_cli_log address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" - cardano_cli_log stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" - # payment address - cardano_cli_log address build \ - --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" - # stake address - cardano_cli_log stake-address build \ - --stake-verification-key-file 
"$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" - # stake address registration cert - cardano_cli_log stake-address registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" - - # stake reward address registration cert - cardano_cli_log stake-address registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - - # stake address delegation certs - cardano_cli_log stake-address delegation-certificate \ - "${CERT_ERA_ARG[@]}" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" - - POOL_NAME="TestPool$i" - POOL_DESC="Test Pool $i" - POOL_TICKER="TP$i" - - cat > "$STATE_CLUSTER/webserver/pool$i.html" < - - -$POOL_NAME - - -name: $POOL_NAME
-description: $POOL_DESC
-ticker: $POOL_TICKER
- - -EoF - - echo "Generating Pool $i Metadata" - jq -n \ - --arg name "$POOL_NAME" \ - --arg description "$POOL_DESC" \ - --arg ticker "$POOL_TICKER" \ - --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool$i.html" \ - '{"name": $name, "description": $description, "ticker": $ticker, "homepage": $homepage}' \ - > "$STATE_CLUSTER/webserver/pool$i.json" - - METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - METADATA_HASH=$(cardano_cli_log stake-pool metadata-hash --pool-metadata-file \ - "$STATE_CLUSTER/webserver/pool$i.json") - POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) - echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" - echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" - - cardano_cli_log stake-pool registration-certificate \ - "${CERT_ERA_ARG[@]}" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ - --pool-pledge "$POOL_PLEDGE" \ - --pool-margin 0.35 \ - --pool-cost 600 \ - --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --metadata-url "$METADATA_URL" \ - --metadata-hash "$METADATA_HASH" \ - --pool-relay-port "$POOL_PORT" \ - --pool-relay-ipv4 "127.0.0.1" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/register.cert" -done - -rm -rf "$STATE_CLUSTER/shelley/create_staked" - -# create scripts for cluster starting / stopping -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "$STATE_CLUSTER/supervisorctl" 
- -cat > "$STATE_CLUSTER/supervisord_start" < "$STATE_CLUSTER/supervisord_stop" <&2; exit 1; } # assert - - -# -# In Babbage era -# - - -# start db-sync -if [ -n "${DBSYNC_REPO:-""}" ]; then - echo "Starting db-sync" - supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start dbsync -fi - -echo "Sleeping for $SETTLE_DELAY seconds to allow the chain to progress..." -sleep "$SETTLE_DELAY" - -echo "Re-registering pools" - -GENESIS_SIGNING=() -for skey in "$STATE_CLUSTER"/shelley/genesis-keys/genesis?.skey; do - GENESIS_SIGNING+=("--signing-key-file" "$skey") -done - -DELEGATE_SIGNING=() -for skey in "$STATE_CLUSTER"/shelley/delegate-keys/delegate?.skey; do - DELEGATE_SIGNING+=("--signing-key-file" "$skey") -done - -# Transfer funds, register stake addresses and pools, all in one big transaction: - -cardano_cli_log query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - -TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" -DEPOSITS="$(jq '2 * .protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" -NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" -FEE_BUFFER=100000000 -STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$TXIN_ADDR" | - grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" - -POOL_ARGS=() -for i in $(seq 1 "$NUM_POOLS"); do - POOL_ARGS+=( \ - "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" \ - "--certificate-file" 
"$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ - ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do - POOL_SIGNING+=( \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - ) -done - -WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + 1))" - -cardano_cli_log transaction build \ - --babbage-era \ - "${TXINS[@]}" \ - --change-address "$TXIN_ADDR" \ - "${POOL_ARGS[@]}" \ - --witness-override "$WITNESS_COUNT" \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" - -cardano_cli_log transaction sign \ - "${POOL_SIGNING[@]}" \ - "${GENESIS_SIGNING[@]}" \ - "${DELEGATE_SIGNING[@]}" \ - --signing-key-file "$STATE_CLUSTER/shelley/genesis-utxo.skey" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" \ - --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.tx" - -cardano_cli_log transaction submit \ - --tx-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - -# start cardano-submit-api -if [ "$ENABLE_SUBMIT_API" -eq 1 ]; then - echo "Starting cardano-submit-api" - supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start submit_api -fi - -sleep 3 - -query_spend_success=0 -for _ in {1..20}; do - if ! cardano_cli_log query utxo "${TXINS[@]}" --testnet-magic "$NETWORK_MAGIC" | grep -q lovelace; then - query_spend_success=1 - break - fi - sleep 3 -done -if [ "$query_spend_success" -eq 0 ]; then - echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert - exit 1 -fi - -echo "Cluster started. 
Run \`$SCRIPT_DIR/stop-cluster\` to stop" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/stop-cluster b/cardano_node_tests/cluster_scripts/babbage_fast/stop-cluster deleted file mode 100644 index 42fad2d1e..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/stop-cluster +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -set -uo pipefail - -SOCKET_PATH="$(readlink -m "$CARDANO_NODE_SOCKET_PATH")" -STATE_CLUSTER="${SOCKET_PATH%/*}" -PID_FILE="${STATE_CLUSTER}/supervisord.pid" - -INSTANCE_NUM="%%INSTANCE_NUM%%" -if [[ "$SOCKET_PATH" != *"/state-cluster${INSTANCE_NUM}/"* ]]; then - echo "CARDANO_NODE_SOCKET_PATH must be set to a path containing 'state-cluster${INSTANCE_NUM}', line $LINENO" >&2 - exit 1 -fi - -supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% stop all - -if [ ! -f "$PID_FILE" ]; then - echo "Cluster is not running!" - exit 0 -fi - -PID="$(<"$PID_FILE")" -for _ in {1..5}; do - if ! kill "$PID"; then - break - fi - sleep 1 - if [ ! -f "$PID_FILE" ]; then - break - fi -done - -rm -f "$PID_FILE" -echo "Cluster terminated!" 
diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/submit-api-config.json b/cardano_node_tests/cluster_scripts/babbage_fast/submit-api-config.json deleted file mode 100644 index 36dd121ce..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/submit-api-config.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "EnableLogMetrics": false, - "EnableLogging": true, - "RequiresNetworkMagic": "RequiresMagic", - "defaultBackends": [ - "KatipBK" - ], - "defaultScribes": [ - [ - "StdoutSK", - "stdout" - ] - ], - "minSeverity": "Info", - "options": { - "cfokey": { - "value": "Release-1.0.0" - }, - "mapBackends": {}, - "mapSeverity": { - "db-sync-node": "Info", - "db-sync-node.Mux": "Error", - "db-sync-node.Subscription": "Error" - }, - "mapSubtrace": { - "#ekgview": { - "contents": [ - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": ".monoclock.basic.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": "diff.RTS.cpuNs.timed.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "#ekgview.#aggregation.cardano.epoch-validation.benchmark", - "tag": "StartsWith" - }, - [ - { - "contents": "diff.RTS.gcNum.timed.", - "tag": "Contains" - } - ] - ] - ], - "subtrace": "FilterTrace" - }, - "#messagecounters.aggregation": { - "subtrace": "NoTrace" - }, - "#messagecounters.ekgview": { - "subtrace": "NoTrace" - }, - "#messagecounters.katip": { - "subtrace": "NoTrace" - }, - "#messagecounters.monitoring": { - "subtrace": "NoTrace" - }, - "#messagecounters.switchboard": { - "subtrace": "NoTrace" - }, - "benchmark": { - "contents": [ - "GhcRtsStats", - "MonotonicClock" - ], - "subtrace": "ObservableTrace" - }, - "cardano.epoch-validation.utxo-stats": { - "subtrace": "NoTrace" - } - } - }, - "rotation": { - "rpKeepFilesNum": 10, - "rpLogLimitBytes": 5000000, - "rpMaxAgeHours": 24 - }, - "setupBackends": [ - "AggregationBK", - 
"KatipBK" - ], - "setupScribes": [ - { - "scFormat": "ScText", - "scKind": "StdoutSK", - "scName": "stdout", - "scRotation": null - } - ] -} diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/template-cardano-node-pool b/cardano_node_tests/cluster_scripts/babbage_fast/template-cardano-node-pool deleted file mode 100644 index 66404c64c..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/template-cardano-node-pool +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -if [ -e ./state-cluster%%INSTANCE_NUM%%/utxo_backend ]; then - UTXO_BACKEND="$(<./state-cluster%%INSTANCE_NUM%%/utxo_backend)" -fi - -case "${UTXO_BACKEND:-""}" in - mem) - UTXO_BACKEND_ARGS=("--utxos-in-memory") - ;; - disk) - UTXO_BACKEND_ARGS=("--utxos-on-disk") - ;; - *) - UTXO_BACKEND_ARGS=() - ;; -esac - -echo "Starting cardano-node run: cardano-node run" - echo "--config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json" - echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%%" - echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json" - echo "--host-addr 127.0.0.1" - echo "--port %%NODE_PORT%%" - echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket" - echo "--shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey" - echo "--shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey" - echo "--shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - -echo "..or, once again, in a single line:" -echo "cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%% --topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json --host-addr 127.0.0.1 --port %%NODE_PORT%% --socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket --shelley-vrf-key 
./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert ${UTXO_BACKEND_ARGS[*]} $*" - - -exec cardano-node run --config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json --database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%% --topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json --host-addr 127.0.0.1 --port %%NODE_PORT%% --socket-path ./state-cluster%%INSTANCE_NUM%%/pool%%POOL_NUM%%.socket --shelley-vrf-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/vrf.skey --shelley-kes-key ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/kes.skey --shelley-operational-certificate ./state-cluster%%INSTANCE_NUM%%/nodes/node-pool%%POOL_NUM%%/op.cert "${UTXO_BACKEND_ARGS[@]}" "$@" diff --git a/cardano_node_tests/cluster_scripts/babbage_fast/template-config.json b/cardano_node_tests/cluster_scripts/babbage_fast/template-config.json deleted file mode 100644 index 315397f85..000000000 --- a/cardano_node_tests/cluster_scripts/babbage_fast/template-config.json +++ /dev/null @@ -1,184 +0,0 @@ -{ - "ApplicationName": "cardano-sl", - "ApplicationVersion": 1, - "ByronGenesisFile": "byron/genesis.json", - "LastKnownBlockVersion-Alt": 0, - "LastKnownBlockVersion-Major": 6, - "LastKnownBlockVersion-Minor": 0, - "PBftSignatureThreshold": 1, - "Protocol": "Cardano", - "RequiresNetworkMagic": "RequiresMagic", - "ShelleyGenesisFile": "shelley/genesis.json", - "AlonzoGenesisFile": "shelley/genesis.alonzo.json", - "TraceBlockFetchClient": false, - "TraceBlockFetchDecisions": false, - "TraceBlockFetchProtocol": false, - "TraceBlockFetchProtocolSerialised": false, - "TraceBlockFetchServer": false, - "TraceChainDb": true, - "TraceChainSyncBlockServer": false, - "TraceChainSyncClient": false, - "TraceChainSyncHeaderServer": false, - 
"TraceChainSyncProtocol": false, - "TraceDNSResolver": true, - "TraceDNSSubscription": true, - "TraceErrorPolicy": true, - "TraceForge": true, - "TraceHandshake": true, - "TraceIpSubscription": true, - "TraceLocalChainSyncProtocol": false, - "TraceLocalErrorPolicy": true, - "TraceLocalHandshake": false, - "TraceLocalTxSubmissionProtocol": false, - "TraceLocalTxSubmissionServer": false, - "TraceMempool": true, - "TraceMux": true, - "TraceConnectionManager": true, - "TraceConnectionManagerTransitions": true, - "TracePeerSelection": true, - "TracePeerSelectionActions": true, - "TraceDebugPeerSelection": true, - "TraceTxInbound": false, - "TraceTxOutbound": false, - "TraceTxSubmissionProtocol": false, - "TraceInboundGovernor": true, - "TraceServer": true, - "TraceInboundGovernorCounters": true, - "TraceInboundGovernorTransitions": true, - "DebugPeerSelectionInitiator": true, - "DebugPeerSelectionInitiatorResponder": true, - "TracingVerbosity": "NormalVerbosity", - "TurnOnLogMetrics": true, - "TurnOnLogging": true, - "defaultBackends": [ - "KatipBK" - ], - "defaultScribes": [ - [ - "StdoutSK", - "stdout" - ] - ], - "hasEKG": %%EKG_PORT%%, - "hasPrometheus": [ - "127.0.0.1", - %%PROMETHEUS_PORT%% - ], - "minSeverity": "Info", - "options": { - "mapSeverity": { - "cardano.node.ConnectionManager": "Debug", - "cardano.node.ConnectionManagerTransition": "Debug", - "cardano.node.PeerSelection": "Info", - "cardano.node.DebugPeerSelection": "Debug", - "cardano.node.PeerSelectionActions": "Debug", - "cardano.node.Handshake": "Debug", - "cardano.node.Mux": "Info", - "cardano.node.ChainSyncProtocol": "Error", - "cardano.node.InboundGovernor": "Debug", - "cardano.node.resources": "Notice", - "cardano.node.InboundGovernor": "Debug", - "cardano.node.ConnectionManagerTransitions": "Debug" - }, - "mapBackends": { - "cardano.node-metrics": [ - "EKGViewBK" - ], - "cardano.node.BlockFetchDecision.peers": [ - "EKGViewBK" - ], - "cardano.node.ChainDB.metrics": [ - "EKGViewBK" - ], - 
"cardano.node.Forge.metrics": [ - "EKGViewBK" - ], - "cardano.node.metrics": [ - "EKGViewBK" - ], - "cardano.node.resources": [ - "EKGViewBK" - ] - }, - "mapSubtrace": { - "#ekgview": { - "contents": [ - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": ".monoclock.basic.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "cardano.epoch-validation.benchmark", - "tag": "Contains" - }, - [ - { - "contents": "diff.RTS.cpuNs.timed.", - "tag": "Contains" - } - ] - ], - [ - { - "contents": "#ekgview.#aggregation.cardano.epoch-validation.benchmark", - "tag": "StartsWith" - }, - [ - { - "contents": "diff.RTS.gcNum.timed.", - "tag": "Contains" - } - ] - ] - ], - "subtrace": "FilterTrace" - }, - "benchmark": { - "contents": [ - "GhcRtsStats", - "MonotonicClock" - ], - "subtrace": "ObservableTrace" - }, - "cardano.epoch-validation.utxo-stats": { - "subtrace": "NoTrace" - }, - "cardano.node-metrics": { - "subtrace": "Neutral" - }, - "cardano.node.metrics": { - "subtrace": "Neutral" - } - } - }, - "rotation": { - "rpKeepFilesNum": 10, - "rpLogLimitBytes": 5000000, - "rpMaxAgeHours": 24 - }, - "setupBackends": [ - "KatipBK" - ], - "setupScribes": [ - { - "scFormat": "ScText", - "scKind": "StdoutSK", - "scName": "stdout", - "scRotation": null - } - ], - "TestShelleyHardForkAtEpoch": 0, - "TestAllegraHardForkAtEpoch": 0, - "TestMaryHardForkAtEpoch": 0, - "TestAlonzoHardForkAtEpoch": 0, - "TestBabbageHardForkAtEpoch": 0 -} diff --git a/cardano_node_tests/utils/configuration.py b/cardano_node_tests/utils/configuration.py index 316416672..be9d1a11f 100644 --- a/cardano_node_tests/utils/configuration.py +++ b/cardano_node_tests/utils/configuration.py @@ -66,7 +66,7 @@ def _check_cardano_node_socket_path() -> None: BLOCK_PRODUCTION_DB = pl.Path(BLOCK_PRODUCTION_DB).expanduser().resolve() CLUSTER_ERA = os.environ.get("CLUSTER_ERA") or "" -if CLUSTER_ERA not in ("", "babbage", "conway"): +if CLUSTER_ERA not in ("", "conway"): 
msg = f"Invalid or unsupported CLUSTER_ERA: {CLUSTER_ERA}" raise RuntimeError(msg) diff --git a/prepare_test_env.sh b/prepare_test_env.sh index 957c9cdcb..45b99c54a 100755 --- a/prepare_test_env.sh +++ b/prepare_test_env.sh @@ -11,14 +11,11 @@ if [ -z "${IN_NIX_SHELL:-""}" ]; then fi case "${1:-""}" in - "babbage") - export CLUSTER_ERA=babbage - ;; "conway") export CLUSTER_ERA=conway COMMAND_ERA=conway ;; *) - echo "Usage: $0 {babbage|conway}" + echo "Usage: $0 {conway}" return 1 ;; esac From 3e3ef16a9ebaba262d80c74da4d6fa83754343c8 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 11:37:18 +0200 Subject: [PATCH 030/168] feat(networking): switch default to P2P networking This commit changes the default networking mode from legacy to P2P. The ENABLE_P2P environment variable has been replaced with ENABLE_LEGACY to reflect this change. All relevant scripts and configurations have been updated accordingly. --- .github/env_nightly_dbsync | 1 + .github/env_nightly_dbsync_pv10 | 1 - .github/env_nightly_pv10 | 1 - .github/node_upgrade.sh | 2 +- .github/node_upgrade_pytest.sh | 6 ++++-- .github/regression.sh | 4 ++-- .github/workflows/regression-dbsync.yaml | 4 ++-- .github/workflows/regression.yaml | 4 ++-- README.md | 2 +- .../cluster_scripts/conway/start-cluster | 10 +++------- .../cluster_scripts/conway_fast/start-cluster | 10 +++------- .../cluster_scripts/mainnet_fast/start-cluster | 10 +++------- cardano_node_tests/tests/conftest.py | 2 +- cardano_node_tests/tests/test_blocks.py | 8 ++++---- cardano_node_tests/tests/test_kes.py | 4 ++-- cardano_node_tests/tests/test_reconnect.py | 2 +- cardano_node_tests/tests/test_rollback.py | 2 +- cardano_node_tests/utils/configuration.py | 2 +- check_dev_env.sh | 2 +- scripts/test_node_reconnect.sh | 1 - 20 files changed, 33 insertions(+), 45 deletions(-) diff --git a/.github/env_nightly_dbsync b/.github/env_nightly_dbsync index 4076122dc..d848272ec 100644 --- a/.github/env_nightly_dbsync +++ 
b/.github/env_nightly_dbsync @@ -2,6 +2,7 @@ CLUSTER_ERA=conway COMMAND_ERA=conway MARKEXPR=dbsync CLUSTERS_COUNT=4 +ENABLE_LEGACY=true DBSYNC_REV=13.5.0.0 DBSYNC_TAR_URL=https://github.com/IntersectMBO/cardano-db-sync/releases/download/13.5.0.0/cardano-db-sync-13.5.0.0-linux.tar.gz DBSYNC_SKIP_INDEXES=true diff --git a/.github/env_nightly_dbsync_pv10 b/.github/env_nightly_dbsync_pv10 index 9368be7ea..99f382cad 100644 --- a/.github/env_nightly_dbsync_pv10 +++ b/.github/env_nightly_dbsync_pv10 @@ -1,7 +1,6 @@ CLUSTER_ERA=conway COMMAND_ERA=conway PV10=true -ENABLE_P2P=true MARKEXPR=dbsync CLUSTERS_COUNT=4 DBSYNC_REV=13.5.0.0 diff --git a/.github/env_nightly_pv10 b/.github/env_nightly_pv10 index 287455cba..4ec354938 100644 --- a/.github/env_nightly_pv10 +++ b/.github/env_nightly_pv10 @@ -1,4 +1,3 @@ CLUSTER_ERA=conway COMMAND_ERA=conway PV10=true -ENABLE_P2P=true diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 33b366d05..250796ffa 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -71,7 +71,7 @@ else fi export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" -unset ENABLE_P2P MIXED_P2P +unset ENABLE_LEGACY MIXED_P2P echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index 6c6205d26..d7ca87cf6 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -29,6 +29,7 @@ if [ "$1" = "step1" ]; then printf "STEP1 start: %(%H:%M:%S)T\n" -1 export UPGRADE_TESTS_STEP=1 + export ENABLE_LEGACY=1 if [ -n "${BASE_TAR_URL:-""}" ]; then # download and extract base revision binaries @@ -102,6 +103,8 @@ elif [ "$1" = "step2" ]; then printf "STEP2 start: %(%H:%M:%S)T\n" -1 export UPGRADE_TESTS_STEP=2 + export MIXED_P2P=1 + unset ENABLE_LEGACY # Setup `cardano-cli` binary if [ -n "${UPGRADE_CLI_REVISION:-""}" ]; then @@ -116,7 +119,6 @@ elif [ "$1" = "step2" ]; then # generate config 
and topology files for the "mixed" mode CARDANO_NODE_SOCKET_PATH="$WORKDIR/dry_mixed/state-cluster0/bft1.socket" \ - MIXED_P2P=1 \ DRY_RUN=1 \ "$CLUSTER_SCRIPTS_DIR/start-cluster" @@ -260,6 +262,7 @@ elif [ "$1" = "step3" ]; then printf "STEP3 start: %(%H:%M:%S)T\n" -1 export UPGRADE_TESTS_STEP=3 + unset ENABLE_LEGACY MIXED_P2P # Setup `cardano-cli` binary if [ -n "${UPGRADE_CLI_REVISION:-""}" ]; then @@ -274,7 +277,6 @@ elif [ "$1" = "step3" ]; then # generate config and topology files for p2p mode CARDANO_NODE_SOCKET_PATH="$WORKDIR/dry_p2p/state-cluster0/bft1.socket" \ - ENABLE_P2P=1 \ DRY_RUN=1 \ "$CLUSTER_SCRIPTS_DIR/start-cluster" diff --git a/.github/regression.sh b/.github/regression.sh index 44c30be0b..eadafc6c9 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -68,8 +68,8 @@ case "${CARDANO_CLI_REV:-""}" in ;; esac -if [ "${CI_TOPOLOGY:-""}" = "p2p" ]; then - export ENABLE_P2P=1 +if [ "${CI_TOPOLOGY:-""}" = "legacy" ]; then + export ENABLE_LEGACY=1 elif [ "${CI_TOPOLOGY:-""}" = "mixed" ]; then export MIXED_P2P=1 export NUM_POOLS="${NUM_POOLS:-4}" diff --git a/.github/workflows/regression-dbsync.yaml b/.github/workflows/regression-dbsync.yaml index fa641af63..627b29b23 100644 --- a/.github/workflows/regression-dbsync.yaml +++ b/.github/workflows/regression-dbsync.yaml @@ -43,10 +43,10 @@ on: type: choice description: "Network topology" options: - - legacy - p2p + - legacy - mixed - default: legacy + default: p2p byron_cluster: type: boolean default: false diff --git a/.github/workflows/regression.yaml b/.github/workflows/regression.yaml index 6c6fbbc82..e61abea03 100644 --- a/.github/workflows/regression.yaml +++ b/.github/workflows/regression.yaml @@ -36,10 +36,10 @@ on: type: choice description: "Network topology" options: - - legacy - p2p + - legacy - mixed - default: legacy + default: p2p byron_cluster: type: boolean default: false diff --git a/README.md b/README.md index ba086181c..d5d1f5f3a 100644 --- a/README.md +++ b/README.md @@ 
-92,7 +92,7 @@ Tests execution can be configured using env variables. * `CLUSTER_ERA` – cluster era for Cardano node – used for selecting the correct cluster start script (default: conway) * `COMMAND_ERA` – era for cardano-cli commands – can be used for creating Shelley-era (Allegra-era, ...) transactions (default: unset) * `NUM_POOLS` – number of stake pools created in each cluster instance (default: 3) -* `ENABLE_P2P` – use P2P networking instead of the default legacy networking (default: unset) +* `ENABLE_LEGACY` – use legacy networking instead of the default P2P networking (default: unset) * `MIXED_P2P` – use mix of P2P and legacy networking; half of stake pools using legacy and the other half P2P (default: unset) * `UTXO_BACKEND` – 'mem' or 'disk', default is 'mem' (or legacy) backend if unset (default: unset) * `SCRIPTS_DIRNAME` – path to a dir with local cluster start/stop scripts and configuration files (default: unset) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index b613edeae..f3ebf64b7 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -2,7 +2,7 @@ # controlling environment variables: # DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository -# ENABLE_P2P - if set, local cluster will use P2P +# ENABLE_LEGACY - if set, local cluster will use legacy networking # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset # NO_CC - if set, will not create committee @@ -38,10 +38,6 @@ MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" SLOT_LENGTH="$(jq '.slotLength' < "$SCRIPT_DIR/genesis.spec.json")" EPOCH_SEC="$(jq '.epochLength * .slotLength | ceil' < "$SCRIPT_DIR/genesis.spec.json")" -if [ -n "${MIXED_P2P:-""}" ]; then - export 
ENABLE_P2P=1 -fi - if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" >&2 exit 1 @@ -173,7 +169,7 @@ cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/shelley/" -if [ -n "${ENABLE_P2P:-""}" ]; then +if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do tfname="${tconf##*/p2p-}" @@ -319,7 +315,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do "$conf" > "$STATE_CLUSTER/$fname" # enable P2P - if [ -n "${ENABLE_P2P:-""}" ]; then + if [ -z "${ENABLE_LEGACY:-""}" ]; then # setup mix of P2P and legacy if [ -n "${MIXED_P2P:-""}" ]; then diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 6197bb9e1..fb2806601 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -2,7 +2,7 @@ # controlling environment variables: # DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository -# ENABLE_P2P - if set, local cluster will use P2P +# ENABLE_LEGACY - if set, local cluster will use legacy networking # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset # NO_CC - if set, will not create committee @@ -45,10 +45,6 @@ DELEG_MAGIC_VALUE=3340000000000000 DELEG_SUPPLY="$((POOL_PLEDGE * NUM_POOLS + DELEG_MAGIC_VALUE))" NONDELEG_SUPPLY="$(( (MAX_SUPPLY - DELEG_SUPPLY) * 8 / 10))" -if [ -n "${MIXED_P2P:-""}" ]; then - export ENABLE_P2P=1 -fi - if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" 
>&2 exit 1 @@ -112,7 +108,7 @@ cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/create_staked/" -if [ -n "${ENABLE_P2P:-""}" ]; then +if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do tfname="${tconf##*/p2p-}" @@ -274,7 +270,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do "$conf" > "$STATE_CLUSTER/$fname" # enable P2P - if [ -n "${ENABLE_P2P:-""}" ]; then + if [ -z "${ENABLE_LEGACY:-""}" ]; then # setup mix of P2P and legacy if [ -n "${MIXED_P2P:-""}" ]; then diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 41fff07fd..6944b61b2 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -2,7 +2,7 @@ # controlling environment variables: # DBSYNC_REPO - will start and configure db-sync if the value is path to db-sync repository -# ENABLE_P2P - if set, local cluster will use P2P +# ENABLE_LEGACY - if set, local cluster will use legacy networking # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset # DRY_RUN - if set, will not start the cluster @@ -38,10 +38,6 @@ DELEG_MAGIC_VALUE=3340000000000000 DELEG_SUPPLY="$((POOL_PLEDGE * NUM_POOLS + DELEG_MAGIC_VALUE))" NONDELEG_SUPPLY="$(( (MAX_SUPPLY - DELEG_SUPPLY) * 8 / 10))" -if [ -n "${MIXED_P2P:-""}" ]; then - export ENABLE_P2P=1 -fi - if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" 
>&2 exit 1 @@ -109,7 +105,7 @@ cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/create_staked/" -if [ -n "${ENABLE_P2P:-""}" ]; then +if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do tfname="${tconf##*/p2p-}" @@ -294,7 +290,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do fi # enable P2P - if [ -n "${ENABLE_P2P:-""}" ]; then + if [ -z "${ENABLE_LEGACY:-""}" ]; then # setup mix of P2P and legacy if [ -n "${MIXED_P2P:-""}" ]; then diff --git a/cardano_node_tests/tests/conftest.py b/cardano_node_tests/tests/conftest.py index 5d9df2e57..c51f591c6 100644 --- a/cardano_node_tests/tests/conftest.py +++ b/cardano_node_tests/tests/conftest.py @@ -80,7 +80,7 @@ def pytest_configure(config: tp.Any) -> None: config.stash[metadata_key]["CLUSTER_ERA"] = configuration.CLUSTER_ERA config.stash[metadata_key]["COMMAND_ERA"] = configuration.COMMAND_ERA config.stash[metadata_key]["SCRIPTS_DIRNAME"] = configuration.SCRIPTS_DIRNAME - config.stash[metadata_key]["ENABLE_P2P"] = str(configuration.ENABLE_P2P) + config.stash[metadata_key]["ENABLE_LEGACY"] = str(configuration.ENABLE_LEGACY) config.stash[metadata_key]["MIXED_P2P"] = str(configuration.MIXED_P2P) config.stash[metadata_key]["NUM_POOLS"] = str(configuration.NUM_POOLS) config.stash[metadata_key]["UTXO_BACKEND"] = configuration.UTXO_BACKEND diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index 1754f5b14..173d48bbb 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -235,11 +235,11 @@ def test_block_production( # noqa: C901 rand = clusterlib.get_rand_str(5) num_epochs = int(os.environ.get("BLOCK_PRODUCTION_EPOCHS") or 50) - topology = "legacy" + topology = "p2p" if configuration.MIXED_P2P: topology = "mixed" - elif configuration.ENABLE_P2P: - topology = "p2p" + 
elif configuration.ENABLE_LEGACY: + topology = "legacy" pool_mapping = {} for idx, pn in enumerate(cluster_management.Resources.ALL_POOLS, start=1): @@ -342,7 +342,7 @@ def _save_state(curr_epoch: int) -> None: conn.close() -@pytest.mark.skipif(not configuration.ENABLE_P2P, reason="runs only on P2P enabled clusters") +@pytest.mark.skipif(configuration.ENABLE_LEGACY, reason="runs only on P2P enabled clusters") class TestDynamicBlockProd: """Tests for P2P dynamic block production.""" diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index 3a6159004..0d5381d1d 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -561,7 +561,7 @@ def test_opcert_invalid_kes_period( if ( "forked blockchain" in str(exc) and VERSIONS.transaction_era >= VERSIONS.ALONZO - and configuration.ENABLE_P2P + and not configuration.ENABLE_LEGACY ): pytest.xfail(str(exc)) raise @@ -730,7 +730,7 @@ def test_update_valid_opcert( if ( "forked blockchain" in str(exc) and VERSIONS.transaction_era >= VERSIONS.ALONZO - and configuration.ENABLE_P2P + and not configuration.ENABLE_LEGACY ): pytest.xfail(str(exc)) raise diff --git a/cardano_node_tests/tests/test_reconnect.py b/cardano_node_tests/tests/test_reconnect.py index 54d6d21d2..2f5a810d3 100644 --- a/cardano_node_tests/tests/test_reconnect.py +++ b/cardano_node_tests/tests/test_reconnect.py @@ -241,7 +241,7 @@ def _assert(tx_outputs: tp.List[clusterlib.TxRawOutput]) -> None: not TEST_METRICS_RECONNECT, reason="This is not a 'metrics reconnect' testrun" ) @pytest.mark.skipif(configuration.NUM_POOLS != 3, reason="`NUM_POOLS` must be 3") - @pytest.mark.skipif(not configuration.ENABLE_P2P, reason="Works only with P2P topology") + @pytest.mark.skipif(configuration.ENABLE_LEGACY, reason="Works only with P2P topology") def test_metrics_reconnect( self, cluster_manager: cluster_management.ClusterManager, diff --git a/cardano_node_tests/tests/test_rollback.py 
b/cardano_node_tests/tests/test_rollback.py index 4bf69ce20..05b77f269 100644 --- a/cardano_node_tests/tests/test_rollback.py +++ b/cardano_node_tests/tests/test_rollback.py @@ -115,7 +115,7 @@ def split_cluster(self, split_topology_dir: pl.Path) -> None: state_dir = cluster_nodes.get_cluster_env().state_dir topology_files = list(state_dir.glob("topology*.json")) - prefix = "p2p-split" if configuration.ENABLE_P2P else "split" + prefix = "split" if configuration.ENABLE_LEGACY else "p2p-split" for f in topology_files: shutil.copy(split_topology_dir / f"{prefix}-{f.name}", f) diff --git a/cardano_node_tests/utils/configuration.py b/cardano_node_tests/utils/configuration.py index be9d1a11f..3d453d2ad 100644 --- a/cardano_node_tests/utils/configuration.py +++ b/cardano_node_tests/utils/configuration.py @@ -33,7 +33,7 @@ def _check_cardano_node_socket_path() -> None: IS_XDIST = bool(os.environ.get("PYTEST_XDIST_TESTRUNUID")) # used also in startup scripts as `if [ -n "$VAR" ]...` -ENABLE_P2P = (os.environ.get("ENABLE_P2P") or "") != "" +ENABLE_LEGACY = (os.environ.get("ENABLE_LEGACY") or "") != "" # used also in startup scripts as `if [ -n "$VAR" ]...` MIXED_P2P = (os.environ.get("MIXED_P2P") or "") != "" diff --git a/check_dev_env.sh b/check_dev_env.sh index 81024b2dd..08c4d814c 100755 --- a/check_dev_env.sh +++ b/check_dev_env.sh @@ -49,7 +49,7 @@ IN_ROOT_DIR="$([ -d "cardano_node_tests" ]; process_result)" || exit_code=1 DEV_CLUSTER="$([ -n "${DEV_CLUSTER_RUNNING:-}" ]; process_result)" || exit_code=1 SOCKET_PATH_SET="$([ -n "${CARDANO_NODE_SOCKET_PATH:-}" ]; process_result)" || exit_code=1 USE_DBSYNC="$([ -n "${DBSYNC_REPO:-}" ]; process_result "optional")" || exit_code=1 -P2P_NET="$([ -n "${ENABLE_P2P:-}" ]; process_result "optional")" || exit_code=1 +P2P_NET="$([ -z "${ENABLE_LEGACY:-}" ]; process_result "optional")" || exit_code=1 printf "'cardano-node' available: $HAS_NODE\n" diff --git a/scripts/test_node_reconnect.sh b/scripts/test_node_reconnect.sh index 
653216abe..f3af1b73d 100755 --- a/scripts/test_node_reconnect.sh +++ b/scripts/test_node_reconnect.sh @@ -11,7 +11,6 @@ TOP_DIR="$(readlink -m "${0%/*}/..")" export \ CLUSTERS_COUNT=1 \ TEST_THREADS=0 \ - ENABLE_P2P=1 \ SCRIPTS_DIRNAME="${SCRIPTS_DIRNAME:-mainnet_fast}" \ PYTEST_ARGS="-s -k TestNodeReconnect" From 7e02381563579c8d35b05eebaa0b24ed5ea112a0 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 14:44:05 +0200 Subject: [PATCH 031/168] feat(mainnet): make mainnet testnet start in Conway era --- .../mainnet_fast/dbsync-config.yaml | 1 + .../empty-genesis.conway.spec.json | 3 - .../mainnet_fast/genesis.conway.spec.json | 254 +++++++++++- .../mainnet_fast/genesis.spec.json | 2 +- .../release-genesis.alonzo.spec.json | 371 ------------------ .../release-genesis.conway.spec.json | 39 -- .../release_8_9-genesis.conway.spec.json | 40 -- .../mainnet_fast/run-cardano-submit-api | 9 +- .../mainnet_fast/start-cluster | 240 ++++++----- .../mainnet_fast/template-config.json | 9 +- 10 files changed, 377 insertions(+), 591 deletions(-) delete mode 100644 cardano_node_tests/cluster_scripts/mainnet_fast/empty-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.alonzo.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.conway.spec.json delete mode 100644 cardano_node_tests/cluster_scripts/mainnet_fast/release_8_9-genesis.conway.spec.json diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/dbsync-config.yaml b/cardano_node_tests/cluster_scripts/mainnet_fast/dbsync-config.yaml index f6724feb4..71b4b7269 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/dbsync-config.yaml +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/dbsync-config.yaml @@ -4,6 +4,7 @@ NetworkName: localnet EnableLogMetrics: False EnableLogging: True +EnableFutureGenesis: True # The default port is 8080 # PrometheusPort: 8080 diff --git 
a/cardano_node_tests/cluster_scripts/mainnet_fast/empty-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/empty-genesis.conway.spec.json deleted file mode 100644 index 4525ef4a5..000000000 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/empty-genesis.conway.spec.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "genDelegs": {} -} diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.json index 441178802..17b09d995 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.json +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.json @@ -25,7 +25,259 @@ "dRepDeposit": 2000000, "dRepActivity": 100, "minFeeRefScriptCostPerByte": 0, - "plutusV3CostModel": [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], + "plutusV3CostModel": [ + 100788, + 420, + 1, + 1, + 1000, + 173, + 0, + 1, + 1000, + 59957, + 4, + 1, + 11183, + 32, + 201305, + 8356, + 4, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 100, + 100, + 16000, + 100, + 94375, + 32, + 132994, + 32, + 61462, + 4, + 72010, + 178, + 0, + 1, + 22151, + 32, + 91189, + 769, + 4, + 2, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 1000, + 42921, + 4, + 2, + 24548, + 29498, + 38, + 1, + 898148, + 27279, + 1, + 51775, + 558, + 1, + 39184, + 1000, + 60594, + 1, + 141895, + 32, + 83150, + 32, + 15299, + 32, + 76049, + 1, + 13169, + 4, + 22100, + 10, + 28999, + 74, + 1, + 
28999, + 74, + 1, + 43285, + 552, + 1, + 44749, + 541, + 1, + 33852, + 32, + 68246, + 32, + 72362, + 32, + 7243, + 32, + 7391, + 32, + 11546, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 90434, + 519, + 0, + 1, + 74433, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 955506, + 213312, + 0, + 2, + 270652, + 22588, + 4, + 1457325, + 64566, + 4, + 20467, + 1, + 4, + 0, + 141992, + 32, + 100788, + 420, + 1, + 1, + 81663, + 32, + 59498, + 32, + 20142, + 32, + 24588, + 32, + 20744, + 32, + 25933, + 32, + 24623, + 32, + 43053543, + 10, + 53384111, + 14333, + 10, + 43574283, + 26308, + 10, + 16000, + 100, + 16000, + 100, + 962335, + 18, + 2780678, + 6, + 442008, + 1, + 52538055, + 3756, + 18, + 267929, + 18, + 76433006, + 8868, + 18, + 52948122, + 18, + 1995836, + 36, + 3227919, + 12, + 901022, + 1, + 166917843, + 4307, + 36, + 284546, + 36, + 158221314, + 26549, + 36, + 74698472, + 36, + 333849714, + 1, + 254006273, + 72, + 2174038, + 72, + 2261318, + 64571, + 4, + 207616, + 8310, + 4, + 1293828, + 28716, + 63, + 0, + 1, + 1006041, + 43623, + 251, + 0, + 1 + ], "constitution": { "anchor": { "url": "", diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json index 9fb25b2ff..ef5937575 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json @@ -10,7 +10,7 @@ "protocolParams": { "protocolVersion": { "minor": 0, - "major": 8 + "major": 9 }, "decentralisationParam": 1, "eMax": 18, diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.alonzo.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.alonzo.spec.json deleted file mode 100644 index 3970561a6..000000000 --- 
a/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.alonzo.spec.json +++ /dev/null @@ -1,371 +0,0 @@ -{ - "lovelacePerUTxOWord": 34482, - "executionPrices": { - "prSteps": { - "numerator": 721, - "denominator": 10000000 - }, - "prMem": { - "numerator": 577, - "denominator": 10000 - } - }, - "maxTxExUnits": { - "exUnitsMem": 14000000, - "exUnitsSteps": 10000000000 - }, - "maxBlockExUnits": { - "exUnitsMem": 62000000, - "exUnitsSteps": 40000000000 - }, - "maxValueSize": 5000, - "collateralPercentage": 150, - "maxCollateralInputs": 3, - "costModels": { - "PlutusV1": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - "addInteger-memory-arguments-intercept": 1, - "addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - 
"chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - "divideInteger-cpu-arguments-model-arguments-intercept": 453240, - "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - "equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - "equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, 
- "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, - "lessThanInteger-cpu-arguments-intercept": 208896, - "lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - "modInteger-cpu-arguments-model-arguments-slope": 220, - "modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 
220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - "sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - "sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - "subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - "tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10 - }, - "PlutusV2": { - "addInteger-cpu-arguments-intercept": 205665, - "addInteger-cpu-arguments-slope": 812, - 
"addInteger-memory-arguments-intercept": 1, - "addInteger-memory-arguments-slope": 1, - "appendByteString-cpu-arguments-intercept": 1000, - "appendByteString-cpu-arguments-slope": 571, - "appendByteString-memory-arguments-intercept": 0, - "appendByteString-memory-arguments-slope": 1, - "appendString-cpu-arguments-intercept": 1000, - "appendString-cpu-arguments-slope": 24177, - "appendString-memory-arguments-intercept": 4, - "appendString-memory-arguments-slope": 1, - "bData-cpu-arguments": 1000, - "bData-memory-arguments": 32, - "blake2b_256-cpu-arguments-intercept": 117366, - "blake2b_256-cpu-arguments-slope": 10475, - "blake2b_256-memory-arguments": 4, - "cekApplyCost-exBudgetCPU": 23000, - "cekApplyCost-exBudgetMemory": 100, - "cekBuiltinCost-exBudgetCPU": 23000, - "cekBuiltinCost-exBudgetMemory": 100, - "cekConstCost-exBudgetCPU": 23000, - "cekConstCost-exBudgetMemory": 100, - "cekDelayCost-exBudgetCPU": 23000, - "cekDelayCost-exBudgetMemory": 100, - "cekForceCost-exBudgetCPU": 23000, - "cekForceCost-exBudgetMemory": 100, - "cekLamCost-exBudgetCPU": 23000, - "cekLamCost-exBudgetMemory": 100, - "cekStartupCost-exBudgetCPU": 100, - "cekStartupCost-exBudgetMemory": 100, - "cekVarCost-exBudgetCPU": 23000, - "cekVarCost-exBudgetMemory": 100, - "chooseData-cpu-arguments": 19537, - "chooseData-memory-arguments": 32, - "chooseList-cpu-arguments": 175354, - "chooseList-memory-arguments": 32, - "chooseUnit-cpu-arguments": 46417, - "chooseUnit-memory-arguments": 4, - "consByteString-cpu-arguments-intercept": 221973, - "consByteString-cpu-arguments-slope": 511, - "consByteString-memory-arguments-intercept": 0, - "consByteString-memory-arguments-slope": 1, - "constrData-cpu-arguments": 89141, - "constrData-memory-arguments": 32, - "decodeUtf8-cpu-arguments-intercept": 497525, - "decodeUtf8-cpu-arguments-slope": 14068, - "decodeUtf8-memory-arguments-intercept": 4, - "decodeUtf8-memory-arguments-slope": 2, - "divideInteger-cpu-arguments-constant": 196500, - 
"divideInteger-cpu-arguments-model-arguments-intercept": 453240, - "divideInteger-cpu-arguments-model-arguments-slope": 220, - "divideInteger-memory-arguments-intercept": 0, - "divideInteger-memory-arguments-minimum": 1, - "divideInteger-memory-arguments-slope": 1, - "encodeUtf8-cpu-arguments-intercept": 1000, - "encodeUtf8-cpu-arguments-slope": 28662, - "encodeUtf8-memory-arguments-intercept": 4, - "encodeUtf8-memory-arguments-slope": 2, - "equalsByteString-cpu-arguments-constant": 245000, - "equalsByteString-cpu-arguments-intercept": 216773, - "equalsByteString-cpu-arguments-slope": 62, - "equalsByteString-memory-arguments": 1, - "equalsData-cpu-arguments-intercept": 1060367, - "equalsData-cpu-arguments-slope": 12586, - "equalsData-memory-arguments": 1, - "equalsInteger-cpu-arguments-intercept": 208512, - "equalsInteger-cpu-arguments-slope": 421, - "equalsInteger-memory-arguments": 1, - "equalsString-cpu-arguments-constant": 187000, - "equalsString-cpu-arguments-intercept": 1000, - "equalsString-cpu-arguments-slope": 52998, - "equalsString-memory-arguments": 1, - "fstPair-cpu-arguments": 80436, - "fstPair-memory-arguments": 32, - "headList-cpu-arguments": 43249, - "headList-memory-arguments": 32, - "iData-cpu-arguments": 1000, - "iData-memory-arguments": 32, - "ifThenElse-cpu-arguments": 80556, - "ifThenElse-memory-arguments": 1, - "indexByteString-cpu-arguments": 57667, - "indexByteString-memory-arguments": 4, - "lengthOfByteString-cpu-arguments": 1000, - "lengthOfByteString-memory-arguments": 10, - "lessThanByteString-cpu-arguments-intercept": 197145, - "lessThanByteString-cpu-arguments-slope": 156, - "lessThanByteString-memory-arguments": 1, - "lessThanEqualsByteString-cpu-arguments-intercept": 197145, - "lessThanEqualsByteString-cpu-arguments-slope": 156, - "lessThanEqualsByteString-memory-arguments": 1, - "lessThanEqualsInteger-cpu-arguments-intercept": 204924, - "lessThanEqualsInteger-cpu-arguments-slope": 473, - "lessThanEqualsInteger-memory-arguments": 1, 
- "lessThanInteger-cpu-arguments-intercept": 208896, - "lessThanInteger-cpu-arguments-slope": 511, - "lessThanInteger-memory-arguments": 1, - "listData-cpu-arguments": 52467, - "listData-memory-arguments": 32, - "mapData-cpu-arguments": 64832, - "mapData-memory-arguments": 32, - "mkCons-cpu-arguments": 65493, - "mkCons-memory-arguments": 32, - "mkNilData-cpu-arguments": 22558, - "mkNilData-memory-arguments": 32, - "mkNilPairData-cpu-arguments": 16563, - "mkNilPairData-memory-arguments": 32, - "mkPairData-cpu-arguments": 76511, - "mkPairData-memory-arguments": 32, - "modInteger-cpu-arguments-constant": 196500, - "modInteger-cpu-arguments-model-arguments-intercept": 453240, - "modInteger-cpu-arguments-model-arguments-slope": 220, - "modInteger-memory-arguments-intercept": 0, - "modInteger-memory-arguments-minimum": 1, - "modInteger-memory-arguments-slope": 1, - "multiplyInteger-cpu-arguments-intercept": 69522, - "multiplyInteger-cpu-arguments-slope": 11687, - "multiplyInteger-memory-arguments-intercept": 0, - "multiplyInteger-memory-arguments-slope": 1, - "nullList-cpu-arguments": 60091, - "nullList-memory-arguments": 32, - "quotientInteger-cpu-arguments-constant": 196500, - "quotientInteger-cpu-arguments-model-arguments-intercept": 453240, - "quotientInteger-cpu-arguments-model-arguments-slope": 220, - "quotientInteger-memory-arguments-intercept": 0, - "quotientInteger-memory-arguments-minimum": 1, - "quotientInteger-memory-arguments-slope": 1, - "remainderInteger-cpu-arguments-constant": 196500, - "remainderInteger-cpu-arguments-model-arguments-intercept": 453240, - "remainderInteger-cpu-arguments-model-arguments-slope": 220, - "remainderInteger-memory-arguments-intercept": 0, - "remainderInteger-memory-arguments-minimum": 1, - "remainderInteger-memory-arguments-slope": 1, - "serialiseData-cpu-arguments-intercept": 1159724, - "serialiseData-cpu-arguments-slope": 392670, - "serialiseData-memory-arguments-intercept": 0, - "serialiseData-memory-arguments-slope": 2, - 
"sha2_256-cpu-arguments-intercept": 806990, - "sha2_256-cpu-arguments-slope": 30482, - "sha2_256-memory-arguments": 4, - "sha3_256-cpu-arguments-intercept": 1927926, - "sha3_256-cpu-arguments-slope": 82523, - "sha3_256-memory-arguments": 4, - "sliceByteString-cpu-arguments-intercept": 265318, - "sliceByteString-cpu-arguments-slope": 0, - "sliceByteString-memory-arguments-intercept": 4, - "sliceByteString-memory-arguments-slope": 0, - "sndPair-cpu-arguments": 85931, - "sndPair-memory-arguments": 32, - "subtractInteger-cpu-arguments-intercept": 205665, - "subtractInteger-cpu-arguments-slope": 812, - "subtractInteger-memory-arguments-intercept": 1, - "subtractInteger-memory-arguments-slope": 1, - "tailList-cpu-arguments": 41182, - "tailList-memory-arguments": 32, - "trace-cpu-arguments": 212342, - "trace-memory-arguments": 32, - "unBData-cpu-arguments": 31220, - "unBData-memory-arguments": 32, - "unConstrData-cpu-arguments": 32696, - "unConstrData-memory-arguments": 32, - "unIData-cpu-arguments": 43357, - "unIData-memory-arguments": 32, - "unListData-cpu-arguments": 32247, - "unListData-memory-arguments": 32, - "unMapData-cpu-arguments": 38314, - "unMapData-memory-arguments": 32, - "verifyEcdsaSecp256k1Signature-cpu-arguments": 35892428, - "verifyEcdsaSecp256k1Signature-memory-arguments": 10, - "verifyEd25519Signature-cpu-arguments-intercept": 9462713, - "verifyEd25519Signature-cpu-arguments-slope": 1021, - "verifyEd25519Signature-memory-arguments": 10, - "verifySchnorrSecp256k1Signature-cpu-arguments-intercept": 38887044, - "verifySchnorrSecp256k1Signature-cpu-arguments-slope": 32947, - "verifySchnorrSecp256k1Signature-memory-arguments": 10 - } - } -} diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.conway.spec.json deleted file mode 100644 index 59ed2c5f4..000000000 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/release-genesis.conway.spec.json +++ 
/dev/null @@ -1,39 +0,0 @@ -{ - "poolVotingThresholds": { - "pvtCommitteeNormal": 0, - "pvtCommitteeNoConfidence": 0, - "pvtHardForkInitiation": 0, - "pvtMotionNoConfidence": 0 - }, - "dRepVotingThresholds": { - "dvtMotionNoConfidence": 0, - "dvtCommitteeNormal": 0, - "dvtCommitteeNoConfidence": 0, - "dvtUpdateToConstitution": 0, - "dvtHardForkInitiation": 0, - "dvtPPNetworkGroup": 0, - "dvtPPEconomicGroup": 0, - "dvtPPTechnicalGroup": 0, - "dvtPPGovGroup": 0, - "dvtTreasuryWithdrawal": 0 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 0, - "govActionLifetime": 0, - "govActionDeposit": 0, - "dRepDeposit": 0, - "dRepActivity": 0, - "constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - "keyHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 1, - "scriptHash-4e88cc2d27c364aaf90648a87dfb95f8ee103ba67fa1f12f5e86c42a": 2 - }, - "quorum": 0.5 - } -} diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/release_8_9-genesis.conway.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/release_8_9-genesis.conway.spec.json deleted file mode 100644 index d97a1622d..000000000 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/release_8_9-genesis.conway.spec.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "poolVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "hardForkInitiation": 0.51, - "ppSecurityGroup": 0.51 - }, - "dRepVotingThresholds": { - "motionNoConfidence": 0.51, - "committeeNormal": 0.51, - "committeeNoConfidence": 0.51, - "updateToConstitution": 0.51, - "hardForkInitiation": 0.0, - "ppNetworkGroup": 0.51, - "ppEconomicGroup": 0.51, - "ppTechnicalGroup": 0.51, - "ppGovGroup": 0.51, - "treasuryWithdrawal": 0.51 - }, - "committeeMinSize": 0, - "committeeMaxTermLength": 11000, - "govActionLifetime": 2, - "govActionDeposit": 100000000, - "dRepDeposit": 2000000, - 
"dRepActivity": 100, - "minFeeRefScriptCostPerByte": 0, - "plutusV3CostModel": [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], - "constitution": { - "anchor": { - "url": "", - "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" - } - }, - "committee": { - "members": { - }, - "threshold": 0.0 - } -} diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api index 66fac02ae..fb4825d15 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api @@ -2,18 +2,15 @@ testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" -# TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 -metrics_port="$(cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; if [[ "$i" == *Invalid* ]]; then echo ""; else echo "--metrics-port %%METRICS_SUBMIT_API_PORT%%"; fi; })" - echo "Starting cardano-submit-api: cardano-submit-api" echo "--config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" echo "--listen-address 127.0.0.1" echo "--port %%SUBMIT_API_PORT%%" - echo "$metrics_port" + echo "--metrics-port %%METRICS_SUBMIT_API_PORT%%" echo --testnet-magic "$testnet_magic" echo "..or, once again, in a single line:" -echo cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path 
./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% "$metrics_port" --testnet-magic "$testnet_magic" +echo cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% --metrics-port %%METRICS_SUBMIT_API_PORT%% --testnet-magic "$testnet_magic" # shellcheck disable=SC2086 -exec cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% $metrics_port --testnet-magic "$testnet_magic" +exec cardano-submit-api --config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json --socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket --listen-address 127.0.0.1 --port %%SUBMIT_API_PORT%% --metrics-port %%METRICS_SUBMIT_API_PORT%% --testnet-magic "$testnet_magic" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 6944b61b2..6e4219160 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -5,6 +5,8 @@ # ENABLE_LEGACY - if set, local cluster will use legacy networking # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset +# NO_CC - if set, will not create committee +# PV10 - if set, will use protocol version 10 # DRY_RUN - if set, will not start the cluster set -euo pipefail @@ -23,9 +25,15 @@ fi NUM_BFT_NODES=1 NUM_POOLS=%%NUM_POOLS%% +NUM_CC=5 TX_SUBMISSION_DELAY=60 +SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 BYRON_INIT_SUPPLY=10020000000 +PROTOCOL_VERSION=9 +if [ -n "${PV10:-""}" ]; then + PROTOCOL_VERSION=10 +fi SECURITY_PARAM="$(jq '.securityParam' < 
"$SCRIPT_DIR/genesis.spec.json")" NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" @@ -72,21 +80,17 @@ cardano_cli_log() { return "$retval" } -enable_submit_api() { - command -v cardano-submit-api >/dev/null 2>&1 || return 1 - - # TODO: `--metrics-port` is not available in older cardano-node releases, see node issue #4280 - # If the metrics port is not available, we can start the `cardano-submit-api` only on the first - # cluster instance. - [[ "$CARDANO_NODE_SOCKET_PATH" == */cluster0/* ]] && return 0 - if cardano-submit-api --metrics-port 8081 2>&1 | { read -r i; [[ "$i" == *Invalid* ]]; }; then - return 1 - fi - - return 0 +check_spend_success() { + for _ in {1..10}; do + if ! cardano_cli_log conway query utxo "$@" --testnet-magic "$NETWORK_MAGIC" | grep -q lovelace; then + return 0 + fi + sleep 3 + done + return 1 } -ENABLE_SUBMIT_API="$(enable_submit_api && echo 1 || echo 0)" +ENABLE_SUBMIT_API="$(command -v cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" if [ -e "$SCRIPT_DIR/shell_env" ]; then # shellcheck disable=SC1090,SC1091 @@ -180,68 +184,62 @@ cardano_cli_log byron genesis genesis \ mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" -gen_genesis() { - cardano_cli_log genesis create-staked \ - --genesis-dir "$STATE_CLUSTER/create_staked" \ - --testnet-magic "$NETWORK_MAGIC" \ - --gen-pools "$NUM_POOLS" \ - --gen-utxo-keys 1 \ - --supply "$NONDELEG_SUPPLY" \ - --gen-stake-delegs "$NUM_POOLS" \ - --supply-delegated "$DELEG_SUPPLY" \ - --start-time "$START_TIME_SHELLEY" -} - -gen_genesis && genesis_created=1 || genesis_created=0 - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/master-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/release_8_9-genesis.conway.spec.json" \ - 
"$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/release_8_9-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/release-genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi - -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/release-genesis.conway.spec.json" - mv "$STATE_CLUSTER/create_staked/empty-genesis.conway.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.conway.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 -fi +cardano_cli_log legacy genesis create-staked \ + --genesis-dir "$STATE_CLUSTER/create_staked" \ + --testnet-magic "$NETWORK_MAGIC" \ + --gen-pools "$NUM_POOLS" \ + --gen-utxo-keys 1 \ + --supply "$NONDELEG_SUPPLY" \ + --gen-stake-delegs "$NUM_POOLS" \ + --supply-delegated "$DELEG_SUPPLY" \ + --start-time "$START_TIME_SHELLEY" + +# Create committee keys +mkdir -p "$STATE_CLUSTER/governance_data" +if [ -z "${NO_CC:-""}" ]; then + for i in $(seq 1 "$NUM_CC"); do + cardano_cli_log conway governance committee key-gen-cold \ + --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ + --cold-signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.skey" + cardano_cli_log conway governance committee key-gen-hot \ + --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ + --signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.skey" + 
cardano_cli_log conway governance committee create-hot-key-authorization-certificate \ + --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ + --hot-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ + --out-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot_auth.cert" + cardano_cli_log conway governance committee key-hash \ + --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ + > "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.hash" + done -if [ "$genesis_created" -eq 0 ] ; then - echo "Failed to generate genesis files, retrying with a different genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/create_staked/genesis.alonzo.spec.json" \ - "$STATE_CLUSTER/create_staked/master-genesis.alonzo.spec.json" - mv "$STATE_CLUSTER/create_staked/release-genesis.alonzo.spec.json" \ - "$STATE_CLUSTER/create_staked/genesis.alonzo.spec.json" - gen_genesis && genesis_created=1 || genesis_created=0 + # Pre-register committee in genesis + KEY_HASH_JSON=$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ + "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash) + jq \ + --argjson keyHashJson "$KEY_HASH_JSON" \ + '.committee.members = $keyHashJson + | .committee.threshold = 0.6 + | .committeeMinSize = 2' \ + "$STATE_CLUSTER/create_staked/genesis.conway.json" > "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" + cat "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" > "$STATE_CLUSTER/create_staked/genesis.conway.json" + rm -f "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" fi mv "$STATE_CLUSTER/create_staked/delegate-keys" "$STATE_CLUSTER/shelley/delegate-keys" mv "$STATE_CLUSTER/create_staked/genesis-keys" "$STATE_CLUSTER/shelley/genesis-keys" jq \ --argjson max_supply "$MAX_SUPPLY" \ - '.maxLovelaceSupply = $max_supply' \ + --argjson prot_ver "$PROTOCOL_VERSION" \ + '.protocolParams.protocolVersion.major = 
$prot_ver + | .maxLovelaceSupply = $max_supply' \ "$STATE_CLUSTER/create_staked/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json" rm -f "$STATE_CLUSTER/create_staked/genesis.json" mv "$STATE_CLUSTER"/create_staked/genesis*.json "$STATE_CLUSTER/shelley/" mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" -cardano_cli_log address build --payment-verification-key-file \ +cardano_cli_log conway address build --payment-verification-key-file \ "$STATE_CLUSTER/shelley/genesis-utxo.vkey" \ --out-file "$STATE_CLUSTER/shelley/genesis-utxo.addr" \ --testnet-magic "$NETWORK_MAGIC" @@ -250,20 +248,12 @@ mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/s BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ "$STATE_CLUSTER/byron/genesis.json")" -SHELLEY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ +SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ "$STATE_CLUSTER/shelley/genesis.json")" -ALONZO_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ +ALONZO_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ "$STATE_CLUSTER/shelley/genesis.alonzo.json")" - -CONWAY_GENESIS_HASH="" -EXP_PROTOCOLS_KEY="TestEnableDevelopmentNetworkProtocols" - -# conway genesis is not present on node < 1.36.0 -if [ -e "$STATE_CLUSTER/shelley/genesis.conway.json" ]; then - CONWAY_GENESIS_HASH="$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" - EXP_PROTOCOLS_KEY="ExperimentalProtocolsEnabled" -fi +CONWAY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ + "$STATE_CLUSTER/shelley/genesis.conway.json")" for conf in "$SCRIPT_DIR"/config-*.json; do fname="${conf##*/}" @@ -271,24 +261,15 @@ for conf in "$SCRIPT_DIR"/config-*.json; do --arg byron_hash "$BYRON_GENESIS_HASH" \ --arg shelley_hash 
"$SHELLEY_GENESIS_HASH" \ --arg alonzo_hash "$ALONZO_GENESIS_HASH" \ - --arg exp_protocols_key "$EXP_PROTOCOLS_KEY" \ + --arg conway_hash "$CONWAY_GENESIS_HASH" \ + --argjson prot_ver "$PROTOCOL_VERSION" \ '.ByronGenesisHash = $byron_hash | .ShelleyGenesisHash = $shelley_hash | .AlonzoGenesisHash = $alonzo_hash - | .[$exp_protocols_key] = true' \ + | .ConwayGenesisHash = $conway_hash + | ."LastKnownBlockVersion-Major" = $prot_ver' \ "$conf" > "$STATE_CLUSTER/$fname" - if [ -n "$CONWAY_GENESIS_HASH" ]; then - CONWAY_GENESIS_HASH="${CONWAY_GENESIS_HASH:-"$(cardano_cli_log genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")"}" - jq \ - --arg conway_hash "$CONWAY_GENESIS_HASH" \ - '.ConwayGenesisFile = "shelley/genesis.conway.json" | .ConwayGenesisHash = $conway_hash' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" - fi - # enable P2P if [ -z "${ENABLE_LEGACY:-""}" ]; then @@ -331,6 +312,9 @@ for i in $(seq 1 $NUM_BFT_NODES); do echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" done +KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" + for i in $(seq 1 "$NUM_POOLS"); do mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" @@ -352,37 +336,44 @@ for i in $(seq 1 "$NUM_POOLS"); do echo "Generating Pool $i Secrets" # pool owner addresses and keys - cardano_cli_log address key-gen \ + cardano_cli_log conway address key-gen \ --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" - cardano_cli_log stake-address key-gen \ + cardano_cli_log conway stake-address key-gen \ --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" + # 
payment address - cardano_cli_log address build \ + cardano_cli_log conway address build \ --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" + # stake address - cardano_cli_log stake-address build \ + cardano_cli_log conway stake-address build \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" + # stake address registration cert - cardano_cli_log stake-address registration-certificate \ + cardano_cli_log conway stake-address registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" # stake reward address registration cert - cardano_cli_log stake-address registration-certificate \ + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --always-abstain \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - # stake address delegation certs - cardano_cli_log stake-address delegation-certificate \ + # owner stake address delegation certs + cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --always-abstain \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" POOL_NAME="TestPool$i" @@ -413,13 +404,13 @@ EoF > "$STATE_CLUSTER/webserver/pool$i.json" METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - 
METADATA_HASH=$(cardano_cli_log stake-pool metadata-hash --pool-metadata-file \ + METADATA_HASH=$(cardano_cli_log conway stake-pool metadata-hash --pool-metadata-file \ "$STATE_CLUSTER/webserver/pool$i.json") POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" - cardano_cli_log stake-pool registration-certificate \ + cardano_cli_log conway stake-pool registration-certificate \ --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ @@ -504,7 +495,7 @@ done # -# In Babbage era +# In Conway era # @@ -531,13 +522,12 @@ done # Transfer funds, register stake addresses and pools, all in one big transaction: -cardano_cli_log query protocol-parameters \ +cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" -DEPOSITS="$(jq '2 * .protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" +DEPOSITS="$((KEY_DEPOSIT * 2))" NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" FEE_BUFFER=100000000 STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" @@ -552,7 +542,7 @@ while read -r txhash txix amount _; do if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then break fi -done <<< "$(cardano_cli_log query utxo --testnet-magic \ +done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ "$NETWORK_MAGIC" \ --address "$TXIN_ADDR" | grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" @@ -568,14 +558,6 @@ for i in $(seq 1 "$NUM_POOLS"); do ) done -cardano_cli_log transaction build-raw \ - --babbage-era \ - --fee 0 \ - "${TXINS[@]}" \ - --tx-out "$TXIN_ADDR+0" \ - "${POOL_ARGS[@]}" \ - --out-file 
"$STATE_CLUSTER/shelley/transfer-register-delegate-fee-tx.txbody" - POOL_SIGNING=() for i in $(seq 1 "$NUM_POOLS"); do POOL_SIGNING+=( \ @@ -585,27 +567,40 @@ for i in $(seq 1 "$NUM_POOLS"); do ) done -WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + 1))" +CC_ARGS=() +for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do + [ -e "$f" ] || continue + CC_ARGS+=( "--certificate-file" "$f" ) +done + +CC_SIGNING=() +for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do + [ -e "$f" ] || continue + CC_SIGNING+=( "--signing-key-file" "$f" ) +done -cardano_cli_log transaction build \ - --babbage-era \ +WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + ${#CC_SIGNING[@]} + 1))" + +cardano_cli_log conway transaction build \ "${TXINS[@]}" \ --change-address "$TXIN_ADDR" \ "${POOL_ARGS[@]}" \ + "${CC_ARGS[@]}" \ --witness-override "$WITNESS_COUNT" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" + --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" -cardano_cli_log transaction sign \ +cardano_cli_log conway transaction sign \ "${POOL_SIGNING[@]}" \ "${GENESIS_SIGNING[@]}" \ "${DELEGATE_SIGNING[@]}" \ + "${CC_SIGNING[@]}" \ --signing-key-file "$STATE_CLUSTER/shelley/genesis-utxo.skey" \ --testnet-magic "$NETWORK_MAGIC" \ --tx-body-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" \ --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.tx" -cardano_cli_log transaction submit \ +cardano_cli_log conway transaction submit \ --tx-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.tx" \ --testnet-magic "$NETWORK_MAGIC" @@ -615,17 +610,8 @@ if [ "$ENABLE_SUBMIT_API" -eq 1 ]; then supervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start submit_api fi -sleep 3 - -query_spend_success=0 -for _ in {1..30}; do - if ! 
cardano_cli_log query utxo "${TXINS[@]}" --testnet-magic "$NETWORK_MAGIC" | grep -q lovelace; then - query_spend_success=1 - break - fi - sleep 3 -done -if [ "$query_spend_success" -eq 0 ]; then +sleep "$SUBMIT_DELAY" +if ! check_spend_success "${TXINS[@]}"; then echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert exit 1 fi diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/template-config.json b/cardano_node_tests/cluster_scripts/mainnet_fast/template-config.json index 2b4c22dad..9fb46ed9c 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/template-config.json +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/template-config.json @@ -3,13 +3,14 @@ "ApplicationVersion": 1, "ByronGenesisFile": "byron/genesis.json", "LastKnownBlockVersion-Alt": 0, - "LastKnownBlockVersion-Major": 6, + "LastKnownBlockVersion-Major": 9, "LastKnownBlockVersion-Minor": 0, "PBftSignatureThreshold": 1, "Protocol": "Cardano", "RequiresNetworkMagic": "RequiresMagic", "ShelleyGenesisFile": "shelley/genesis.json", "AlonzoGenesisFile": "shelley/genesis.alonzo.json", + "ConwayGenesisFile": "shelley/genesis.conway.json", "TraceBlockFetchClient": false, "TraceBlockFetchDecisions": false, "TraceBlockFetchProtocol": false, @@ -46,7 +47,6 @@ "TraceInboundGovernorCounters": true, "TraceInboundGovernorTransitions": true, "DebugPeerSelectionInitiator": true, - "DebugPeerSelectionInitiatorOnly": true, "DebugPeerSelectionInitiatorResponder": true, "TracingVerbosity": "NormalVerbosity", "TurnOnLogMetrics": true, @@ -181,5 +181,8 @@ "TestAllegraHardForkAtEpoch": 0, "TestMaryHardForkAtEpoch": 0, "TestAlonzoHardForkAtEpoch": 0, - "TestBabbageHardForkAtEpoch": 0 + "TestBabbageHardForkAtEpoch": 0, + "TestConwayHardForkAtEpoch": 0, + "ExperimentalHardForksEnabled": true, + "ExperimentalProtocolsEnabled": true } From 50e44017421d593014a34281261953e05be6daee Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 14:49:38 +0200 Subject: [PATCH 032/168] Reformat JSON 
files --- .../mainnet_fast/byron-params.json | 42 +++++----- .../mainnet_fast/genesis.spec.json | 80 +++++++++---------- 2 files changed, 61 insertions(+), 61 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/byron-params.json b/cardano_node_tests/cluster_scripts/mainnet_fast/byron-params.json index e1aaa2990..96047ef5b 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/byron-params.json +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/byron-params.json @@ -1,23 +1,23 @@ { - "heavyDelThd": "300000000000", - "maxBlockSize": "2000000", - "maxTxSize": "4096", - "maxHeaderSize": "2000000", - "maxProposalSize": "700", - "mpcThd": "20000000000000", - "scriptVersion": 0, - "slotDuration": "20000", - "softforkRule": { - "initThd": "900000000000000", - "minThd": "600000000000000", - "thdDecrement": "50000000000000" - }, - "txFeePolicy": { - "summand": "155381000000000", - "multiplier": "43946000000" - }, - "unlockStakeEpoch": "18446744073709551615", - "updateImplicit": "10000", - "updateProposalThd": "100000000000000", - "updateVoteThd": "1000000000000" + "heavyDelThd": "300000000000", + "maxBlockSize": "2000000", + "maxTxSize": "4096", + "maxHeaderSize": "2000000", + "maxProposalSize": "700", + "mpcThd": "20000000000000", + "scriptVersion": 0, + "slotDuration": "20000", + "softforkRule": { + "initThd": "900000000000000", + "minThd": "600000000000000", + "thdDecrement": "50000000000000" + }, + "txFeePolicy": { + "summand": "155381000000000", + "multiplier": "43946000000" + }, + "unlockStakeEpoch": "18446744073709551615", + "updateImplicit": "10000", + "updateProposalThd": "100000000000000", + "updateVoteThd": "1000000000000" } diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json index ef5937575..ca5209d78 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json +++ 
b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.spec.json @@ -1,44 +1,44 @@ { - "activeSlotsCoeff": 0.05, - "epochLength": 432000, - "genDelegs": {}, - "initialFunds": {}, - "maxKESEvolutions": 62, - "maxLovelaceSupply": 45000000000000000, - "networkId": "Testnet", - "networkMagic": 42, - "protocolParams": { - "protocolVersion": { - "minor": 0, - "major": 9 - }, - "decentralisationParam": 1, - "eMax": 18, - "extraEntropy": { - "tag": "NeutralNonce" - }, - "maxTxSize": 16384, - "maxBlockBodySize": 65536, - "maxBlockHeaderSize": 1100, - "minFeeA": 44, - "minFeeB": 155381, - "minUTxOValue": 1000000, - "poolDeposit": 500000000, - "minPoolCost": 340000000, - "keyDeposit": 2000000, - "nOpt": 150, - "rho": 0.003, - "tau": 0.2, - "a0": 0.3 + "activeSlotsCoeff": 0.05, + "epochLength": 432000, + "genDelegs": {}, + "initialFunds": {}, + "maxKESEvolutions": 62, + "maxLovelaceSupply": 45000000000000000, + "networkId": "Testnet", + "networkMagic": 42, + "protocolParams": { + "protocolVersion": { + "minor": 0, + "major": 9 }, - "securityParam": 2160, - "slotLength": 1, - "slotsPerKESPeriod": 129600, - "staking": { - "pools": {}, - "stake": {} + "decentralisationParam": 1, + "eMax": 18, + "extraEntropy": { + "tag": "NeutralNonce" }, - "systemStart": "2020-07-08T02:39:16.033076859Z", - "updateQuorum": 1, - "protocolMagicId": 42 + "maxTxSize": 16384, + "maxBlockBodySize": 65536, + "maxBlockHeaderSize": 1100, + "minFeeA": 44, + "minFeeB": 155381, + "minUTxOValue": 1000000, + "poolDeposit": 500000000, + "minPoolCost": 340000000, + "keyDeposit": 2000000, + "nOpt": 150, + "rho": 0.003, + "tau": 0.2, + "a0": 0.3 + }, + "securityParam": 2160, + "slotLength": 1, + "slotsPerKESPeriod": 129600, + "staking": { + "pools": {}, + "stake": {} + }, + "systemStart": "2020-07-08T02:39:16.033076859Z", + "updateQuorum": 1, + "protocolMagicId": 42 } From 944dbd44678709d269a457454564921e21835198 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 24 Oct 2024 17:09:47 +0200 Subject: [PATCH 
033/168] chore: update cardano-clusterlib to 0.7.0a4 - Bump cardano-clusterlib version from 0.7.0a3 to 0.7.0a4 in poetry.lock, pyproject.toml, and requirements_freeze.txt. - Update content-hash in poetry.lock to reflect changes. - Update Poetry version in poetry.lock from 1.8.3 to 1.8.4. --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- requirements_freeze.txt | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3c957b5d1..01ec6e4bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "alabaster" @@ -105,13 +105,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "cardano-clusterlib" -version = "0.7.0a3" +version = "0.7.0a4" description = "Python wrapper for cardano-cli for working with cardano cluster" optional = false python-versions = ">=3.8" files = [ - {file = "cardano_clusterlib-0.7.0a3-py3-none-any.whl", hash = "sha256:d12dda87cb67e37c3867c8fdbbbfc96596c832d861c1d887e5c5b372cefddc20"}, - {file = "cardano_clusterlib-0.7.0a3.tar.gz", hash = "sha256:1366b1d6b7ae8a2a3acfb09621b77d256454155467f08ec7f9af4cbbbe6d2150"}, + {file = "cardano_clusterlib-0.7.0a4-py3-none-any.whl", hash = "sha256:6d0b497f116c6060b4e16fb4bf028eb087e4f91d5ae0a3269a5b7b76253cce86"}, + {file = "cardano_clusterlib-0.7.0a4.tar.gz", hash = "sha256:37181fc3000bde7355dcf594e33ba787feebe903dccf1b1081ac059a349df422"}, ] [package.dependencies] @@ -1961,4 +1961,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "fe49494d7c2b6bd271fcbedee810efa3d4ebfb0a17647667d9c262c0be7a348f" +content-hash = "3e3e2d9ee601c18527c77344a19a7087cad4d1ba625805258bd49c17ebc45f2b" diff --git a/pyproject.toml b/pyproject.toml index 3e759f61a..eb3fb1b5d 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ packages = [{include = "cardano_node_tests"}] [tool.poetry.dependencies] python = ">=3.9,<4.0" allure-pytest = "^2.13.5" -cardano-clusterlib = "^0.7.0a3" +cardano-clusterlib = "^0.7.0a4" cbor2 = "^5.6.4" filelock = "^3.15.4" hypothesis = "^6.108.5" diff --git a/requirements_freeze.txt b/requirements_freeze.txt index cb8dcb625..e6aadb371 100644 --- a/requirements_freeze.txt +++ b/requirements_freeze.txt @@ -4,7 +4,7 @@ allure-pytest==2.13.5 ; python_version >= "3.9" and python_version < "4.0" allure-python-commons==2.13.5 ; python_version >= "3.9" and python_version < "4.0" annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" attrs==24.2.0 ; python_version >= "3.9" and python_version < "4.0" -cardano-clusterlib==0.7.0a3 ; python_version >= "3.9" and python_version < "4.0" +cardano-clusterlib==0.7.0a4 ; python_version >= "3.9" and python_version < "4.0" cbor2==5.6.4 ; python_version >= "3.9" and python_version < "4.0" certifi==2024.8.30 ; python_version >= "3.9" and python_version < "4.0" cffi==1.17.1 ; python_version >= "3.9" and python_version < "4.0" From c692a8297a615ffc26a3cada8de02c0950c00040 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 25 Oct 2024 19:07:21 +0200 Subject: [PATCH 034/168] feat(cluster): add DRep support to cluster scripts Setup DReps in cluster scripts instead of in the framework. - Introduced NUM_DREPS and DREP_DELEGATED variables. - Added logic to generate DRep keys and registration certificates. - Updated transaction building to include DRep registration. - Modified governance setup to load DReps and DRep users. - Adjusted protocol parameters and transaction amounts. 
--- .../cluster_scripts/conway/start-cluster | 182 +++++++++++++----- .../cluster_scripts/conway_fast/start-cluster | 85 +++++++- cardano_node_tests/utils/governance_setup.py | 94 ++++++--- 3 files changed, 274 insertions(+), 87 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index f3ebf64b7..4ffdb250b 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -25,10 +25,12 @@ fi NUM_BFT_NODES=1 NUM_POOLS=%%NUM_POOLS%% NUM_CC=5 +NUM_DREPS=5 TX_SUBMISSION_DELAY=60 PROPOSAL_DELAY=5 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 +DREP_DELEGATED=500000000000 FEE=5000000 @@ -260,8 +262,9 @@ jq -r ' cat "$STATE_CLUSTER/shelley/genesis.json_jq" > "$STATE_CLUSTER/shelley/genesis.json" rm -f "$STATE_CLUSTER/shelley/genesis.json_jq" -# Create committee keys mkdir -p "$STATE_CLUSTER/governance_data" + +# Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ @@ -533,6 +536,54 @@ mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" rmdir "$STATE_CLUSTER/shelley/utxo-keys" +KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + +for i in $(seq 1 "$NUM_DREPS"); do + # DRep keys + cardano_cli_log conway governance drep key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + + # DRep registration + cardano_cli_log conway governance drep registration-certificate \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + 
--key-reg-deposit-amt "$DREP_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + + # delegatee payment keys + cardano_cli_log conway address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + + # delegatee stake keys + cardano_cli_log conway stake-address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + + # delegatee payment address + cardano_cli_log conway address build \ + --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + + # delegatee stake address + cardano_cli_log conway stake-address build \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + + # delegatee stake address registration and vote delegation cert + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" +done + # create scripts for cluster starting / stopping printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" printf "#!/bin/sh\n\nsupervisorctl -s 
http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" @@ -1198,63 +1249,90 @@ PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json") [ "$PROTOCOL_VERSION" = 9 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert -# Register committee members -if [ -z "${NO_CC:-""}" ]; then - STOP_TXIN_AMOUNT=$FEE - - TXINS=() - TXIN_COUNT=0 - TXIN_AMOUNT=0 - while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi - done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +# Register CC members, DReps, all in one big transaction: - TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" +DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" +DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" +STOP_TXIN_AMOUNT="$(( FEE + DREP_NEEDED_AMOUNT))" - V9_TX_NAME="reg_cc_members" +TXINS=() +TXIN_COUNT=0 +TXIN_AMOUNT=0 +while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXIN_COUNT="$((TXIN_COUNT + 1))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then + break + fi +done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$FAUCET_ADDR" | + grep -E "lovelace \+ TxOutDatumNone$")" - CC_ARGS=() - for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do - CC_ARGS+=( "--certificate-file" "$f" ) - done +TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" - CC_SIGNING=() - for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do - CC_SIGNING+=( "--signing-key-file" "$f" ) - done +V9_TX_NAME="setup_governance" - cardano_cli_log conway transaction build-raw \ - --fee 
"$FEE" \ - "${TXINS[@]}" \ - "${CC_ARGS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --out-file "${V9_TX_NAME}-tx.txbody" - - cardano_cli_log conway transaction sign \ - --signing-key-file "$FAUCET_SKEY" \ - "${CC_SIGNING[@]}" \ - --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${V9_TX_NAME}-tx.txbody" \ - --out-file "${V9_TX_NAME}-tx.tx" - - cardano_cli_log conway transaction submit \ - --tx-file "${V9_TX_NAME}-tx.tx" \ - --testnet-magic "$NETWORK_MAGIC" - - sleep "$SUBMIT_DELAY" - if ! check_spend_success "${TXINS[@]}"; then - echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert - exit 1 - fi +# TODO: delegate pools stake to alwaysAbstain + +CC_ARGS=() +for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do + [ -e "$f" ] || continue + CC_ARGS+=( "--certificate-file" "$f" ) +done + +CC_SIGNING=() +for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do + [ -e "$f" ] || continue + CC_SIGNING+=( "--signing-key-file" "$f" ) +done + +DREPS_ARGS=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_ARGS+=( \ + "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + ) +done + +DREPS_SIGNING=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_SIGNING+=( \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + ) +done +cardano_cli_log conway transaction build-raw \ + --fee "$FEE" \ + "${TXINS[@]}" \ + "${CC_ARGS[@]}" \ + "${DREPS_ARGS[@]}" \ + --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --out-file "${V9_TX_NAME}-tx.txbody" + +cardano_cli_log conway transaction sign \ + --signing-key-file 
"$FAUCET_SKEY" \ + "${CC_SIGNING[@]}" \ + "${DREPS_SIGNING[@]}" \ + --testnet-magic "$NETWORK_MAGIC" \ + --tx-body-file "${V9_TX_NAME}-tx.txbody" \ + --out-file "${V9_TX_NAME}-tx.tx" + +cardano_cli_log conway transaction submit \ + --tx-file "${V9_TX_NAME}-tx.tx" \ + --testnet-magic "$NETWORK_MAGIC" + +sleep "$SUBMIT_DELAY" +if ! check_spend_success "${TXINS[@]}"; then + echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert + exit 1 +fi + +if [ -z "${NO_CC:-""}" ]; then cc_size="$(cardano-cli conway query committee-state --active --testnet-magic "$NETWORK_MAGIC" | grep -c '"status": "Active"')" [ "$cc_size" -ge "$NUM_CC" ] || \ diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index fb2806601..f1065b035 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -26,9 +26,11 @@ fi NUM_BFT_NODES=1 NUM_POOLS=%%NUM_POOLS%% NUM_CC=5 +NUM_DREPS=5 TX_SUBMISSION_DELAY=60 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 +DREP_DELEGATED=500000000000 BYRON_INIT_SUPPLY=10020000000 PROTOCOL_VERSION=9 if [ -n "${PV10:-""}" ]; then @@ -193,8 +195,9 @@ cardano_cli_log legacy genesis create-staked \ --supply-delegated "$DELEG_SUPPLY" \ --start-time "$START_TIME_SHELLEY" -# Create committee keys mkdir -p "$STATE_CLUSTER/governance_data" + +# Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ @@ -427,6 +430,52 @@ done rm -rf "$STATE_CLUSTER/shelley/create_staked" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + +for i in $(seq 1 "$NUM_DREPS"); do + # DRep keys + cardano_cli_log conway governance drep key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + + # 
DRep registration + cardano_cli_log conway governance drep registration-certificate \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --key-reg-deposit-amt "$DREP_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + + # delegatee payment keys + cardano_cli_log conway address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + + # delegatee stake keys + cardano_cli_log conway stake-address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + + # delegatee payment address + cardano_cli_log conway address build \ + --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + + # delegatee stake address + cardano_cli_log conway stake-address build \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + + # delegatee stake address registration and vote delegation cert + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" +done + # create scripts for cluster starting / stopping printf 
"#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" @@ -507,7 +556,7 @@ fi echo "Sleeping for initial Tx submission delay of $TX_SUBMISSION_DELAY seconds" sleep "$TX_SUBMISSION_DELAY" -echo "Re-registering pools" +echo "Re-registering pools, creating CC members and DReps" GENESIS_SIGNING=() for skey in "$STATE_CLUSTER"/shelley/genesis-keys/genesis?.skey; do @@ -519,15 +568,19 @@ for skey in "$STATE_CLUSTER"/shelley/delegate-keys/delegate?.skey; do DELEGATE_SIGNING+=("--signing-key-file" "$skey") done -# Transfer funds, register stake addresses and pools, all in one big transaction: +# Transfer funds, register stake addresses and pools, CC members, DReps, all in one big transaction: cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" +POOL_DEPOSIT="$((KEY_DEPOSIT * 2))" +POOL_NEEDED_AMOUNT="$(( (POOL_PLEDGE + POOL_DEPOSIT) * NUM_POOLS ))" +DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" +DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" +NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" + TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" -DEPOSITS="$((KEY_DEPOSIT * 2))" -NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" FEE_BUFFER=100000000 STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" @@ -578,13 +631,32 @@ for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do CC_SIGNING+=( "--signing-key-file" "$f" ) done -WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + ${#CC_SIGNING[@]} + 1))" +DREPS_ARGS=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_ARGS+=( \ + "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ + "--certificate-file" 
"$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + ) +done + +DREPS_SIGNING=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_SIGNING+=( \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + ) +done + +WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + ${#CC_SIGNING[@]} + ${#DREPS_SIGNING[@]} + 1))" cardano_cli_log conway transaction build \ "${TXINS[@]}" \ --change-address "$TXIN_ADDR" \ "${POOL_ARGS[@]}" \ "${CC_ARGS[@]}" \ + "${DREPS_ARGS[@]}" \ --witness-override "$WITNESS_COUNT" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" @@ -594,6 +666,7 @@ cardano_cli_log conway transaction sign \ "${GENESIS_SIGNING[@]}" \ "${DELEGATE_SIGNING[@]}" \ "${CC_SIGNING[@]}" \ + "${DREPS_SIGNING[@]}" \ --signing-key-file "$STATE_CLUSTER/shelley/genesis-utxo.skey" \ --testnet-magic "$NETWORK_MAGIC" \ --tx-body-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" \ diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index b8d6a4522..64344ee4e 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -156,44 +156,75 @@ def load_committee(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_uti return cc_members +def load_dreps(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_utils.DRepRegistration]: + """Load DReps from the state directory.""" + data_dir = cluster_obj.state_dir / GOV_DATA_DIR + deposit_amt = cluster_obj.conway_genesis["dRepDeposit"] + + dreps = [] + for vkey_file in 
sorted(data_dir.glob("default_drep_*_drep.vkey")): + skey_file = vkey_file.with_suffix(".skey") + fpath = vkey_file.parent + reg_cert = fpath / vkey_file.name.replace(".vkey", "_reg.cert") + drep_id = cluster_obj.g_conway_governance.drep.get_id( + drep_vkey_file=vkey_file, + out_format="hex", + ) + dreps.append( + governance_utils.DRepRegistration( + registration_cert=reg_cert, + key_pair=clusterlib.KeyPair(vkey_file=vkey_file, skey_file=skey_file), + drep_id=drep_id, + deposit=deposit_amt, + ) + ) + + return dreps + + +def load_drep_users(cluster_obj: clusterlib.ClusterLib) -> tp.List[clusterlib.PoolUser]: + """Load DReps users from the state directory.""" + data_dir = cluster_obj.state_dir / GOV_DATA_DIR + + users = [] + for stake_vkey_file in sorted(data_dir.glob("vote_stake_addr*_stake.vkey")): + fpath = stake_vkey_file.parent + + stake_skey_file = stake_vkey_file.with_suffix(".skey") + stake_address = clusterlib.read_address_from_file(stake_vkey_file.with_suffix(".addr")) + + payment_vkey_file = fpath / stake_vkey_file.name.replace("_stake.vkey", ".vkey") + payment_skey_file = payment_vkey_file.with_suffix(".skey") + payment_address = clusterlib.read_address_from_file(payment_vkey_file.with_suffix(".addr")) + + users.append( + clusterlib.PoolUser( + payment=clusterlib.AddressRecord( + address=payment_address, + vkey_file=payment_vkey_file, + skey_file=payment_skey_file, + ), + stake=clusterlib.AddressRecord( + address=stake_address, vkey_file=stake_vkey_file, skey_file=stake_skey_file + ), + ) + ) + + return users + + def setup( cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, - destination_dir: clusterlib.FileType = ".", ) -> governance_utils.GovernanceRecords: cc_members = load_committee(cluster_obj=cluster_obj) - vote_stake = create_vote_stake( - name_template="vote_stake", - cluster_manager=cluster_manager, - cluster_obj=cluster_obj, - no_of_addr=DREPS_NUM, - destination_dir=destination_dir, - ) - drep_reg_records, 
drep_users = governance_utils.create_dreps( - name_template="default_drep", - num=DREPS_NUM, - cluster_obj=cluster_obj, - payment_addr=vote_stake[0].payment, - pool_users=vote_stake, - destination_dir=destination_dir, - ) + drep_reg_records = load_dreps(cluster_obj=cluster_obj) + drep_users = load_drep_users(cluster_obj=cluster_obj) node_cold_records = [ cluster_manager.cache.addrs_data[pn]["cold_key_pair"] for pn in cluster_management.Resources.ALL_POOLS ] - cluster_obj.wait_for_new_epoch(padding_seconds=5) - - # Check delegation to DReps - deleg_state = clusterlib_utils.get_delegation_state(cluster_obj=cluster_obj) - drep_id = drep_reg_records[0].drep_id - stake_addr_hash = cluster_obj.g_stake_address.get_stake_vkey_hash( - stake_vkey_file=drep_users[0].stake.vkey_file - ) - governance_utils.check_drep_delegation( - deleg_state=deleg_state, drep_id=drep_id, stake_addr_hash=stake_addr_hash - ) - gov_data = save_default_governance( dreps_reg=drep_reg_records, drep_delegators=drep_users, @@ -201,6 +232,12 @@ def setup( pools_cold=node_cold_records, ) + # When using "fast" cluster, we need to wait for at least epoch 1 for DReps + # to be usable. DReps don't vote in PV9. + if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: + cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) + # TODO: check `cardano-cli conway query drep-stake-distribution` + return gov_data @@ -221,7 +258,6 @@ def _setup_gov() -> tp.Optional[governance_utils.GovernanceRecords]: return setup( cluster_obj=cluster_obj, cluster_manager=cluster_manager, - destination_dir=gov_data_dir, ) if not gov_data_store.exists(): From 5f99c1dc2eed6256b9d7c1c862f27e48bc5f666d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 14:01:24 +0100 Subject: [PATCH 035/168] feat(cluster): delegate pool reward to alwaysAbstain DRep Added functionality to delegate pool rewards to alwaysAbstain DRep in the hard fork cluster scripts. 
This includes generating vote-delegation certificates for each pool and incorporating them into the transaction build and sign steps. --- .../cluster_scripts/conway/start-cluster | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 4ffdb250b..bbc538066 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -1251,6 +1251,14 @@ PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json") # Register CC members, DReps, all in one big transaction: +# delegate pool reward to alwaysAbstain DRep +for i in $(seq 1 "$NUM_POOLS"); do + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ + --always-abstain \ + --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" +done + DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" STOP_TXIN_AMOUNT="$(( FEE + DREP_NEEDED_AMOUNT))" @@ -1274,8 +1282,6 @@ TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" V9_TX_NAME="setup_governance" -# TODO: delegate pools stake to alwaysAbstain - CC_ARGS=() for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do [ -e "$f" ] || continue @@ -1306,11 +1312,26 @@ for i in $(seq 1 "$NUM_DREPS"); do ) done +POOL_ARGS=() +for i in $(seq 1 "$NUM_POOLS"); do + POOL_ARGS+=( \ + "--certificate-file" "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" \ + ) +done + +POOL_SIGNING=() +for i in $(seq 1 "$NUM_POOLS"); do + POOL_SIGNING+=( \ + "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool${i}/reward.skey" \ + ) +done + cardano_cli_log conway transaction build-raw \ --fee "$FEE" \ "${TXINS[@]}" \ "${CC_ARGS[@]}" \ "${DREPS_ARGS[@]}" \ + 
"${POOL_ARGS[@]}" \ --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ --out-file "${V9_TX_NAME}-tx.txbody" @@ -1318,6 +1339,7 @@ cardano_cli_log conway transaction sign \ --signing-key-file "$FAUCET_SKEY" \ "${CC_SIGNING[@]}" \ "${DREPS_SIGNING[@]}" \ + "${POOL_SIGNING[@]}" \ --testnet-magic "$NETWORK_MAGIC" \ --tx-body-file "${V9_TX_NAME}-tx.txbody" \ --out-file "${V9_TX_NAME}-tx.tx" From e604f469527467a916e8b30d83c0d531f23444a5 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 14:27:16 +0100 Subject: [PATCH 036/168] feat(cluster): make pool cost configurable Added POOL_COST variable to make the pool cost configurable in both conway and conway_fast cluster scripts. Updated the pool cost parameter to use the new variable. --- cardano_node_tests/cluster_scripts/conway/start-cluster | 3 ++- cardano_node_tests/cluster_scripts/conway_fast/start-cluster | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index bbc538066..2cbc09061 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -30,6 +30,7 @@ TX_SUBMISSION_DELAY=60 PROPOSAL_DELAY=5 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 +POOL_COST=600 DREP_DELEGATED=500000000000 FEE=5000000 @@ -521,7 +522,7 @@ EoF --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ --pool-margin 0.35 \ - --pool-cost 600 \ + --pool-cost "$POOL_COST" \ --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --metadata-url "$METADATA_URL" \ diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index f1065b035..66e19cecf 100644 --- 
a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -30,6 +30,7 @@ NUM_DREPS=5 TX_SUBMISSION_DELAY=60 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 +POOL_COST=600 DREP_DELEGATED=500000000000 BYRON_INIT_SUPPLY=10020000000 PROTOCOL_VERSION=9 @@ -417,7 +418,7 @@ EoF --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ --pool-margin 0.35 \ - --pool-cost 600 \ + --pool-cost "$POOL_COST" \ --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ --metadata-url "$METADATA_URL" \ From 36c1e0c87025a5b9cbe56228c7076b1fce8bdb1c Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 14:39:26 +0100 Subject: [PATCH 037/168] feat(mainnet): add DRep support to mainnet cluster scripts --- .../mainnet_fast/start-cluster | 85 +++++++++++++++++-- 1 file changed, 79 insertions(+), 6 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 6e4219160..e4b139fc9 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -26,9 +26,11 @@ fi NUM_BFT_NODES=1 NUM_POOLS=%%NUM_POOLS%% NUM_CC=5 +NUM_DREPS=5 TX_SUBMISSION_DELAY=60 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 +DREP_DELEGATED=500000000000 BYRON_INIT_SUPPLY=10020000000 PROTOCOL_VERSION=9 if [ -n "${PV10:-""}" ]; then @@ -194,8 +196,9 @@ cardano_cli_log legacy genesis create-staked \ --supply-delegated "$DELEG_SUPPLY" \ --start-time "$START_TIME_SHELLEY" -# Create committee keys mkdir -p "$STATE_CLUSTER/governance_data" + +# Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ @@ -428,6 +431,52 @@ 
done rm -rf "$STATE_CLUSTER/shelley/create_staked" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + +for i in $(seq 1 "$NUM_DREPS"); do + # DRep keys + cardano_cli_log conway governance drep key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + + # DRep registration + cardano_cli_log conway governance drep registration-certificate \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --key-reg-deposit-amt "$DREP_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + + # delegatee payment keys + cardano_cli_log conway address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + + # delegatee stake keys + cardano_cli_log conway stake-address key-gen \ + --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + + # delegatee payment address + cardano_cli_log conway address build \ + --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + + # delegatee stake address + cardano_cli_log conway stake-address build \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + + # delegatee stake address registration and vote delegation cert + cardano_cli_log conway stake-address 
registration-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" +done + # create scripts for cluster starting / stopping printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" @@ -508,7 +557,7 @@ fi echo "Sleeping for initial Tx submission delay of $TX_SUBMISSION_DELAY seconds" sleep "$TX_SUBMISSION_DELAY" -echo "Re-registering pools" +echo "Re-registering pools, creating CC members and DReps" GENESIS_SIGNING=() for skey in "$STATE_CLUSTER"/shelley/genesis-keys/genesis?.skey; do @@ -520,15 +569,19 @@ for skey in "$STATE_CLUSTER"/shelley/delegate-keys/delegate?.skey; do DELEGATE_SIGNING+=("--signing-key-file" "$skey") done -# Transfer funds, register stake addresses and pools, all in one big transaction: +# Transfer funds, register stake addresses and pools, CC members, DReps, all in one big transaction: cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" +POOL_DEPOSIT="$((KEY_DEPOSIT * 2))" +POOL_NEEDED_AMOUNT="$(( (POOL_PLEDGE + POOL_DEPOSIT) * NUM_POOLS ))" +DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" +DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" +NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" + TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" -DEPOSITS="$((KEY_DEPOSIT * 2))" -NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" FEE_BUFFER=100000000 STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" @@ -579,13 +632,32 @@ 
for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do CC_SIGNING+=( "--signing-key-file" "$f" ) done -WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + ${#CC_SIGNING[@]} + 1))" +DREPS_ARGS=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_ARGS+=( \ + "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + ) +done + +DREPS_SIGNING=() +for i in $(seq 1 "$NUM_DREPS"); do + DREPS_SIGNING+=( \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ + "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + ) +done + +WITNESS_COUNT="$((${#POOL_SIGNING[@]} + ${#GENESIS_SIGNING[@]} + ${#DELEGATE_SIGNING[@]} + ${#CC_SIGNING[@]} + ${#DREPS_SIGNING[@]} + 1))" cardano_cli_log conway transaction build \ "${TXINS[@]}" \ --change-address "$TXIN_ADDR" \ "${POOL_ARGS[@]}" \ "${CC_ARGS[@]}" \ + "${DREPS_ARGS[@]}" \ --witness-override "$WITNESS_COUNT" \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" @@ -595,6 +667,7 @@ cardano_cli_log conway transaction sign \ "${GENESIS_SIGNING[@]}" \ "${DELEGATE_SIGNING[@]}" \ "${CC_SIGNING[@]}" \ + "${DREPS_SIGNING[@]}" \ --signing-key-file "$STATE_CLUSTER/shelley/genesis-utxo.skey" \ --testnet-magic "$NETWORK_MAGIC" \ --tx-body-file "$STATE_CLUSTER/shelley/transfer-register-delegate-tx.txbody" \ From 039953829556f0848e362fdfee9732646848199c Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 15:21:48 +0100 Subject: [PATCH 038/168] refactor(cluster-scripts): remove unneded loops --- .../cluster_scripts/conway/start-cluster | 15 
+++------------ .../cluster_scripts/conway_fast/start-cluster | 10 ++-------- .../cluster_scripts/mainnet_fast/start-cluster | 10 ++-------- 3 files changed, 7 insertions(+), 28 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 2cbc09061..c01886183 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -740,6 +740,7 @@ done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ TTL="$(get_slot 1000)" POOL_ARGS=() +POOL_SIGNING=() for i in $(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ @@ -748,10 +749,6 @@ for i in $(seq 1 "$NUM_POOLS"); do "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do POOL_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ @@ -1296,16 +1293,13 @@ for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do done DREPS_ARGS=() +DREPS_SIGNING=() for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ ) -done - -DREPS_SIGNING=() -for i in $(seq 1 "$NUM_DREPS"); do DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ @@ -1314,14 +1308,11 @@ for i in $(seq 1 "$NUM_DREPS"); do done POOL_ARGS=() +POOL_SIGNING=() for i in 
$(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" \ ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do POOL_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool${i}/reward.skey" \ ) diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 66e19cecf..08f4c907e 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -601,6 +601,7 @@ done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" POOL_ARGS=() +POOL_SIGNING=() for i in $(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ @@ -609,10 +610,6 @@ for i in $(seq 1 "$NUM_POOLS"); do "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do POOL_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ @@ -633,16 +630,13 @@ for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do done DREPS_ARGS=() +DREPS_SIGNING=() for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ ) -done - -DREPS_SIGNING=() -for i in $(seq 1 "$NUM_DREPS"); do DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ "--signing-key-file" 
"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index e4b139fc9..d8457a985 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -601,6 +601,7 @@ done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" POOL_ARGS=() +POOL_SIGNING=() for i in $(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ @@ -609,10 +610,6 @@ for i in $(seq 1 "$NUM_POOLS"); do "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ ) -done - -POOL_SIGNING=() -for i in $(seq 1 "$NUM_POOLS"); do POOL_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ @@ -633,16 +630,13 @@ for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.skey; do done DREPS_ARGS=() +DREPS_SIGNING=() for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ ) -done - -DREPS_SIGNING=() -for i in $(seq 1 "$NUM_DREPS"); do DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ From f31772117655426d20105696f25ef1b243c5ddfb Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 16:19:50 +0100 Subject: [PATCH 039/168] fix(governance): save 
governance data after DReps are ready --- cardano_node_tests/utils/governance_setup.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 64344ee4e..f273057fb 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -225,6 +225,14 @@ def setup( for pn in cluster_management.Resources.ALL_POOLS ] + # When using "fast" cluster, we need to wait for at least epoch 1 for DReps + # to be usable. DReps don't vote in PV9. + if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: + cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) + # TODO: check `cardano-cli conway query drep-stake-distribution` + + # The data needs to be saved only after DReps are ready. Other functions check + # presence of the pickle file. gov_data = save_default_governance( dreps_reg=drep_reg_records, drep_delegators=drep_users, @@ -232,12 +240,6 @@ def setup( pools_cold=node_cold_records, ) - # When using "fast" cluster, we need to wait for at least epoch 1 for DReps - # to be usable. DReps don't vote in PV9. - if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: - cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) - # TODO: check `cardano-cli conway query drep-stake-distribution` - return gov_data From e555e3549cbb6965cf1c2197c91fde547c802c60 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 28 Oct 2024 16:41:09 +0100 Subject: [PATCH 040/168] fix(tests): ensure reconfiguration finishes within a single epoch. Ensure reconfiguration finishes within a single epoch and take submission delay into account when restarting node services.
--- cardano_node_tests/tests/test_blocks.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index 173d48bbb..d4938824a 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -379,7 +379,7 @@ def reconf_for_dynamic(self) -> None: with open(supervisor_conf, "w", encoding="utf-8") as fp_out: fp_out.write(supervisor_conf_content) - cluster_nodes.reload_supervisor_config() + cluster_nodes.reload_supervisor_config(delay=0) @pytest.fixture def payment_addrs( @@ -447,8 +447,8 @@ def _save_state(curr_epoch: int) -> tp.Dict[str, int]: blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] return blocks_before - # The network needs to be at least in epoch 2 - cluster.wait_for_epoch(epoch_no=2) + # The network needs to be at least in epoch 1 + cluster.wait_for_epoch(epoch_no=1) # Wait for the epoch to be at least half way through and not too close to the end. 
# We want the original pool to have time to forge blocks in this epoch, before it becomes @@ -457,8 +457,9 @@ def _save_state(curr_epoch: int) -> tp.Dict[str, int]: clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=(cluster.epoch_length_sec // 2), - stop=-50, + stop=-(configuration.TX_SUBMISSION_DELAY + 20), ) + reconf_epoch = cluster.g_query.get_epoch() # The cluster needs respin after this point cluster_manager.set_needs_respin() @@ -468,9 +469,13 @@ cluster_nodes.restart_all_nodes() tip = cluster.g_query.get_tip() - epoch_end = cluster.time_to_epoch_end(tip) curr_epoch = int(tip["epoch"]) - reconf_epoch = curr_epoch + + assert ( + reconf_epoch == curr_epoch + ), "Failed to finish reconfiguration in single epoch, it would affect other checks" + + epoch_end = cluster.time_to_epoch_end(tip) curr_time = time.time() epoch_end_timestamp = curr_time + epoch_end test_end_timestamp = epoch_end_timestamp + (num_epochs * cluster.epoch_length_sec) From 2d5f61f1f2e49655e76578290459117302dcd426 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 09:40:29 +0100 Subject: [PATCH 041/168] refactor(governance): make governance setup more robust Caller needs to obtain a lock before attempting governance setup. We no longer depend on governance state file existence. --- cardano_node_tests/utils/governance_setup.py | 37 +++++++++++--------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index f273057fb..2e4306963 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -225,14 +225,6 @@ def setup( for pn in cluster_management.Resources.ALL_POOLS ] - # When using "fast" cluster, we need to wait for at least epoch 1 for DReps - # to be usable. DReps don't vote in PV9.
- if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: - cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) - # TODO: check `cardano-cli conway query drep-stake-distribution` - - # The data needs to be saved only after DReps are ready. Other functions check - # presence of the pickle file. gov_data = save_default_governance( dreps_reg=drep_reg_records, drep_delegators=drep_users, @@ -240,6 +232,12 @@ def setup( pools_cold=node_cold_records, ) + # When using "fast" cluster, we need to wait for at least epoch 1 for DReps + # to be usable. DReps don't vote in PV9. + if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: + cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) + # TODO: check `cardano-cli conway query drep-stake-distribution` + return gov_data @@ -253,16 +251,21 @@ def get_default_governance( governance_data = None def _setup_gov() -> tp.Optional[governance_utils.GovernanceRecords]: - with locking.FileLockIfXdist(str(cluster_env.state_dir / f".{GOV_DATA_STORE}.lock")): - if gov_data_store.exists(): - return None + if gov_data_store.exists(): + return None - return setup( - cluster_obj=cluster_obj, - cluster_manager=cluster_manager, - ) + gov_records = setup( + cluster_obj=cluster_obj, + cluster_manager=cluster_manager, + ) + + if not gov_data_store.exists(): + msg = f"File `{gov_data_store}` not found" + raise FileNotFoundError(msg) + + return gov_records - if not gov_data_store.exists(): + with locking.FileLockIfXdist(str(cluster_env.state_dir / f".{GOV_DATA_STORE}.lock")): governance_data = _setup_gov() gov_data_checksum = helpers.checksum(gov_data_store) @@ -271,7 +274,7 @@ def _setup_gov() -> tp.Optional[governance_utils.GovernanceRecords]: if fixture_cache.value: return fixture_cache.value # type: ignore - if not governance_data and gov_data_store.exists(): + if governance_data is None: with open(gov_data_store, "rb") as in_data: governance_data = pickle.load(in_data) From 
12d98f3472cd0de8ab69119f4cdcad0ae1056d38 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 09:49:43 +0100 Subject: [PATCH 042/168] refactor: remove unused DREPS_NUM constant The DREPS_NUM constant was removed from governance_setup.py as it was not being used anywhere in the code. This cleanup helps in maintaining the codebase by removing unnecessary variables. --- cardano_node_tests/utils/governance_setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 2e4306963..3e0fe3235 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -17,8 +17,6 @@ GOV_DATA_DIR = "governance_data" GOV_DATA_STORE = "governance_data.pickle" -DREPS_NUM = 5 - def _get_committee_val(data: tp.Dict[str, tp.Any]) -> tp.Dict[str, tp.Any]: return data.get("committee") or data.get("commitee") or {} From 8c497bef2ee66146541d455a980928c8e85debab Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 13:13:04 +0100 Subject: [PATCH 043/168] feat(cluster): add support for protocol version 10 to HF cluster - Introduced PV10 environment variable to enable protocol version 10. - Implemented hard fork proposal submission and voting for PV10. 
--- .../cluster_scripts/conway/start-cluster | 195 +++++++++++++++++- 1 file changed, 184 insertions(+), 11 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index c01886183..c283a36cb 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -6,6 +6,7 @@ # MIXED_P2P - if set, local cluster will use P2P for some nodes and legacy topology for others # UTXO_BACKEND - 'mem' or 'disk', default is 'mem' (or legacy) if unset # NO_CC - if set, will not create committee +# PV10 - if set, will use protocol version 10 # DRY_RUN - if set, will not start the cluster set -euo pipefail @@ -296,6 +297,13 @@ if [ -z "${NO_CC:-""}" ]; then rm -f "$STATE_CLUSTER/shelley/genesis.conway.json_jq" fi +KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" +GOV_ACTION_DEPOSIT="$(jq '.govActionDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ "$STATE_CLUSTER/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ @@ -537,11 +545,6 @@ mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" rmdir "$STATE_CLUSTER/shelley/utxo-keys" -KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" -DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" - for i in $(seq 1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ @@ -1239,6 +1242,8 @@ wait_for_epoch "$((BABBAGE_PV8_EPOCH + 1))" wait_for_era "Conway" +CONWAY_PV9_EPOCH="$(get_epoch)" + cardano_cli_log conway 
query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" @@ -1259,7 +1264,7 @@ done DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" -STOP_TXIN_AMOUNT="$(( FEE + DREP_NEEDED_AMOUNT))" +STOP_TXIN_AMOUNT="$((FEE + DREP_NEEDED_AMOUNT))" TXINS=() TXIN_COUNT=0 @@ -1278,7 +1283,7 @@ done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" -V9_TX_NAME="setup_governance" +V9_TX="$STATE_CLUSTER/governance_data/setup_governance" CC_ARGS=() for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do @@ -1325,7 +1330,7 @@ cardano_cli_log conway transaction build-raw \ "${DREPS_ARGS[@]}" \ "${POOL_ARGS[@]}" \ --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ - --out-file "${V9_TX_NAME}-tx.txbody" + --out-file "${V9_TX}-tx.txbody" cardano_cli_log conway transaction sign \ --signing-key-file "$FAUCET_SKEY" \ @@ -1333,11 +1338,11 @@ cardano_cli_log conway transaction sign \ "${DREPS_SIGNING[@]}" \ "${POOL_SIGNING[@]}" \ --testnet-magic "$NETWORK_MAGIC" \ - --tx-body-file "${V9_TX_NAME}-tx.txbody" \ - --out-file "${V9_TX_NAME}-tx.tx" + --tx-body-file "${V9_TX}-tx.txbody" \ + --out-file "${V9_TX}-tx.tx" cardano_cli_log conway transaction submit \ - --tx-file "${V9_TX_NAME}-tx.tx" \ + --tx-file "${V9_TX}-tx.tx" \ --testnet-magic "$NETWORK_MAGIC" sleep "$SUBMIT_DELAY" @@ -1353,4 +1358,172 @@ if [ -z "${NO_CC:-""}" ]; then { echo "The CC members were not registered, line $LINENO" >&2; exit 1; } # assert fi +# Hard fork to Conway protocol version 10 + +if [ -n "${PV10:-""}" ]; then + PV10_ACTION="$STATE_CLUSTER/governance_data/hardfork_pv10_action" + + echo "Submitting hard fork proposal to update to Conway PV10" + + cardano_cli_log conway governance action create-hardfork \ + --testnet \ + --governance-action-deposit "$GOV_ACTION_DEPOSIT" \ + --deposit-return-stake-verification-key-file 
"$STATE_CLUSTER/nodes/node-pool1/reward.vkey" \ + --anchor-url "http://www.hardfork-pv10.com" \ + --anchor-data-hash 5d372dca1a4cc90d7d16d966c48270e33e3aa0abcb0e78f0d5ca7ff330d2245d \ + --protocol-major-version 10 \ + --protocol-minor-version 0 \ + --out-file "$PV10_ACTION.action" + + STOP_TXIN_AMOUNT="$((FEE + GOV_ACTION_DEPOSIT))" + + TXINS=() + TXIN_COUNT=0 + TXIN_AMOUNT=0 + while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXIN_COUNT="$((TXIN_COUNT + 1))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then + break + fi + done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$FAUCET_ADDR" | + grep -E "lovelace \+ TxOutDatumNone$")" + + TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" + + cardano_cli_log conway transaction build-raw \ + --fee "$FEE" \ + "${TXINS[@]}" \ + --proposal-file "$PV10_ACTION.action" \ + --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --out-file "${PV10_ACTION}-tx.txbody" + + cardano_cli_log conway transaction sign \ + --signing-key-file "$FAUCET_SKEY" \ + --testnet-magic "$NETWORK_MAGIC" \ + --tx-body-file "${PV10_ACTION}-tx.txbody" \ + --out-file "${PV10_ACTION}-tx.tx" + + cardano_cli_log conway transaction submit \ + --tx-file "${PV10_ACTION}-tx.tx" \ + --testnet-magic "$NETWORK_MAGIC" + + sleep "$SUBMIT_DELAY" + if ! 
check_spend_success "${TXINS[@]}"; then + echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert + exit 1 + fi + + ACTION_TXID="$(cardano_cli_log conway transaction txid --tx-body-file "${PV10_ACTION}-tx.txbody")" + + echo "Voting on hard fork proposal" + + INDEX=0 + for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot.vkey; do + [ -e "$f" ] || continue + INDEX="$((INDEX + 1))" + cardano_cli_log conway governance vote create \ + --yes \ + --governance-action-tx-id "$ACTION_TXID" \ + --governance-action-index 0 \ + --cc-hot-verification-key-file "$f" \ + --anchor-url "http://www.cc-vote${INDEX}-hf-pv10.com" \ + --anchor-data-hash 5d372dca1a4cc90d7d16d966c48270e33e3aa0abcb0e78f0d5ca7ff330d2245d \ + --out-file "${PV10_ACTION}_cc${INDEX}.vote" + done + + INDEX=0 + for f in "$STATE_CLUSTER"/nodes/node-pool*/cold.vkey; do + INDEX="$((INDEX + 1))" + cardano_cli_log conway governance vote create \ + --yes \ + --governance-action-tx-id "$ACTION_TXID" \ + --governance-action-index 0 \ + --cold-verification-key-file "$f" \ + --anchor-url "http://www.spo-vote${INDEX}-hf-pv10.com" \ + --anchor-data-hash 5d372dca1a4cc90d7d16d966c48270e33e3aa0abcb0e78f0d5ca7ff330d2245d \ + --out-file "${PV10_ACTION}_spo${INDEX}.vote" + done + + # Submit the votes + + PV10_VOTES="$STATE_CLUSTER/governance_data/hardfork_pv10_votes" + + VOTE_FILES=() + for f in "$PV10_ACTION"_*.vote; do + VOTE_FILES+=( "--vote-file" "$f" ) + done + + CC_SIGNING=() + for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot.skey; do + [ -e "$f" ] || continue + CC_SIGNING+=( "--signing-key-file" "$f" ) + done + + POOL_SIGNING=() + for f in "$STATE_CLUSTER"/nodes/node-pool*/cold.skey; do + POOL_SIGNING+=( \ + "--signing-key-file" "$f" \ + ) + done + + STOP_TXIN_AMOUNT=$FEE + + TXINS=() + TXIN_COUNT=0 + TXIN_AMOUNT=0 + while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXIN_COUNT="$((TXIN_COUNT + 1))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ 
"$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then + break + fi + done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$FAUCET_ADDR" | + grep -E "lovelace \+ TxOutDatumNone$")" + + TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" + + cardano_cli_log conway transaction build-raw \ + --fee "$FEE" \ + "${TXINS[@]}" \ + "${VOTE_FILES[@]}" \ + --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --out-file "${PV10_VOTES}-tx.txbody" + + cardano_cli_log conway transaction sign \ + --signing-key-file "$FAUCET_SKEY" \ + "${CC_SIGNING[@]}" \ + "${POOL_SIGNING[@]}" \ + --testnet-magic "$NETWORK_MAGIC" \ + --tx-body-file "${PV10_VOTES}-tx.txbody" \ + --out-file "${PV10_VOTES}-tx.tx" + + cardano_cli_log conway transaction submit \ + --tx-file "${PV10_VOTES}-tx.tx" \ + --testnet-magic "$NETWORK_MAGIC" + + sleep "$SUBMIT_DELAY" + if ! check_spend_success "${TXINS[@]}"; then + echo "Failed to spend Tx inputs, line $LINENO" >&2 # assert + exit 1 + fi + + echo "Waiting for Conway PV10 to start" + wait_for_epoch "$((CONWAY_PV9_EPOCH + 2))" + + cardano_cli_log conway query protocol-parameters \ + --testnet-magic "$NETWORK_MAGIC" \ + --out-file "$STATE_CLUSTER/pparams.json" + + PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" + + [ "$PROTOCOL_VERSION" = 10 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert +fi + echo "Cluster started. Run \`$SCRIPT_DIR/stop-cluster\` to stop" From e3eb9a49a17de272bb263433d623db9875f98d68 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 14:23:30 +0100 Subject: [PATCH 044/168] refactor: extract repeated code into get_txins function This commit refactors the start-cluster scripts by extracting the repeated code for fetching transaction inputs into a new function called get_txins. This improves code readability and maintainability by reducing redundancy and centralizing the logic for querying UTXOs. 
--- .../cluster_scripts/conway/start-cluster | 191 ++++-------------- .../cluster_scripts/conway_fast/start-cluster | 39 ++-- .../mainnet_fast/start-cluster | 39 ++-- 3 files changed, 82 insertions(+), 187 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index c283a36cb..009f7c1c9 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -154,6 +154,28 @@ check_spend_success() { return 1 } +get_txins() { + local txin_addr stop_txin_amount txhash txix amount _ + + txin_addr="${1:?"Missing TxIn address"}" + stop_txin_amount="${2:?"Missing stop TxIn amount"}" + + stop_txin_amount="$((stop_txin_amount + 2000000))" + + TXINS=() + TXIN_AMOUNT=0 + while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ "$TXIN_AMOUNT" -ge "$stop_txin_amount" ]; then + break + fi + done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$txin_addr" | + grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" +} + ENABLE_SUBMIT_API="$(type cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" if [ -e "$SCRIPT_DIR/shell_env" ]; then @@ -725,21 +747,9 @@ DEPOSITS="$(jq '.protocolParams.poolDeposit + (2 * .protocolParams.keyDeposit)' NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE))" -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" +get_txins "$FAUCET_ADDR" "$STOP_TXIN_AMOUNT" +TXOUT_AMOUNT="$((TXIN_AMOUNT - 
STOP_TXIN_AMOUNT))" TTL="$(get_slot 1000)" POOL_ARGS=() @@ -759,8 +769,6 @@ for i in $(seq 1 "$NUM_POOLS"); do ) done -TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE - NEEDED_AMOUNT))" - cardano_cli_log shelley transaction build-raw \ --ttl "$TTL" \ --fee "$FEE" \ @@ -818,22 +826,7 @@ cardano_cli_log allegra query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -889,22 +882,7 @@ cardano_cli_log mary query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace$|[0-9]$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -961,22 +939,7 @@ cardano_cli_log alonzo query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi 
-done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1042,22 +1005,7 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 7 \ --protocol-minor-version 0 -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1114,22 +1062,7 @@ cardano_cli_log babbage query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1189,22 +1122,7 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 9 \ --protocol-minor-version 0 -STOP_TXIN_AMOUNT=$FEE - -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< 
"$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1266,20 +1184,7 @@ DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" STOP_TXIN_AMOUNT="$((FEE + DREP_NEEDED_AMOUNT))" -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" +get_txins "$FAUCET_ADDR" "$STOP_TXIN_AMOUNT" TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" @@ -1377,20 +1282,7 @@ if [ -n "${PV10:-""}" ]; then STOP_TXIN_AMOUNT="$((FEE + GOV_ACTION_DEPOSIT))" - TXINS=() - TXIN_COUNT=0 - TXIN_AMOUNT=0 - while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi - done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" + get_txins "$FAUCET_ADDR" "$STOP_TXIN_AMOUNT" TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" @@ -1470,24 +1362,9 @@ if [ -n "${PV10:-""}" ]; then ) done - STOP_TXIN_AMOUNT=$FEE - - TXINS=() - TXIN_COUNT=0 - TXIN_AMOUNT=0 - while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi - done <<< "$(cardano_cli_log latest query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - 
--address "$FAUCET_ADDR" | - grep -E "lovelace \+ TxOutDatumNone$")" + get_txins "$FAUCET_ADDR" "$FEE" - TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" + TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" cardano_cli_log conway transaction build-raw \ --fee "$FEE" \ diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 08f4c907e..eb68ddb44 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -92,6 +92,28 @@ check_spend_success() { return 1 } +get_txins() { + local txin_addr stop_txin_amount txhash txix amount _ + + txin_addr="${1:?"Missing TxIn address"}" + stop_txin_amount="${2:?"Missing stop TxIn amount"}" + + stop_txin_amount="$((stop_txin_amount + 2000000))" + + TXINS=() + TXIN_AMOUNT=0 + while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ "$TXIN_AMOUNT" -ge "$stop_txin_amount" ]; then + break + fi + done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$txin_addr" | + grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" +} + ENABLE_SUBMIT_API="$(command -v cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" if [ -e "$SCRIPT_DIR/shell_env" ]; then @@ -581,24 +603,11 @@ DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" -TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" FEE_BUFFER=100000000 +TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ "$TXIN_AMOUNT" -ge 
"$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$TXIN_ADDR" | - grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" +get_txins "$TXIN_ADDR" "$STOP_TXIN_AMOUNT" POOL_ARGS=() POOL_SIGNING=() diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index d8457a985..9d8c7f22d 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -92,6 +92,28 @@ check_spend_success() { return 1 } +get_txins() { + local txin_addr stop_txin_amount txhash txix amount _ + + txin_addr="${1:?"Missing TxIn address"}" + stop_txin_amount="${2:?"Missing stop TxIn amount"}" + + stop_txin_amount="$((stop_txin_amount + 2000000))" + + TXINS=() + TXIN_AMOUNT=0 + while read -r txhash txix amount _; do + TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" + TXINS+=("--tx-in" "${txhash}#${txix}") + if [ "$TXIN_AMOUNT" -ge "$stop_txin_amount" ]; then + break + fi + done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ + "$NETWORK_MAGIC" \ + --address "$txin_addr" | + grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" +} + ENABLE_SUBMIT_API="$(command -v cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" if [ -e "$SCRIPT_DIR/shell_env" ]; then @@ -581,24 +603,11 @@ DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" -TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" FEE_BUFFER=100000000 +TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE_BUFFER))" -TXINS=() -TXIN_COUNT=0 -TXIN_AMOUNT=0 -while read -r txhash txix amount _; do - TXIN_AMOUNT="$((TXIN_AMOUNT + amount))" - TXIN_COUNT="$((TXIN_COUNT + 1))" - TXINS+=("--tx-in" "${txhash}#${txix}") - if [ 
"$TXIN_AMOUNT" -ge "$STOP_TXIN_AMOUNT" ]; then - break - fi -done <<< "$(cardano_cli_log conway query utxo --testnet-magic \ - "$NETWORK_MAGIC" \ - --address "$TXIN_ADDR" | - grep -E "lovelace$|[0-9]$|lovelace \+ TxOutDatumNone$")" +get_txins "$TXIN_ADDR" "$STOP_TXIN_AMOUNT" POOL_ARGS=() POOL_SIGNING=() From 8a7c7eca5ed062e1603edfd64464cfb9956402c7 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 14:44:08 +0100 Subject: [PATCH 045/168] feat(cluster): refactor deposit calculations Refactor the deposit calculations in cluster scripts to improve readability and maintainability. Extracted key and pool deposit values from genesis files and simplified the arithmetic operations for needed amounts. --- .../cluster_scripts/conway/start-cluster | 11 +++++----- .../cluster_scripts/conway_fast/start-cluster | 21 +++++++++---------- .../mainnet_fast/start-cluster | 21 +++++++++---------- 3 files changed, 26 insertions(+), 27 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 009f7c1c9..be881f60f 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -321,6 +321,8 @@ fi KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ < "$STATE_CLUSTER/shelley/genesis.json")" +POOL_DEPOSIT="$(jq '.protocolParams.poolDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" DREP_DEPOSIT="$(jq '.dRepDeposit' \ < "$STATE_CLUSTER/shelley/genesis.conway.json")" GOV_ACTION_DEPOSIT="$(jq '.govActionDeposit' \ @@ -742,8 +744,7 @@ cardano_cli_log shelley query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -DEPOSITS="$(jq '.protocolParams.poolDeposit + (2 * .protocolParams.keyDeposit)' \ - < "$STATE_CLUSTER/shelley/genesis.json")" +DEPOSITS="$((POOL_DEPOSIT + (2 * KEY_DEPOSIT) ))" NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" 
STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE))" @@ -1180,9 +1181,9 @@ for i in $(seq 1 "$NUM_POOLS"); do --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" done -DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" -DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" -STOP_TXIN_AMOUNT="$((FEE + DREP_NEEDED_AMOUNT))" +DEPOSITS="$((KEY_DEPOSIT + DREP_DEPOSIT))" +NEEDED_AMOUNT="$(( (DREP_DELEGATED + DEPOSITS) * NUM_DREPS ))" +STOP_TXIN_AMOUNT="$((FEE + NEEDED_AMOUNT))" get_txins "$FAUCET_ADDR" "$STOP_TXIN_AMOUNT" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index eb68ddb44..ad151e2c7 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -271,6 +271,11 @@ cardano_cli_log conway address build --payment-verification-key-file \ mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/stake-delegator-keys" +KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ "$STATE_CLUSTER/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ @@ -337,9 +342,6 @@ for i in $(seq 1 $NUM_BFT_NODES); do echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" done -KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" - for i in $(seq 1 "$NUM_POOLS"); do mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" @@ -453,9 +455,6 @@ done rm -rf "$STATE_CLUSTER/shelley/create_staked" -DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" - for i in $(seq 
1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ @@ -597,11 +596,11 @@ cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -POOL_DEPOSIT="$((KEY_DEPOSIT * 2))" -POOL_NEEDED_AMOUNT="$(( (POOL_PLEDGE + POOL_DEPOSIT) * NUM_POOLS ))" -DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" -DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" -NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" +DEPOSIT_FOR_POOLS="$((KEY_DEPOSIT * 2))" +NEEDED_AMOUNT_POOLS="$(( (POOL_PLEDGE + DEPOSIT_FOR_POOLS) * NUM_POOLS ))" +DEPOSIT_FOR_DREPS="$((KEY_DEPOSIT + DREP_DEPOSIT))" +NEEDED_AMOUNT_DREPS="$(( (DREP_DELEGATED + DEPOSIT_FOR_DREPS) * NUM_DREPS ))" +NEEDED_AMOUNT="$((NEEDED_AMOUNT_POOLS + NEEDED_AMOUNT_DREPS))" FEE_BUFFER=100000000 TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 9d8c7f22d..d30505537 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -271,6 +271,11 @@ cardano_cli_log conway address build --payment-verification-key-file \ mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/stake-delegator-keys" +KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.json")" +DREP_DEPOSIT="$(jq '.dRepDeposit' \ + < "$STATE_CLUSTER/shelley/genesis.conway.json")" + BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ "$STATE_CLUSTER/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ @@ -337,9 +342,6 @@ for i in $(seq 1 $NUM_BFT_NODES); do echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" done -KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < 
"$STATE_CLUSTER/shelley/genesis.json")" - for i in $(seq 1 "$NUM_POOLS"); do mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" @@ -453,9 +455,6 @@ done rm -rf "$STATE_CLUSTER/shelley/create_staked" -DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" - for i in $(seq 1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ @@ -597,11 +596,11 @@ cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/pparams.json" -POOL_DEPOSIT="$((KEY_DEPOSIT * 2))" -POOL_NEEDED_AMOUNT="$(( (POOL_PLEDGE + POOL_DEPOSIT) * NUM_POOLS ))" -DREP_DEPOSIT="$((KEY_DEPOSIT + DREP_DEPOSIT))" -DREP_NEEDED_AMOUNT="$(( (DREP_DELEGATED + DREP_DEPOSIT) * NUM_DREPS ))" -NEEDED_AMOUNT="$(( POOL_NEEDED_AMOUNT + DREP_NEEDED_AMOUNT))" +DEPOSIT_FOR_POOLS="$((KEY_DEPOSIT * 2))" +NEEDED_AMOUNT_POOLS="$(( (POOL_PLEDGE + DEPOSIT_FOR_POOLS) * NUM_POOLS ))" +DEPOSIT_FOR_DREPS="$((KEY_DEPOSIT + DREP_DEPOSIT))" +NEEDED_AMOUNT_DREPS="$(( (DREP_DELEGATED + DEPOSIT_FOR_DREPS) * NUM_DREPS ))" +NEEDED_AMOUNT="$((NEEDED_AMOUNT_POOLS + NEEDED_AMOUNT_DREPS))" FEE_BUFFER=100000000 TXIN_ADDR="$(<"$STATE_CLUSTER"/shelley/genesis-utxo.addr)" From 1cefae8d20a99ccea7c301f5b6868f133bb5660f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 17:45:39 +0100 Subject: [PATCH 046/168] feat: remove redundant protocol-parameters queries This commit removes redundant cardano-cli protocol-parameters queries from various cluster start scripts. The queries were previously used to calculate fee, but they are no longer necessary for the current workflow. This change helps streamline the scripts and improve their efficiency. 
--- .../cluster_scripts/conway/start-cluster | 34 ++++--------------- .../cluster_scripts/conway_fast/start-cluster | 4 --- .../mainnet_fast/start-cluster | 4 --- 3 files changed, 6 insertions(+), 36 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index be881f60f..70bd91c5a 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -515,6 +515,12 @@ for i in $(seq 1 "$NUM_POOLS"); do --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/op.cert" + # delegate pool reward to alwaysAbstain DRep + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ + --always-abstain \ + --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" + POOL_NAME="TestPool$i" POOL_DESC="Test Pool $i" POOL_TICKER="TP$i" @@ -740,10 +746,6 @@ cardano_cli_log legacy governance create-update-proposal \ # Transfer funds, register pools and delegations, submit update proposal, all in one big transaction: -cardano_cli_log shelley query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - DEPOSITS="$((POOL_DEPOSIT + (2 * KEY_DEPOSIT) ))" NEEDED_AMOUNT="$(( (POOL_PLEDGE + DEPOSITS) * NUM_POOLS ))" STOP_TXIN_AMOUNT="$((NEEDED_AMOUNT + FEE))" @@ -823,10 +825,6 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 4 \ --protocol-minor-version 0 -cardano_cli_log allegra query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -879,10 +877,6 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 5 \ 
--protocol-minor-version 0 -cardano_cli_log mary query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -936,10 +930,6 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 6 \ --protocol-minor-version 0 -cardano_cli_log alonzo query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1059,10 +1049,6 @@ cardano_cli_log legacy governance create-update-proposal \ --protocol-major-version 8 \ --protocol-minor-version 0 -cardano_cli_log babbage query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - get_txins "$FAUCET_ADDR" "$FEE" TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" @@ -1173,14 +1159,6 @@ PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json") # Register CC members, DReps, all in one big transaction: -# delegate pool reward to alwaysAbstain DRep -for i in $(seq 1 "$NUM_POOLS"); do - cardano_cli_log conway stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ - --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" -done - DEPOSITS="$((KEY_DEPOSIT + DREP_DEPOSIT))" NEEDED_AMOUNT="$(( (DREP_DELEGATED + DEPOSITS) * NUM_DREPS ))" STOP_TXIN_AMOUNT="$((FEE + NEEDED_AMOUNT))" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index ad151e2c7..a9692bb3f 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -592,10 +592,6 @@ done # Transfer funds, register stake addresses and pools, CC members, DReps, all in one big transaction: 
-cardano_cli_log conway query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - DEPOSIT_FOR_POOLS="$((KEY_DEPOSIT * 2))" NEEDED_AMOUNT_POOLS="$(( (POOL_PLEDGE + DEPOSIT_FOR_POOLS) * NUM_POOLS ))" DEPOSIT_FOR_DREPS="$((KEY_DEPOSIT + DREP_DEPOSIT))" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index d30505537..fb423efd5 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -592,10 +592,6 @@ done # Transfer funds, register stake addresses and pools, CC members, DReps, all in one big transaction: -cardano_cli_log conway query protocol-parameters \ - --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" - DEPOSIT_FOR_POOLS="$((KEY_DEPOSIT * 2))" NEEDED_AMOUNT_POOLS="$(( (POOL_PLEDGE + DEPOSIT_FOR_POOLS) * NUM_POOLS ))" DEPOSIT_FOR_DREPS="$((KEY_DEPOSIT + DREP_DEPOSIT))" From 8f9804d81e4b9a71f56c6c47a81e0c6b777ad0a5 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 29 Oct 2024 17:51:32 +0100 Subject: [PATCH 047/168] feat(tests): add test for top-level `query tip` command Added a new test to verify that the `query tip` command is available at the top level in the CLI. This test ensures that the command is accessible and returns the expected keys in the output. Also, added a blocker for issue 953 in the `cardano-cli` repository. 
--- cardano_node_tests/tests/issues.py | 6 ++++++ cardano_node_tests/tests/test_cli.py | 26 ++++++++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/cardano_node_tests/tests/issues.py b/cardano_node_tests/tests/issues.py index ac15cc6a9..f76632ffa 100644 --- a/cardano_node_tests/tests/issues.py +++ b/cardano_node_tests/tests/issues.py @@ -92,6 +92,12 @@ fixed_in="10.0.0.1", # Fixed in some release after 10.0.0.0 message="build command doesn't balance key deposit.", ) +cli_953 = blockers.GH( + issue=953, + repo="IntersectMBO/cardano-cli", + fixed_in="10.1.2.0", # Fixed in some release after 10.1.1.0 + message="query tip is not a top level command.", +) consensus_973 = blockers.GH( issue=973, diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index 57a195c83..0c986adc5 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -61,6 +61,32 @@ def test_protocol_mode(self, cluster: clusterlib.ClusterLib): ] ) + @allure.link(helpers.get_vcs_link()) + @pytest.mark.smoke + @pytest.mark.testnets + def test_toplevel_query_tip(self, cluster: clusterlib.ClusterLib): + """Check that `query tip` is available in top level.""" + common.get_test_id(cluster) + + try: + cli_out = cluster.cli( + [ + "cardano-cli", + "query", + "tip", + *cluster.magic_args, + ], + add_default_args=False, + ) + except clusterlib.CLIError as exc: + if "Invalid argument `query'" in str(exc): + issues.cli_953.finish_test() + raise + + tip = json.loads(cli_out.stdout.decode("utf-8").strip()) + keys = set(tip) + assert {"block", "epoch", "era", "hash", "slot"}.issubset(keys) + @allure.link(helpers.get_vcs_link()) @pytest.mark.smoke @pytest.mark.testnets From cc9c974fd9531846dcda5f554cc8a4f3b514a536 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 10:33:36 +0100 Subject: [PATCH 048/168] feat(tests): parametrize CLI query tests Refactor the `test_toplevel_query_tip` test to `test_toplevel_queries` 
and parametrize it to cover multiple CLI query commands. This change improves test coverage by checking the availability of various queries at the top level. Commands covered: - protocol-parameters - tip - stake-pools - stake-distribution - stake-address-info - utxo - ledger-state - protocol-state - stake-snapshot - leadership-schedule - kes-period-info - pool-state - tx-mempool - slot-number --- cardano_node_tests/tests/test_cli.py | 42 ++++++++++++++++++---------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index 0c986adc5..7467c532e 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -62,30 +62,42 @@ def test_protocol_mode(self, cluster: clusterlib.ClusterLib): ) @allure.link(helpers.get_vcs_link()) + @pytest.mark.parametrize( + "command", + ( + "protocol-parameters", + "tip", + "stake-pools", + "stake-distribution", + "stake-address-info", + "utxo", + "ledger-state", + "protocol-state", + "stake-snapshot", + "leadership-schedule", + "kes-period-info", + "pool-state", + "tx-mempool", + "slot-number", + ), + ) @pytest.mark.smoke @pytest.mark.testnets - def test_toplevel_query_tip(self, cluster: clusterlib.ClusterLib): - """Check that `query tip` is available in top level.""" + def test_toplevel_queries(self, cluster: clusterlib.ClusterLib, command: str): + """Check that various queries are available in top level.""" common.get_test_id(cluster) try: - cli_out = cluster.cli( - [ - "cardano-cli", - "query", - "tip", - *cluster.magic_args, - ], + cluster.cli( + ["cardano-cli", "query", command], add_default_args=False, ) except clusterlib.CLIError as exc: - if "Invalid argument `query'" in str(exc): + str_exc = str(exc) + if "Invalid argument `query'" in str_exc: issues.cli_953.finish_test() - raise - - tip = json.loads(cli_out.stdout.decode("utf-8").strip()) - keys = set(tip) - assert {"block", "epoch", "era", "hash", 
"slot"}.issubset(keys) + elif f"Usage: cardano-cli query {command}" not in str_exc: + raise @allure.link(helpers.get_vcs_link()) @pytest.mark.smoke From 962726ac9a8f6d2d31e50090e601bdaf11172d66 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 16:17:00 +0100 Subject: [PATCH 049/168] feat(cluster): create combined certs only for Conway PV10 Combined certs are supported only by cardano-cli bundled with node 10.1.1. Create old certificates on PV9 for backwards compatibility. --- .../cluster_scripts/conway/start-cluster | 44 +++++++++++++------ .../cluster_scripts/conway_fast/start-cluster | 40 +++++++++++------ .../mainnet_fast/start-cluster | 40 +++++++++++------ 3 files changed, 85 insertions(+), 39 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 70bd91c5a..8364d3443 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -487,7 +487,25 @@ for i in $(seq 1 "$NUM_POOLS"); do # stake reward address registration cert cardano_cli_log shelley stake-address registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.shelley_reg.cert" + # stake reward delegation to alwaysAbstain DRep cert + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ + --always-abstain \ + --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" + if [ -n "${PV10:-""}" ]; then + # stake reward address registration and vote delegation cert, to be used later in tests + # when re-registering the reward address in Conway + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + 
--stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --always-abstain \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + else + ln \ + "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.shelley_reg.cert" \ + "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + fi # pool keys cardano_cli_log latest node key-gen \ @@ -515,12 +533,6 @@ for i in $(seq 1 "$NUM_POOLS"); do --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/op.cert" - # delegate pool reward to alwaysAbstain DRep - cardano_cli_log conway stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ - --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" - POOL_NAME="TestPool$i" POOL_DESC="Test Pool $i" POOL_TICKER="TP$i" @@ -610,12 +622,17 @@ for i in $(seq 1 "$NUM_DREPS"); do --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" - # delegatee stake address registration and vote delegation cert - cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + # delegatee stake address registration cert + cardano_cli_log conway stake-address registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + + # delegatee vote delegation cert + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file 
"$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" done # create scripts for cluster starting / stopping @@ -761,7 +778,7 @@ for i in $(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/nodes/node-pool$i/owner.addr")+$POOL_PLEDGE" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.shelley_reg.cert" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/register.cert" \ "--certificate-file" "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" \ ) @@ -1187,7 +1204,8 @@ for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" \ ) DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index a9692bb3f..a0a644349 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -389,19 +389,33 @@ for i in $(seq 1 "$NUM_POOLS"); do --key-reg-deposit-amt "$KEY_DEPOSIT" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" - # stake reward address 
registration cert - cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --always-abstain \ - --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - - # owner stake address delegation certs - cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + if [ -n "${PV10:-""}" ]; then + # stake reward address registration and vote delegation cert + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --always-abstain \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + + # owner stake address stake and vote delegation cert + cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --always-abstain \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + else + # stake reward address registration cert + cardano_cli_log conway stake-address registration-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + + # owner stake address stake delegation cert + cardano_cli_log conway stake-address stake-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + 
--cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + fi POOL_NAME="TestPool$i" POOL_DESC="Test Pool $i" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index fb423efd5..1e452790f 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -389,19 +389,33 @@ for i in $(seq 1 "$NUM_POOLS"); do --key-reg-deposit-amt "$KEY_DEPOSIT" \ --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" - # stake reward address registration cert - cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --always-abstain \ - --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" - - # owner stake address delegation certs - cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + if [ -n "${PV10:-""}" ]; then + # stake reward address registration and vote delegation cert + cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --always-abstain \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + + # owner stake address stake and vote delegation cert + cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" 
\ + --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --always-abstain \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + else + # stake reward address registration cert + cardano_cli_log conway stake-address registration-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --key-reg-deposit-amt "$KEY_DEPOSIT" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + + # owner stake address stake delegation cert + cardano_cli_log conway stake-address stake-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + fi POOL_NAME="TestPool$i" POOL_DESC="Test Pool $i" From dd6b97d0dec5b74881ad209ba3bc3e9a8adca0bd Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 16:25:19 +0100 Subject: [PATCH 050/168] feat(tests): run test_consensus_reached only on mainnet The `securityParam` is too low on local testnet with short epochs. Also run `test_permanent_fork` only on testnet with short epochs, as `securityParam` is too high on mainnet like testnet. --- cardano_node_tests/tests/test_rollback.py | 23 ++++++++++++++++++----- scripts/test_rollbacks.sh | 14 +++++++------- 2 files changed, 25 insertions(+), 12 deletions(-) diff --git a/cardano_node_tests/tests/test_rollback.py b/cardano_node_tests/tests/test_rollback.py index 05b77f269..cca06a6ad 100644 --- a/cardano_node_tests/tests/test_rollback.py +++ b/cardano_node_tests/tests/test_rollback.py @@ -198,6 +198,14 @@ def node_wait_for_block( os.environ["CARDANO_NODE_SOCKET_PATH"] = orig_socket @allure.link(helpers.get_vcs_link()) + # There's a submission delay of 60 sec. 
Therefore on testnet with low `securityParam`, + # it is not possible to restart the nodes, submit transaction, and still be under + # `securityParam` blocks. + @pytest.mark.skipif( + "mainnet_fast" not in configuration.SCRIPTS_DIRNAME, + reason="cannot run on testnet with low `securityParam`", + ) + @pytest.mark.long def test_consensus_reached( self, cluster_manager: cluster_management.ClusterManager, @@ -306,11 +314,12 @@ def test_consensus_reached( ), "The Tx number 3 doesn't exist on cluster 2" # Wait for new block to let chains progress. - # We can't wait for too long, because if both clusters has produced more than - # `securityParam` number of blocks while the topology was fragmented, it would not be - # possible to bring the the clusters back into global consensus. On local cluster, - # the value of `securityParam` is 10. - cluster.wait_for_new_block() + # If both clusters has produced more than `securityParam` number of blocks while + # the topology was fragmented, it would not be possible to bring the the clusters + # back into global consensus. + # On fast epoch local cluster, the value of `securityParam` is 10. + # On mainnet, the value of `securityParam` is 2160. 
+ cluster.wait_for_new_block(new_blocks=15) if ROLLBACK_PAUSE: print("PHASE2: cluster with separated into cluster1 and cluster2") @@ -361,6 +370,10 @@ def test_consensus_reached( ), "Neither Tx number 2 nor Tx number 3 was rolled back" @allure.link(helpers.get_vcs_link()) + @pytest.mark.skipif( + "mainnet" in configuration.SCRIPTS_DIRNAME, + reason="cannot run on testnet with high `securityParam`", + ) @pytest.mark.long def test_permanent_fork( self, diff --git a/scripts/test_rollbacks.sh b/scripts/test_rollbacks.sh index 4005339fa..c7ba83a72 100755 --- a/scripts/test_rollbacks.sh +++ b/scripts/test_rollbacks.sh @@ -15,14 +15,14 @@ set -euo pipefail TOP_DIR="$(readlink -m "${0%/*}/..")" -export NUM_POOLS="${NUM_POOLS:-"10"}" +export NUM_POOLS="${NUM_POOLS:-"10"}" CLUSTERS_COUNT=1 TEST_THREADS=0 if [ -n "${INTERACTIVE:-""}" ]; then - export ROLLBACK_PAUSE=1 SCRIPTS_DIRNAME="${SCRIPTS_DIRNAME:-mainnet_fast}" PYTEST_ARGS="-s -k test_consensus_reached" + export ROLLBACK_PAUSE=1 SCRIPTS_DIRNAME="mainnet_fast" PYTEST_ARGS="-s -k test_consensus_reached" + "$TOP_DIR/.github/regression.sh" else - export SCRIPTS_DIRNAME="${SCRIPTS_DIRNAME:-conway_fast}" PYTEST_ARGS="-k TestRollback" + export SCRIPTS_DIRNAME="conway_fast" PYTEST_ARGS="-k test_permanent_fork" + "$TOP_DIR/.github/regression.sh" || exit "$?" + export SCRIPTS_DIRNAME="mainnet_fast" PYTEST_ARGS="-k test_consensus_reached" + "$TOP_DIR/.github/regression.sh" fi - -export CLUSTERS_COUNT=1 TEST_THREADS=0 - -"$TOP_DIR/.github/regression.sh" From 50a97bd10e0a215b4fd869aeca353c1f68d55bdf Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 17:13:50 +0100 Subject: [PATCH 051/168] feat(tests): force mainnet_fast for reconnect test Added a skip condition to the `test_metrics_reconnect` test to ensure it does not run on testnet with short epochs. 
--- cardano_node_tests/tests/test_reconnect.py | 4 ++++ scripts/test_node_reconnect.sh | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/test_reconnect.py b/cardano_node_tests/tests/test_reconnect.py index 2f5a810d3..25593695f 100644 --- a/cardano_node_tests/tests/test_reconnect.py +++ b/cardano_node_tests/tests/test_reconnect.py @@ -242,6 +242,10 @@ def _assert(tx_outputs: tp.List[clusterlib.TxRawOutput]) -> None: ) @pytest.mark.skipif(configuration.NUM_POOLS != 3, reason="`NUM_POOLS` must be 3") @pytest.mark.skipif(configuration.ENABLE_LEGACY, reason="Works only with P2P topology") + @pytest.mark.skipif( + "mainnet_fast" not in configuration.SCRIPTS_DIRNAME, + reason="Cannot run on testnet with short epochs", + ) def test_metrics_reconnect( self, cluster_manager: cluster_management.ClusterManager, diff --git a/scripts/test_node_reconnect.sh b/scripts/test_node_reconnect.sh index f3af1b73d..1719c07ef 100755 --- a/scripts/test_node_reconnect.sh +++ b/scripts/test_node_reconnect.sh @@ -11,7 +11,7 @@ TOP_DIR="$(readlink -m "${0%/*}/..")" export \ CLUSTERS_COUNT=1 \ TEST_THREADS=0 \ - SCRIPTS_DIRNAME="${SCRIPTS_DIRNAME:-mainnet_fast}" \ + SCRIPTS_DIRNAME="mainnet_fast" \ PYTEST_ARGS="-s -k TestNodeReconnect" "$TOP_DIR/.github/regression.sh" From 83f7d7683c858995263b8e96e7d770b20816f9e9 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 17:44:20 +0100 Subject: [PATCH 052/168] feat(cluster): split registration and delegation certs Split the registration and vote delegation certificates into separate commands for compatibility with older node releases. Updated the scripts to generate individual registration and vote delegation certificates for each delegatee stake address. 
--- .../cluster_scripts/conway_fast/start-cluster | 16 +++++++++++----- .../cluster_scripts/mainnet_fast/start-cluster | 16 +++++++++++----- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index a0a644349..b26dd7efd 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -504,12 +504,17 @@ for i in $(seq 1 "$NUM_DREPS"); do --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" - # delegatee stake address registration and vote delegation cert - cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + # delegatee stake address registration cert + cardano_cli_log conway stake-address registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + + # delegatee vote delegation cert + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" done # create scripts for cluster starting / stopping @@ -653,7 +658,8 @@ for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" 
"$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" \ ) DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 1e452790f..98520d728 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -504,12 +504,17 @@ for i in $(seq 1 "$NUM_DREPS"); do --testnet-magic "$NETWORK_MAGIC" \ --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" - # delegatee stake address registration and vote delegation cert - cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ + # delegatee stake address registration cert + cardano_cli_log conway stake-address registration-certificate \ --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + + # delegatee vote delegation cert + cardano_cli_log conway stake-address vote-delegation-certificate \ + --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" 
done # create scripts for cluster starting / stopping @@ -653,7 +658,8 @@ for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg_vote_deleg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" \ + "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" \ ) DREPS_SIGNING+=( \ "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ From adcdc5108c310eda504b19747461d6d7c0655783 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 30 Oct 2024 19:07:51 +0100 Subject: [PATCH 053/168] fix(node_upgrade_pytest.sh): update cardano-cli command Updated the cardano-cli command to include 'latest' for querying the tip with the testnet magic. 
--- .github/node_upgrade_pytest.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index d7ca87cf6..d96b075ce 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -218,7 +218,7 @@ elif [ "$1" = "step2" ]; then # waiting to make sure the chain is synced NETWORK_MAGIC="$(jq '.networkMagic' "$STATE_CLUSTER/shelley/genesis.json")" for _ in {1..10}; do - sync_progress="$(cardano-cli query tip --testnet-magic "$NETWORK_MAGIC" | jq -r '.syncProgress')" + sync_progress="$(cardano-cli latest query tip --testnet-magic "$NETWORK_MAGIC" | jq -r '.syncProgress')" if [ "$sync_progress" = "100.00" ]; then break fi From e7207e3951676aedd477e2c56abc722fa454e947 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 09:50:03 +0100 Subject: [PATCH 054/168] feat: add PID to startup messages for better tracking This commit updates the startup messages for cardano-node and cardano-submit-api scripts to include the process ID (PID). This enhancement helps in better tracking and debugging of running processes by providing the PID in the log messages. 
--- cardano_node_tests/cluster_scripts/conway/cardano-node-bft1 | 2 +- .../cluster_scripts/conway/run-cardano-submit-api | 2 +- .../cluster_scripts/conway/template-cardano-node-pool | 2 +- .../cluster_scripts/conway_fast/cardano-node-bft1 | 2 +- .../cluster_scripts/conway_fast/run-cardano-submit-api | 2 +- .../cluster_scripts/conway_fast/template-cardano-node-pool | 2 +- .../cluster_scripts/mainnet_fast/cardano-node-bft1 | 2 +- .../cluster_scripts/mainnet_fast/run-cardano-submit-api | 2 +- .../cluster_scripts/mainnet_fast/template-cardano-node-pool | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/cardano-node-bft1 b/cardano_node_tests/cluster_scripts/conway/cardano-node-bft1 index b9fa142cc..f21a09f3a 100644 --- a/cardano_node_tests/cluster_scripts/conway/cardano-node-bft1 +++ b/cardano_node_tests/cluster_scripts/conway/cardano-node-bft1 @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json" diff --git a/cardano_node_tests/cluster_scripts/conway/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/conway/run-cardano-submit-api index fb4825d15..9611361ff 100644 --- a/cardano_node_tests/cluster_scripts/conway/run-cardano-submit-api +++ b/cardano_node_tests/cluster_scripts/conway/run-cardano-submit-api @@ -2,7 +2,7 @@ testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" -echo "Starting cardano-submit-api: cardano-submit-api" +echo "Starting cardano-submit-api with PID $$: cardano-submit-api" echo "--config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" echo "--listen-address 127.0.0.1" diff --git 
a/cardano_node_tests/cluster_scripts/conway/template-cardano-node-pool b/cardano_node_tests/cluster_scripts/conway/template-cardano-node-pool index 66404c64c..f585b6531 100644 --- a/cardano_node_tests/cluster_scripts/conway/template-cardano-node-pool +++ b/cardano_node_tests/cluster_scripts/conway/template-cardano-node-pool @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%%" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/cardano-node-bft1 b/cardano_node_tests/cluster_scripts/conway_fast/cardano-node-bft1 index 05b2ee118..14a7a1afe 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/cardano-node-bft1 +++ b/cardano_node_tests/cluster_scripts/conway_fast/cardano-node-bft1 @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/conway_fast/run-cardano-submit-api index fb4825d15..9611361ff 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/run-cardano-submit-api +++ b/cardano_node_tests/cluster_scripts/conway_fast/run-cardano-submit-api @@ -2,7 +2,7 @@ testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" -echo "Starting cardano-submit-api: cardano-submit-api" +echo "Starting cardano-submit-api with PID $$: cardano-submit-api" echo "--config 
./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" echo "--listen-address 127.0.0.1" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/template-cardano-node-pool b/cardano_node_tests/cluster_scripts/conway_fast/template-cardano-node-pool index 66404c64c..f585b6531 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/template-cardano-node-pool +++ b/cardano_node_tests/cluster_scripts/conway_fast/template-cardano-node-pool @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%%" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/cardano-node-bft1 b/cardano_node_tests/cluster_scripts/mainnet_fast/cardano-node-bft1 index 05b2ee118..14a7a1afe 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/cardano-node-bft1 +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/cardano-node-bft1 @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-bft1.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-bft1" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-bft1.json" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api b/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api index fb4825d15..9611361ff 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/run-cardano-submit-api @@ -2,7 +2,7 @@ 
testnet_magic="$(<./state-cluster%%INSTANCE_NUM%%/db-bft1/protocolMagicId)" -echo "Starting cardano-submit-api: cardano-submit-api" +echo "Starting cardano-submit-api with PID $$: cardano-submit-api" echo "--config ./state-cluster%%INSTANCE_NUM%%/submit-api-config.json" echo "--socket-path ./state-cluster%%INSTANCE_NUM%%/bft1.socket" echo "--listen-address 127.0.0.1" diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/template-cardano-node-pool b/cardano_node_tests/cluster_scripts/mainnet_fast/template-cardano-node-pool index 66404c64c..f585b6531 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/template-cardano-node-pool +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/template-cardano-node-pool @@ -16,7 +16,7 @@ case "${UTXO_BACKEND:-""}" in ;; esac -echo "Starting cardano-node run: cardano-node run" +echo "Starting cardano-node run with PID $$: cardano-node run" echo "--config ./state-cluster%%INSTANCE_NUM%%/config-pool%%POOL_NUM%%.json" echo "--database-path ./state-cluster%%INSTANCE_NUM%%/db-pool%%POOL_NUM%%" echo "--topology ./state-cluster%%INSTANCE_NUM%%/topology-pool%%POOL_NUM%%.json" From bfc9811194762e2e0b9b9eeb851095fb8128a0c2 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 10:04:59 +0100 Subject: [PATCH 055/168] feat: update POOL_COST initialization logic - Removed hardcoded POOL_COST value from start-cluster scripts. - Added logic to fetch POOL_COST from genesis.spec.json. - Set default POOL_COST to 600 if fetched value is 0. 
--- cardano_node_tests/cluster_scripts/conway/start-cluster | 5 ++++- cardano_node_tests/cluster_scripts/conway_fast/start-cluster | 5 ++++- .../cluster_scripts/mainnet_fast/start-cluster | 5 ++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 8364d3443..65ad205b5 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -31,7 +31,6 @@ TX_SUBMISSION_DELAY=60 PROPOSAL_DELAY=5 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 -POOL_COST=600 DREP_DELEGATED=500000000000 FEE=5000000 @@ -41,6 +40,10 @@ NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" SLOT_LENGTH="$(jq '.slotLength' < "$SCRIPT_DIR/genesis.spec.json")" EPOCH_SEC="$(jq '.epochLength * .slotLength | ceil' < "$SCRIPT_DIR/genesis.spec.json")" +POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" +if [ "$POOL_COST" -eq 0 ]; then + POOL_COST=600 +fi if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" 
>&2 diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index b26dd7efd..ce9d59594 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -30,7 +30,6 @@ NUM_DREPS=5 TX_SUBMISSION_DELAY=60 SUBMIT_DELAY=5 POOL_PLEDGE=1000000000000 -POOL_COST=600 DREP_DELEGATED=500000000000 BYRON_INIT_SUPPLY=10020000000 PROTOCOL_VERSION=9 @@ -41,6 +40,10 @@ fi SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" +POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" +if [ "$POOL_COST" -eq 0 ]; then + POOL_COST=600 +fi # There is some weird calculation going on, and the deleg supply needs to have a minimum value, # that is somehow based on non-delegated supply. diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 98520d728..ce9d59594 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -39,8 +39,11 @@ fi SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" -POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" +POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" +if [ "$POOL_COST" -eq 0 ]; then + POOL_COST=600 +fi # There is some weird calculation going on, and the deleg supply needs to have a minimum value, # that is somehow based on non-delegated supply. 
From ce3a60afb1cec1bd9c1f4abcaa9d2a407de3ef99 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 10:16:34 +0100 Subject: [PATCH 056/168] fix: ensure proper restart of local cluster nodes Added commands to stop all processes and added a sleep interval before restarting supervisord. This ensures that the local cluster nodes are properly restarted with binaries from the new cluster-node version. --- .github/node_upgrade_pytest.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index d96b075ce..43f399f25 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -186,6 +186,8 @@ elif [ "$1" = "step2" ]; then # Restart local cluster nodes with binaries from new cluster-node version. # It is necessary to restart supervisord with new environment. + "$STATE_CLUSTER/supervisorctl" stop all + sleep 5 "$STATE_CLUSTER/supervisord_stop" sleep 3 "$STATE_CLUSTER/supervisord_start" || exit 6 From 1704636cf225189447457b9172953a5f7a5e544f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 11:06:19 +0100 Subject: [PATCH 057/168] fix(start-cluster): unify variables notation Updated the start-cluster scripts to use braces for variable expansion in cases where the variable is part of a string. 
--- .../cluster_scripts/conway/start-cluster | 472 +++++++++--------- .../cluster_scripts/conway_fast/start-cluster | 352 ++++++------- .../mainnet_fast/start-cluster | 352 ++++++------- 3 files changed, 588 insertions(+), 588 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 65ad205b5..4b15c39b0 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -35,18 +35,18 @@ DREP_DELEGATED=500000000000 FEE=5000000 -SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" -NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" -MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" -SLOT_LENGTH="$(jq '.slotLength' < "$SCRIPT_DIR/genesis.spec.json")" -EPOCH_SEC="$(jq '.epochLength * .slotLength | ceil' < "$SCRIPT_DIR/genesis.spec.json")" -POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" +SECURITY_PARAM="$(jq '.securityParam' < "${SCRIPT_DIR}/genesis.spec.json")" +NETWORK_MAGIC="$(jq '.networkMagic' < "${SCRIPT_DIR}/genesis.spec.json")" +MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "${SCRIPT_DIR}/genesis.spec.json")" +SLOT_LENGTH="$(jq '.slotLength' < "${SCRIPT_DIR}/genesis.spec.json")" +EPOCH_SEC="$(jq '.epochLength * .slotLength | ceil' < "${SCRIPT_DIR}/genesis.spec.json")" +POOL_COST="$(jq '.protocolParams.minPoolCost' < "${SCRIPT_DIR}/genesis.spec.json")" if [ "$POOL_COST" -eq 0 ]; then POOL_COST=600 fi -if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then - echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" >&2 +if [ -f "${STATE_CLUSTER}/supervisord.pid" ]; then + echo "Cluster already running. Please run \`${SCRIPT_DIR}/stop-cluster\` first!" 
>&2 exit 1 fi @@ -59,7 +59,7 @@ cardano_cli_log() { local out local retval - echo cardano-cli "$@" >> "$STATE_CLUSTER/start_cluster_cmds.log" + echo cardano-cli "$@" >> "${STATE_CLUSTER}/start_cluster_cmds.log" for _ in {1..3}; do set +e @@ -181,22 +181,22 @@ get_txins() { ENABLE_SUBMIT_API="$(type cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" -if [ -e "$SCRIPT_DIR/shell_env" ]; then +if [ -e "${SCRIPT_DIR}/shell_env" ]; then # shellcheck disable=SC1090,SC1091 - source "$SCRIPT_DIR/shell_env" + source "${SCRIPT_DIR}/shell_env" fi rm -rf "$STATE_CLUSTER" mkdir -p "$STATE_CLUSTER"/{shelley,webserver,db-sync} -cd "$STATE_CLUSTER/.." +cd "${STATE_CLUSTER}/.." cp "$SCRIPT_DIR"/cardano-node-* "$STATE_CLUSTER" -cp "$SCRIPT_DIR/run-cardano-submit-api" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/byron-params.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/dbsync-config.yaml" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" -cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/shelley/" +cp "${SCRIPT_DIR}/run-cardano-submit-api" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/byron-params.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/dbsync-config.yaml" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" +cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/shelley/" if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files @@ -210,7 +210,7 @@ fi case "${UTXO_BACKEND:=""}" in "" | mem | disk) - echo "$UTXO_BACKEND" > "$STATE_CLUSTER/utxo_backend" + echo "$UTXO_BACKEND" > "${STATE_CLUSTER}/utxo_backend" ;; *) echo "Unknown \`UTXO_BACKEND\`: '$UTXO_BACKEND', line $LINENO" >&2 @@ -220,20 +220,20 @@ esac # enable db-sync service if [ -n "${DBSYNC_REPO:-""}" ]; then - [ -e "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" ] || \ - { echo "The \`$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert + [ 
-e "${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync" ] || \ + { echo "The \`${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert # create clean database if [ -z "${DRY_RUN:-""}" ]; then - "$SCRIPT_DIR/postgres-setup.sh" + "${SCRIPT_DIR}/postgres-setup.sh" fi - cat >> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" <> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" < "$STATE_CLUSTER/cluster_start_time" +START_TIME_SHELLEY="$(date --utc +"%Y-%m-%dT%H:%M:%SZ" --date="5 seconds")" +START_TIME="$(date +%s --date="$START_TIME_SHELLEY")" +echo "$START_TIME" > "${STATE_CLUSTER}/cluster_start_time" cardano_cli_log byron genesis genesis \ --protocol-magic "$NETWORK_MAGIC" \ @@ -270,14 +270,14 @@ cardano_cli_log byron genesis genesis \ --delegate-share 1 \ --avvm-entry-count 0 \ --avvm-entry-balance 0 \ - --protocol-parameters-file "$STATE_CLUSTER/byron-params.json" \ - --genesis-output-dir "$STATE_CLUSTER/byron" \ + --protocol-parameters-file "${STATE_CLUSTER}/byron-params.json" \ + --genesis-output-dir "${STATE_CLUSTER}/byron" \ --start-time "$START_TIME" -mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" +mv "${STATE_CLUSTER}/byron-params.json" "${STATE_CLUSTER}/byron/params.json" cardano_cli_log legacy genesis create \ - --genesis-dir "$STATE_CLUSTER/shelley" \ + --genesis-dir "${STATE_CLUSTER}/shelley" \ --testnet-magic "$NETWORK_MAGIC" \ --gen-genesis-keys "$NUM_BFT_NODES" \ --start-time "$START_TIME_SHELLEY" \ @@ -285,60 +285,60 @@ cardano_cli_log legacy genesis create \ jq -r ' .initialFunds = {}' \ - < "$STATE_CLUSTER/shelley/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json_jq" -cat "$STATE_CLUSTER/shelley/genesis.json_jq" > "$STATE_CLUSTER/shelley/genesis.json" -rm -f "$STATE_CLUSTER/shelley/genesis.json_jq" + < "${STATE_CLUSTER}/shelley/genesis.json" > "${STATE_CLUSTER}/shelley/genesis.json_jq" +cat "${STATE_CLUSTER}/shelley/genesis.json_jq" > 
"${STATE_CLUSTER}/shelley/genesis.json" +rm -f "${STATE_CLUSTER}/shelley/genesis.json_jq" -mkdir -p "$STATE_CLUSTER/governance_data" +mkdir -p "${STATE_CLUSTER}/governance_data" # Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ - --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --cold-signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.skey" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --cold-signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.skey" cardano_cli_log conway governance committee key-gen-hot \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.skey" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.skey" cardano_cli_log conway governance committee create-hot-key-authorization-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --hot-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot_auth.cert" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --hot-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot_auth.cert" cardano_cli_log conway governance committee key-hash \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - > 
"$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.hash" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + > "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.hash" done # Pre-register committee in genesis - KEY_HASH_JSON=$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ - "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash) + KEY_HASH_JSON="$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ + "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash)" jq \ --argjson keyHashJson "$KEY_HASH_JSON" \ '.committee.members = $keyHashJson | .committee.threshold = 0.6 | .committeeMinSize = 2' \ - "$STATE_CLUSTER/shelley/genesis.conway.json" > "$STATE_CLUSTER/shelley/genesis.conway.json_jq" - cat "$STATE_CLUSTER/shelley/genesis.conway.json_jq" > "$STATE_CLUSTER/shelley/genesis.conway.json" - rm -f "$STATE_CLUSTER/shelley/genesis.conway.json_jq" + "${STATE_CLUSTER}/shelley/genesis.conway.json" > "${STATE_CLUSTER}/shelley/genesis.conway.json_jq" + cat "${STATE_CLUSTER}/shelley/genesis.conway.json_jq" > "${STATE_CLUSTER}/shelley/genesis.conway.json" + rm -f "${STATE_CLUSTER}/shelley/genesis.conway.json_jq" fi KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" + < "${STATE_CLUSTER}/shelley/genesis.json")" POOL_DEPOSIT="$(jq '.protocolParams.poolDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" + < "${STATE_CLUSTER}/shelley/genesis.json")" DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" + < "${STATE_CLUSTER}/shelley/genesis.conway.json")" GOV_ACTION_DEPOSIT="$(jq '.govActionDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" + < "${STATE_CLUSTER}/shelley/genesis.conway.json")" BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ - "$STATE_CLUSTER/byron/genesis.json")" + "${STATE_CLUSTER}/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log 
legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.json")" + "${STATE_CLUSTER}/shelley/genesis.json")" ALONZO_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.alonzo.json")" + "${STATE_CLUSTER}/shelley/genesis.alonzo.json")" CONWAY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" + "${STATE_CLUSTER}/shelley/genesis.conway.json")" for conf in "$SCRIPT_DIR"/config-*.json; do fname="${conf##*/}" @@ -351,7 +351,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .ShelleyGenesisHash = $shelley_hash | .AlonzoGenesisHash = $alonzo_hash | .ConwayGenesisHash = $conway_hash' \ - "$conf" > "$STATE_CLUSTER/$fname" + "$conf" > "${STATE_CLUSTER}/${fname}" # enable P2P if [ -z "${ENABLE_LEGACY:-""}" ]; then @@ -368,7 +368,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do pool_num="${fname##*-pool}" pool_num="${pool_num%.json}" if [ "$((pool_num % 2))" != 0 ]; then - cp -f "$SCRIPT_DIR/topology-pool${pool_num}.json" "$STATE_CLUSTER" + cp -f "${SCRIPT_DIR}/topology-pool${pool_num}.json" "$STATE_CLUSTER" continue fi fi @@ -384,172 +384,172 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .ExperimentalProtocolsEnabled = true | .TraceBlockFetchClient = true | .TraceChainSyncClient = true' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" + "${STATE_CLUSTER}/${fname}" > "${STATE_CLUSTER}/${fname}_jq" + cat "${STATE_CLUSTER}/${fname}_jq" > "${STATE_CLUSTER}/${fname}" + rm -f "${STATE_CLUSTER}/${fname}_jq" fi done -for i in $(seq 1 $NUM_BFT_NODES); do - mkdir -p "$STATE_CLUSTER/nodes/node-bft$i" - ln -s "../../shelley/delegate-keys/delegate$i.vrf.skey" "$STATE_CLUSTER/nodes/node-bft$i/vrf.skey" - ln -s "../../shelley/delegate-keys/delegate$i.vrf.vkey" "$STATE_CLUSTER/nodes/node-bft$i/vrf.vkey" +for i in $(seq 1 "$NUM_BFT_NODES"); do + mkdir -p 
"${STATE_CLUSTER}/nodes/node-bft$i" + ln -s "../../shelley/delegate-keys/delegate${i}.vrf.skey" "${STATE_CLUSTER}/nodes/node-bft${i}/vrf.skey" + ln -s "../../shelley/delegate-keys/delegate${i}.vrf.vkey" "${STATE_CLUSTER}/nodes/node-bft${i}/vrf.vkey" cardano_cli_log latest node key-gen-KES \ - --verification-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.skey" + --verification-key-file "${STATE_CLUSTER}/nodes/node-bft${i}/kes.vkey" \ + --signing-key-file "${STATE_CLUSTER}/nodes/node-bft${i}/kes.skey" cardano_cli_log latest node issue-op-cert \ --kes-period 0 \ - --cold-signing-key-file "$STATE_CLUSTER/shelley/delegate-keys/delegate$i.skey" \ - --kes-verification-key-file "$STATE_CLUSTER/nodes/node-bft$i/kes.vkey" \ + --cold-signing-key-file "${STATE_CLUSTER}/shelley/delegate-keys/delegate${i}.skey" \ + --kes-verification-key-file "${STATE_CLUSTER}/nodes/node-bft${i}/kes.vkey" \ --operational-certificate-issue-counter-file \ - "$STATE_CLUSTER/shelley/delegate-keys/delegate$i.counter" \ - --out-file "$STATE_CLUSTER/nodes/node-bft$i/op.cert" + "${STATE_CLUSTER}/shelley/delegate-keys/delegate${i}.counter" \ + --out-file "${STATE_CLUSTER}/nodes/node-bft${i}/op.cert" INDEX="$(printf "%03d" $((i - 1)))" cardano_cli_log byron key keygen \ - --secret "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" + --secret "${STATE_CLUSTER}/byron/payment-keys.${INDEX}.key" cardano_cli_log byron key signing-key-address \ --byron-formats \ --testnet-magic "$NETWORK_MAGIC" \ - --secret "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" > "$STATE_CLUSTER/byron/address-$INDEX" + --secret "${STATE_CLUSTER}/byron/payment-keys.${INDEX}.key" > "${STATE_CLUSTER}/byron/address-${INDEX}" # write Genesis addresses to files cardano_cli_log byron key signing-key-address \ --byron-formats \ --testnet-magic "$NETWORK_MAGIC" \ - --secret "$STATE_CLUSTER/byron/genesis-keys.$INDEX.key" \ - > "$STATE_CLUSTER/byron/genesis-address-$INDEX" + --secret 
"${STATE_CLUSTER}/byron/genesis-keys.${INDEX}.key" \ + > "${STATE_CLUSTER}/byron/genesis-address-${INDEX}" - ln -s "../../byron/delegate-keys.$INDEX.key" "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.key" - ln -s "../../byron/delegation-cert.$INDEX.json" "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.json" + ln -s "../../byron/delegate-keys.${INDEX}.key" "${STATE_CLUSTER}/nodes/node-bft${i}/byron-deleg.key" + ln -s "../../byron/delegation-cert.${INDEX}.json" "${STATE_CLUSTER}/nodes/node-bft${i}/byron-deleg.json" # create Byron address that moves funds out of the genesis UTxO into a regular address cardano_cli_log byron transaction issue-genesis-utxo-expenditure \ - --genesis-json "$STATE_CLUSTER/byron/genesis.json" \ + --genesis-json "${STATE_CLUSTER}/byron/genesis.json" \ --testnet-magic "$NETWORK_MAGIC" \ --byron-formats \ - --tx "$STATE_CLUSTER/byron/tx$i.tx" \ - --wallet-key "$STATE_CLUSTER/nodes/node-bft$i/byron-deleg.key" \ - --rich-addr-from "$(head -n 1 "$STATE_CLUSTER/byron/genesis-address-$INDEX")" \ - --txout "(\"$(head -n 1 "$STATE_CLUSTER/byron/address-$INDEX")\", $FUNDS_PER_BYRON_ADDRESS)" + --tx "${STATE_CLUSTER}/byron/tx${i}.tx" \ + --wallet-key "${STATE_CLUSTER}/nodes/node-bft${i}/byron-deleg.key" \ + --rich-addr-from "$(head -n 1 "${STATE_CLUSTER}/byron/genesis-address-${INDEX}")" \ + --txout "(\"$(head -n 1 "${STATE_CLUSTER}/byron/address-${INDEX}")\", ${FUNDS_PER_BYRON_ADDRESS})" # convert to Shelley addresses and keys cardano_cli_log latest key convert-byron-key \ - --byron-signing-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX.key" \ - --out-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.skey" \ + --byron-signing-key-file "${STATE_CLUSTER}/byron/payment-keys.${INDEX}.key" \ + --out-file "${STATE_CLUSTER}/byron/payment-keys.${INDEX}-converted.skey" \ --byron-payment-key-type cardano_cli_log latest key verification-key \ - --signing-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.skey" \ - --verification-key-file 
"$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.vkey" + --signing-key-file "${STATE_CLUSTER}/byron/payment-keys.${INDEX}-converted.skey" \ + --verification-key-file "${STATE_CLUSTER}/byron/payment-keys.${INDEX}-converted.vkey" cardano_cli_log latest address build \ --testnet-magic "$NETWORK_MAGIC" \ - --payment-verification-key-file "$STATE_CLUSTER/byron/payment-keys.$INDEX-converted.vkey" \ - > "$STATE_CLUSTER/byron/address-$INDEX-converted" + --payment-verification-key-file "${STATE_CLUSTER}/byron/payment-keys.${INDEX}-converted.vkey" \ + > "${STATE_CLUSTER}/byron/address-${INDEX}-converted" - BFT_PORT=$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" )) - echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" + BFT_PORT="$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" ))" + echo "$BFT_PORT" > "${STATE_CLUSTER}/nodes/node-bft${i}/port" done for i in $(seq 1 "$NUM_POOLS"); do - mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" + mkdir -p "${STATE_CLUSTER}/nodes/node-pool$i" echo "Generating Pool $i Secrets" # pool owner addresses and keys cardano_cli_log latest address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" cardano_cli_log latest stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" # payment address cardano_cli_log latest address build \ - --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ - --stake-verification-key-file 
"$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner.addr" # stake address cardano_cli_log latest stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.addr" # stake address registration cert cardano_cli_log shelley stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake.reg.cert" # stake reward keys cardano_cli_log latest stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" # stake reward address registration cert cardano_cli_log shelley stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.shelley_reg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.shelley_reg.cert" # 
stake reward delegation to alwaysAbstain DRep cert cardano_cli_log conway stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool${i}/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward_vote_deleg.cert" if [ -n "${PV10:-""}" ]; then # stake reward address registration and vote delegation cert, to be used later in tests # when re-registering the reward address in Conway cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --always-abstain \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" else ln \ - "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.shelley_reg.cert" \ - "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.shelley_reg.cert" \ + "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" fi # pool keys cardano_cli_log latest node key-gen \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --cold-signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --cold-signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.skey" \ + --operational-certificate-issue-counter-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.counter" cardano_cli_log latest node key-gen-KES \ - --verification-key-file 
"$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/kes.skey" + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/kes.vkey" \ + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/kes.skey" cardano_cli_log latest node key-gen-VRF \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.skey" + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" \ + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.skey" # stake address delegation certs cardano_cli_log shelley stake-address stake-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.deleg.cert" # pool opcert cardano_cli_log latest node issue-op-cert \ --kes-period 0 \ - --cold-signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" \ - --kes-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" \ - --operational-certificate-issue-counter-file "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/op.cert" + --cold-signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.skey" \ + --kes-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/kes.vkey" \ + --operational-certificate-issue-counter-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.counter" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/op.cert" - POOL_NAME="TestPool$i" + POOL_NAME="TestPool${i}" POOL_DESC="Test Pool $i" - POOL_TICKER="TP$i" + POOL_TICKER="TP${i}" - cat > 
"$STATE_CLUSTER/webserver/pool$i.html" < "${STATE_CLUSTER}/webserver/pool${i}.html" < -$POOL_NAME +${POOL_NAME} -name: $POOL_NAME
-description: $POOL_DESC
-ticker: $POOL_TICKER
+name: ${POOL_NAME}
+description: ${POOL_DESC}
+ticker: ${POOL_TICKER}
EoF @@ -559,109 +559,109 @@ EoF --arg name "$POOL_NAME" \ --arg description "$POOL_DESC" \ --arg ticker "$POOL_TICKER" \ - --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool$i.html" \ + --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool${i}.html" \ '{"name": $name, "description": $description, "ticker": $ticker, "homepage": $homepage}' \ - > "$STATE_CLUSTER/webserver/pool$i.json" + > "${STATE_CLUSTER}/webserver/pool${i}.json" - METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - METADATA_HASH=$(cardano_cli_log latest stake-pool metadata-hash --pool-metadata-file \ - "$STATE_CLUSTER/webserver/pool$i.json") - POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) - echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" - echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" + METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool${i}.json" + METADATA_HASH="$(cardano_cli_log latest stake-pool metadata-hash --pool-metadata-file \ + "${STATE_CLUSTER}/webserver/pool${i}.json")" + POOL_PORT="$(("%%NODE_PORT_BASE%%" + (NUM_BFT_NODES + i - 1) * "%%PORTS_PER_NODE%%"))" + echo "$POOL_PORT" > "${STATE_CLUSTER}/nodes/node-pool${i}/port" + echo "$POOL_PLEDGE" > "${STATE_CLUSTER}/nodes/node-pool${i}/pledge" cardano_cli_log shelley stake-pool registration-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --vrf-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ --pool-margin 0.35 \ --pool-cost "$POOL_COST" \ - --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --pool-reward-account-verification-key-file 
"${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ + --pool-owner-stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --metadata-url "$METADATA_URL" \ --metadata-hash "$METADATA_HASH" \ --pool-relay-port "$POOL_PORT" \ --pool-relay-ipv4 "127.0.0.1" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/register.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/register.cert" done -mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" -mv "$STATE_CLUSTER/shelley/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" -rmdir "$STATE_CLUSTER/shelley/utxo-keys" +mv "${STATE_CLUSTER}/shelley/utxo-keys/utxo1.vkey" "${STATE_CLUSTER}/shelley/genesis-utxo.vkey" +mv "${STATE_CLUSTER}/shelley/utxo-keys/utxo1.skey" "${STATE_CLUSTER}/shelley/genesis-utxo.skey" +rmdir "${STATE_CLUSTER}/shelley/utxo-keys" for i in $(seq 1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" # DRep registration cardano_cli_log conway governance drep registration-certificate \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$DREP_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep_reg.cert" # delegatee payment keys cardano_cli_log conway address key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ - 
--verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" # delegatee stake keys cardano_cli_log conway stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" # delegatee payment address cardano_cli_log conway address build \ - --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.addr" # delegatee stake address cardano_cli_log conway stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.addr" # delegatee stake address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + 
--stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.reg.cert" # delegatee vote delegation cert cardano_cli_log conway stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" done # create scripts for cluster starting / stopping -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "$STATE_CLUSTER/supervisorctl" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "${STATE_CLUSTER}/supervisorctl_start" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "${STATE_CLUSTER}/supervisorctl_restart_nodes" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "${STATE_CLUSTER}/supervisorctl" -cat > "$STATE_CLUSTER/supervisord_start" < "${STATE_CLUSTER}/supervisord_start" < "$STATE_CLUSTER/supervisord_stop" < "${STATE_CLUSTER}/supervisord_stop" <&2; exit 1; } # assert sleep "$PROPOSAL_DELAY" 
echo "Submitting update proposal to update to Babbage" -BABBAGE_UPDATE_PROPOSAL="$STATE_CLUSTER/shelley/update-proposal-babbage" +BABBAGE_UPDATE_PROPOSAL="${STATE_CLUSTER}/shelley/update-proposal-babbage" # protocol version + dapps parameters update cardano_cli_log legacy governance create-update-proposal \ @@ -1023,7 +1023,7 @@ TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" cardano_cli_log alonzo transaction build-raw \ --fee "$FEE" \ "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --update-proposal-file "${BABBAGE_UPDATE_PROPOSAL}.proposal" \ --out-file "${BABBAGE_UPDATE_PROPOSAL}-tx.txbody" @@ -1059,7 +1059,7 @@ BABBAGE_EPOCH="$(get_epoch)" sleep "$PROPOSAL_DELAY" echo "Submitting update proposal to update to PV8" -BABBAGE_UPDATE_PROPOSAL_PV8="$STATE_CLUSTER/shelley/update-proposal-babbage-pv8" +BABBAGE_UPDATE_PROPOSAL_PV8="${STATE_CLUSTER}/shelley/update-proposal-babbage-pv8" # protocol version 8 cardano_cli_log legacy governance create-update-proposal \ @@ -1076,7 +1076,7 @@ TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" cardano_cli_log babbage transaction build-raw \ --fee "$FEE" \ "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --update-proposal-file "${BABBAGE_UPDATE_PROPOSAL_PV8}.proposal" \ --out-file "${BABBAGE_UPDATE_PROPOSAL_PV8}-tx.txbody" @@ -1109,9 +1109,9 @@ BABBAGE_PV8_EPOCH="$(get_epoch)" cardano_cli_log babbage query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" + --out-file "${STATE_CLUSTER}/pparams.json" -PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" +PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "${STATE_CLUSTER}/pparams.json")" [ "$PROTOCOL_VERSION" = 8 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert @@ -1119,7 +1119,7 @@ PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json") 
sleep "$PROPOSAL_DELAY" echo "Submitting update proposal to update to Conway" -CONWAY_UPDATE_PROPOSAL="$STATE_CLUSTER/shelley/update-proposal-conway" +CONWAY_UPDATE_PROPOSAL="${STATE_CLUSTER}/shelley/update-proposal-conway" # protocol version 9 cardano_cli_log legacy governance create-update-proposal \ @@ -1136,7 +1136,7 @@ TXOUT_AMOUNT="$((TXIN_AMOUNT - FEE))" cardano_cli_log babbage transaction build-raw \ --fee "$FEE" \ "${TXINS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --update-proposal-file "${CONWAY_UPDATE_PROPOSAL}.proposal" \ --out-file "${CONWAY_UPDATE_PROPOSAL}-tx.txbody" @@ -1171,9 +1171,9 @@ CONWAY_PV9_EPOCH="$(get_epoch)" cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" + --out-file "${STATE_CLUSTER}/pparams.json" -PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" +PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "${STATE_CLUSTER}/pparams.json")" [ "$PROTOCOL_VERSION" = 9 ] || { echo "Unexpected protocol version '$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert @@ -1187,7 +1187,7 @@ get_txins "$FAUCET_ADDR" "$STOP_TXIN_AMOUNT" TXOUT_AMOUNT="$((TXIN_AMOUNT - STOP_TXIN_AMOUNT))" -V9_TX="$STATE_CLUSTER/governance_data/setup_governance" +V9_TX="${STATE_CLUSTER}/governance_data/setup_governance" CC_ARGS=() for f in "$STATE_CLUSTER"/governance_data/cc_member*_committee_hot_auth.cert; do @@ -1205,15 +1205,15 @@ DREPS_ARGS=() DREPS_SIGNING=() for i in $(seq 1 "$NUM_DREPS"); do DREPS_ARGS+=( \ - "--tx-out" "$(<"$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr")+$DREP_DELEGATED" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" \ - "--certificate-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" \ + "--tx-out" 
"$(<"${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.addr")+${DREP_DELEGATED}" \ + "--certificate-file" "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep_reg.cert" \ + "--certificate-file" "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.reg.cert" \ + "--certificate-file" "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" \ ) DREPS_SIGNING+=( \ - "--signing-key-file" "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ - "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ - "--signing-key-file" "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ + "--signing-key-file" "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.skey" \ + "--signing-key-file" "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.skey" \ + "--signing-key-file" "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.skey" \ ) done @@ -1221,10 +1221,10 @@ POOL_ARGS=() POOL_SIGNING=() for i in $(seq 1 "$NUM_POOLS"); do POOL_ARGS+=( \ - "--certificate-file" "$STATE_CLUSTER/nodes/node-pool${i}/stake-reward_vote_deleg.cert" \ + "--certificate-file" "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward_vote_deleg.cert" \ ) POOL_SIGNING+=( \ - "--signing-key-file" "$STATE_CLUSTER/nodes/node-pool${i}/reward.skey" \ + "--signing-key-file" "${STATE_CLUSTER}/nodes/node-pool${i}/reward.skey" \ ) done @@ -1234,7 +1234,7 @@ cardano_cli_log conway transaction build-raw \ "${CC_ARGS[@]}" \ "${DREPS_ARGS[@]}" \ "${POOL_ARGS[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --out-file "${V9_TX}-tx.txbody" cardano_cli_log conway transaction sign \ @@ -1266,19 +1266,19 @@ fi # Hard fork to Conway protocol version 10 if [ -n "${PV10:-""}" ]; then - PV10_ACTION="$STATE_CLUSTER/governance_data/hardfork_pv10_action" + PV10_ACTION="${STATE_CLUSTER}/governance_data/hardfork_pv10_action" echo "Submitting hard fork proposal to update to Conway PV10" cardano_cli_log 
conway governance action create-hardfork \ --testnet \ --governance-action-deposit "$GOV_ACTION_DEPOSIT" \ - --deposit-return-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool1/reward.vkey" \ + --deposit-return-stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool1/reward.vkey" \ --anchor-url "http://www.hardfork-pv10.com" \ --anchor-data-hash 5d372dca1a4cc90d7d16d966c48270e33e3aa0abcb0e78f0d5ca7ff330d2245d \ --protocol-major-version 10 \ --protocol-minor-version 0 \ - --out-file "$PV10_ACTION.action" + --out-file "${PV10_ACTION}.action" STOP_TXIN_AMOUNT="$((FEE + GOV_ACTION_DEPOSIT))" @@ -1289,8 +1289,8 @@ if [ -n "${PV10:-""}" ]; then cardano_cli_log conway transaction build-raw \ --fee "$FEE" \ "${TXINS[@]}" \ - --proposal-file "$PV10_ACTION.action" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --proposal-file "${PV10_ACTION}.action" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --out-file "${PV10_ACTION}-tx.txbody" cardano_cli_log conway transaction sign \ @@ -1342,7 +1342,7 @@ if [ -n "${PV10:-""}" ]; then # Submit the votes - PV10_VOTES="$STATE_CLUSTER/governance_data/hardfork_pv10_votes" + PV10_VOTES="${STATE_CLUSTER}/governance_data/hardfork_pv10_votes" VOTE_FILES=() for f in "$PV10_ACTION"_*.vote; do @@ -1370,7 +1370,7 @@ if [ -n "${PV10:-""}" ]; then --fee "$FEE" \ "${TXINS[@]}" \ "${VOTE_FILES[@]}" \ - --tx-out "$FAUCET_ADDR+$TXOUT_AMOUNT" \ + --tx-out "${FAUCET_ADDR}+${TXOUT_AMOUNT}" \ --out-file "${PV10_VOTES}-tx.txbody" cardano_cli_log conway transaction sign \ @@ -1396,11 +1396,11 @@ if [ -n "${PV10:-""}" ]; then cardano_cli_log conway query protocol-parameters \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/pparams.json" + --out-file "${STATE_CLUSTER}/pparams.json" - PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "$STATE_CLUSTER/pparams.json")" + PROTOCOL_VERSION="$(jq ".protocolVersion.major" < "${STATE_CLUSTER}/pparams.json")" [ "$PROTOCOL_VERSION" = 10 ] || { echo "Unexpected protocol version 
'$PROTOCOL_VERSION' on line $LINENO" >&2; exit 1; } # assert fi -echo "Cluster started. Run \`$SCRIPT_DIR/stop-cluster\` to stop" +echo "Cluster started. Run \`${SCRIPT_DIR}/stop-cluster\` to stop" diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index ce9d59594..f6cac4976 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -37,10 +37,10 @@ if [ -n "${PV10:-""}" ]; then PROTOCOL_VERSION=10 fi -SECURITY_PARAM="$(jq '.securityParam' < "$SCRIPT_DIR/genesis.spec.json")" -NETWORK_MAGIC="$(jq '.networkMagic' < "$SCRIPT_DIR/genesis.spec.json")" -MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "$SCRIPT_DIR/genesis.spec.json")" -POOL_COST="$(jq '.protocolParams.minPoolCost' < "$SCRIPT_DIR/genesis.spec.json")" +SECURITY_PARAM="$(jq '.securityParam' < "${SCRIPT_DIR}/genesis.spec.json")" +NETWORK_MAGIC="$(jq '.networkMagic' < "${SCRIPT_DIR}/genesis.spec.json")" +MAX_SUPPLY="$(jq '.maxLovelaceSupply' < "${SCRIPT_DIR}/genesis.spec.json")" +POOL_COST="$(jq '.protocolParams.minPoolCost' < "${SCRIPT_DIR}/genesis.spec.json")" if [ "$POOL_COST" -eq 0 ]; then POOL_COST=600 fi @@ -51,8 +51,8 @@ DELEG_MAGIC_VALUE=3340000000000000 DELEG_SUPPLY="$((POOL_PLEDGE * NUM_POOLS + DELEG_MAGIC_VALUE))" NONDELEG_SUPPLY="$(( (MAX_SUPPLY - DELEG_SUPPLY) * 8 / 10))" -if [ -f "$STATE_CLUSTER/supervisord.pid" ]; then - echo "Cluster already running. Please run \`$SCRIPT_DIR/stop-cluster\` first!" >&2 +if [ -f "${STATE_CLUSTER}/supervisord.pid" ]; then + echo "Cluster already running. Please run \`${SCRIPT_DIR}/stop-cluster\` first!" 
>&2 exit 1 fi @@ -62,7 +62,7 @@ if [ "$NUM_POOLS" -lt 3 ]; then fi cardano_cli_log() { - echo cardano-cli "$@" >> "$STATE_CLUSTER/start_cluster_cmds.log" + echo cardano-cli "$@" >> "${STATE_CLUSTER}/start_cluster_cmds.log" for _ in {1..3}; do set +e @@ -119,22 +119,22 @@ get_txins() { ENABLE_SUBMIT_API="$(command -v cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" -if [ -e "$SCRIPT_DIR/shell_env" ]; then +if [ -e "${SCRIPT_DIR}/shell_env" ]; then # shellcheck disable=SC1090,SC1091 - source "$SCRIPT_DIR/shell_env" + source "${SCRIPT_DIR}/shell_env" fi rm -rf "$STATE_CLUSTER" mkdir -p "$STATE_CLUSTER"/{shelley,webserver,db-sync,create_staked} -cd "$STATE_CLUSTER/.." +cd "${STATE_CLUSTER}/.." cp "$SCRIPT_DIR"/cardano-node-* "$STATE_CLUSTER" -cp "$SCRIPT_DIR/run-cardano-submit-api" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/byron-params.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/dbsync-config.yaml" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" -cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/create_staked/" +cp "${SCRIPT_DIR}/run-cardano-submit-api" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/byron-params.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/dbsync-config.yaml" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" +cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files @@ -148,7 +148,7 @@ fi case "${UTXO_BACKEND:=""}" in "" | mem | disk) - echo "$UTXO_BACKEND" > "$STATE_CLUSTER/utxo_backend" + echo "$UTXO_BACKEND" > "${STATE_CLUSTER}/utxo_backend" ;; *) echo "Unknown \`UTXO_BACKEND\`: '$UTXO_BACKEND', line $LINENO" >&2 @@ -158,20 +158,20 @@ esac # enable db-sync service if [ -n "${DBSYNC_REPO:-""}" ]; then - [ -e "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" ] || \ - { echo "The \`$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync\` not 
found, line $LINENO" >&2; exit 1; } # assert + [ -e "${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync" ] || \ + { echo "The \`${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert # create clean database if [ -z "${DRY_RUN:-""}" ]; then - "$SCRIPT_DIR/postgres-setup.sh" + "${SCRIPT_DIR}/postgres-setup.sh" fi - cat >> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" <> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" < "$STATE_CLUSTER/cluster_start_time" +START_TIME_SHELLEY="$(date --utc +"%Y-%m-%dT%H:%M:%SZ" --date="5 seconds")" +START_TIME="$(date +%s --date="$START_TIME_SHELLEY")" +echo "$START_TIME" > "${STATE_CLUSTER}/cluster_start_time" cardano_cli_log byron genesis genesis \ --protocol-magic "$NETWORK_MAGIC" \ @@ -205,14 +205,14 @@ cardano_cli_log byron genesis genesis \ --delegate-share 1 \ --avvm-entry-count 0 \ --avvm-entry-balance 0 \ - --protocol-parameters-file "$STATE_CLUSTER/byron-params.json" \ - --genesis-output-dir "$STATE_CLUSTER/byron" \ + --protocol-parameters-file "${STATE_CLUSTER}/byron-params.json" \ + --genesis-output-dir "${STATE_CLUSTER}/byron" \ --start-time "$START_TIME" -mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" +mv "${STATE_CLUSTER}/byron-params.json" "${STATE_CLUSTER}/byron/params.json" cardano_cli_log legacy genesis create-staked \ - --genesis-dir "$STATE_CLUSTER/create_staked" \ + --genesis-dir "${STATE_CLUSTER}/create_staked" \ --testnet-magic "$NETWORK_MAGIC" \ --gen-pools "$NUM_POOLS" \ --gen-utxo-keys 1 \ @@ -221,72 +221,72 @@ cardano_cli_log legacy genesis create-staked \ --supply-delegated "$DELEG_SUPPLY" \ --start-time "$START_TIME_SHELLEY" -mkdir -p "$STATE_CLUSTER/governance_data" +mkdir -p "${STATE_CLUSTER}/governance_data" # Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ - --cold-verification-key-file 
"$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --cold-signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.skey" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --cold-signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.skey" cardano_cli_log conway governance committee key-gen-hot \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.skey" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.skey" cardano_cli_log conway governance committee create-hot-key-authorization-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --hot-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot_auth.cert" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --hot-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot_auth.cert" cardano_cli_log conway governance committee key-hash \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - > "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.hash" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + > "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.hash" done # Pre-register committee in genesis - KEY_HASH_JSON=$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ - 
"$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash) + KEY_HASH_JSON="$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ + "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash)" jq \ --argjson keyHashJson "$KEY_HASH_JSON" \ '.committee.members = $keyHashJson | .committee.threshold = 0.6 | .committeeMinSize = 2' \ - "$STATE_CLUSTER/create_staked/genesis.conway.json" > "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" - cat "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" > "$STATE_CLUSTER/create_staked/genesis.conway.json" - rm -f "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" + "${STATE_CLUSTER}/create_staked/genesis.conway.json" > "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" + cat "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" > "${STATE_CLUSTER}/create_staked/genesis.conway.json" + rm -f "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" fi -mv "$STATE_CLUSTER/create_staked/delegate-keys" "$STATE_CLUSTER/shelley/delegate-keys" -mv "$STATE_CLUSTER/create_staked/genesis-keys" "$STATE_CLUSTER/shelley/genesis-keys" +mv "${STATE_CLUSTER}/create_staked/delegate-keys" "${STATE_CLUSTER}/shelley/delegate-keys" +mv "${STATE_CLUSTER}/create_staked/genesis-keys" "${STATE_CLUSTER}/shelley/genesis-keys" jq \ --argjson max_supply "$MAX_SUPPLY" \ --argjson prot_ver "$PROTOCOL_VERSION" \ '.protocolParams.protocolVersion.major = $prot_ver | .maxLovelaceSupply = $max_supply' \ - "$STATE_CLUSTER/create_staked/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json" -rm -f "$STATE_CLUSTER/create_staked/genesis.json" -mv "$STATE_CLUSTER"/create_staked/genesis*.json "$STATE_CLUSTER/shelley/" + "${STATE_CLUSTER}/create_staked/genesis.json" > "${STATE_CLUSTER}/shelley/genesis.json" +rm -f "${STATE_CLUSTER}/create_staked/genesis.json" +mv "$STATE_CLUSTER"/create_staked/genesis*.json "${STATE_CLUSTER}/shelley/" -mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" -mv 
"$STATE_CLUSTER/create_staked/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" +mv "${STATE_CLUSTER}/create_staked/utxo-keys/utxo1.skey" "${STATE_CLUSTER}/shelley/genesis-utxo.skey" +mv "${STATE_CLUSTER}/create_staked/utxo-keys/utxo1.vkey" "${STATE_CLUSTER}/shelley/genesis-utxo.vkey" cardano_cli_log conway address build --payment-verification-key-file \ - "$STATE_CLUSTER/shelley/genesis-utxo.vkey" \ - --out-file "$STATE_CLUSTER/shelley/genesis-utxo.addr" \ + "${STATE_CLUSTER}/shelley/genesis-utxo.vkey" \ + --out-file "${STATE_CLUSTER}/shelley/genesis-utxo.addr" \ --testnet-magic "$NETWORK_MAGIC" -mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/stake-delegator-keys" +mv "${STATE_CLUSTER}/create_staked/stake-delegator-keys" "${STATE_CLUSTER}/shelley/stake-delegator-keys" KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" + < "${STATE_CLUSTER}/shelley/genesis.json")" DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" + < "${STATE_CLUSTER}/shelley/genesis.conway.json")" BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ - "$STATE_CLUSTER/byron/genesis.json")" + "${STATE_CLUSTER}/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.json")" + "${STATE_CLUSTER}/shelley/genesis.json")" ALONZO_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.alonzo.json")" + "${STATE_CLUSTER}/shelley/genesis.alonzo.json")" CONWAY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" + "${STATE_CLUSTER}/shelley/genesis.conway.json")" for conf in "$SCRIPT_DIR"/config-*.json; do fname="${conf##*/}" @@ -301,7 +301,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .AlonzoGenesisHash = $alonzo_hash | .ConwayGenesisHash = $conway_hash | 
."LastKnownBlockVersion-Major" = $prot_ver' \ - "$conf" > "$STATE_CLUSTER/$fname" + "$conf" > "${STATE_CLUSTER}/${fname}" # enable P2P if [ -z "${ENABLE_LEGACY:-""}" ]; then @@ -318,7 +318,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do pool_num="${fname##*-pool}" pool_num="${pool_num%.json}" if [ "$((pool_num % 2))" != 0 ]; then - cp -f "$SCRIPT_DIR/topology-pool${pool_num}.json" "$STATE_CLUSTER" + cp -f "${SCRIPT_DIR}/topology-pool${pool_num}.json" "$STATE_CLUSTER" continue fi fi @@ -333,107 +333,107 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .TargetNumberOfActivePeers = 20 | .TraceBlockFetchClient = true | .TraceChainSyncClient = true' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" + "${STATE_CLUSTER}/${fname}" > "${STATE_CLUSTER}/${fname}_jq" + cat "${STATE_CLUSTER}/${fname}_jq" > "${STATE_CLUSTER}/${fname}" + rm -f "${STATE_CLUSTER}/${fname}_jq" fi done -for i in $(seq 1 $NUM_BFT_NODES); do - mkdir -p "$STATE_CLUSTER/nodes/node-bft$i" - BFT_PORT=$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" )) - echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" +for i in $(seq 1 "$NUM_BFT_NODES"); do + mkdir -p "${STATE_CLUSTER}/nodes/node-bft$i" + BFT_PORT="$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" ))" + echo "$BFT_PORT" > "${STATE_CLUSTER}/nodes/node-bft${i}/port" done for i in $(seq 1 "$NUM_POOLS"); do - mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" + mkdir -p "${STATE_CLUSTER}/nodes/node-pool$i" + mv "${STATE_CLUSTER}/create_staked/pools/cold${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.skey" + mv "${STATE_CLUSTER}/create_staked/pools/cold${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" - mv 
"$STATE_CLUSTER/create_staked/pools/kes$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/kes.skey" - mv "$STATE_CLUSTER/create_staked/pools/kes$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/kes${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/kes.skey" + mv "${STATE_CLUSTER}/create_staked/pools/kes${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/kes.vkey" - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.cert" "$STATE_CLUSTER/nodes/node-pool$i/op.cert" - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.counter" "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" + mv "${STATE_CLUSTER}/create_staked/pools/opcert${i}.cert" "${STATE_CLUSTER}/nodes/node-pool${i}/op.cert" + mv "${STATE_CLUSTER}/create_staked/pools/opcert${i}.counter" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.counter" # stake reward keys - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/staking-reward${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/reward.skey" + mv "${STATE_CLUSTER}/create_staked/pools/staking-reward${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.skey" - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/vrf${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.skey" + mv "${STATE_CLUSTER}/create_staked/pools/vrf${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" echo "Generating Pool $i Secrets" # pool owner addresses and keys cardano_cli_log conway address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" + --signing-key-file 
"${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" cardano_cli_log conway stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" # payment address cardano_cli_log conway address build \ - --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner.addr" # stake address cardano_cli_log conway stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.addr" # stake address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake.reg.cert" if [ -n "${PV10:-""}" ]; then # stake reward address registration and vote 
delegation cert cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --always-abstain \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" # owner stake address stake and vote delegation cert cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.deleg.cert" else # stake reward address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" # owner stake address stake delegation cert cardano_cli_log conway stake-address stake-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + 
--cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.deleg.cert" fi - POOL_NAME="TestPool$i" + POOL_NAME="TestPool${i}" POOL_DESC="Test Pool $i" - POOL_TICKER="TP$i" + POOL_TICKER="TP${i}" - cat > "$STATE_CLUSTER/webserver/pool$i.html" < "${STATE_CLUSTER}/webserver/pool${i}.html" < -$POOL_NAME +${POOL_NAME} -name: $POOL_NAME
-description: $POOL_DESC
-ticker: $POOL_TICKER
+name: ${POOL_NAME}
+description: ${POOL_DESC}
+ticker: ${POOL_TICKER}
EoF @@ -443,107 +443,107 @@ EoF --arg name "$POOL_NAME" \ --arg description "$POOL_DESC" \ --arg ticker "$POOL_TICKER" \ - --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool$i.html" \ + --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool${i}.html" \ '{"name": $name, "description": $description, "ticker": $ticker, "homepage": $homepage}' \ - > "$STATE_CLUSTER/webserver/pool$i.json" + > "${STATE_CLUSTER}/webserver/pool${i}.json" - METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - METADATA_HASH=$(cardano_cli_log conway stake-pool metadata-hash --pool-metadata-file \ - "$STATE_CLUSTER/webserver/pool$i.json") - POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) - echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" - echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" + METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool${i}.json" + METADATA_HASH="$(cardano_cli_log conway stake-pool metadata-hash --pool-metadata-file \ + "${STATE_CLUSTER}/webserver/pool${i}.json")" + POOL_PORT="$(("%%NODE_PORT_BASE%%" + (NUM_BFT_NODES + i - 1) * "%%PORTS_PER_NODE%%"))" + echo "$POOL_PORT" > "${STATE_CLUSTER}/nodes/node-pool${i}/port" + echo "$POOL_PLEDGE" > "${STATE_CLUSTER}/nodes/node-pool${i}/pledge" cardano_cli_log conway stake-pool registration-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --vrf-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ --pool-margin 0.35 \ --pool-cost "$POOL_COST" \ - --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --pool-reward-account-verification-key-file 
"${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ + --pool-owner-stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --metadata-url "$METADATA_URL" \ --metadata-hash "$METADATA_HASH" \ --pool-relay-port "$POOL_PORT" \ --pool-relay-ipv4 "127.0.0.1" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/register.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/register.cert" done -rm -rf "$STATE_CLUSTER/shelley/create_staked" +rm -rf "${STATE_CLUSTER}/shelley/create_staked" for i in $(seq 1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" # DRep registration cardano_cli_log conway governance drep registration-certificate \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$DREP_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep_reg.cert" # delegatee payment keys cardano_cli_log conway address key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" # delegatee stake keys cardano_cli_log conway stake-address key-gen \ - --signing-key-file 
"$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" # delegatee payment address cardano_cli_log conway address build \ - --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.addr" # delegatee stake address cardano_cli_log conway stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.addr" # delegatee stake address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.reg.cert" # delegatee vote delegation cert cardano_cli_log conway 
stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" done # create scripts for cluster starting / stopping -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "$STATE_CLUSTER/supervisorctl" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "${STATE_CLUSTER}/supervisorctl_start" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "${STATE_CLUSTER}/supervisorctl_restart_nodes" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "${STATE_CLUSTER}/supervisorctl" -cat > "$STATE_CLUSTER/supervisord_start" < "${STATE_CLUSTER}/supervisord_start" < "$STATE_CLUSTER/supervisord_stop" < "${STATE_CLUSTER}/supervisord_stop" <&2 +if [ -f "${STATE_CLUSTER}/supervisord.pid" ]; then + echo "Cluster already running. Please run \`${SCRIPT_DIR}/stop-cluster\` first!" 
>&2 exit 1 fi @@ -62,7 +62,7 @@ if [ "$NUM_POOLS" -lt 3 ]; then fi cardano_cli_log() { - echo cardano-cli "$@" >> "$STATE_CLUSTER/start_cluster_cmds.log" + echo cardano-cli "$@" >> "${STATE_CLUSTER}/start_cluster_cmds.log" for _ in {1..3}; do set +e @@ -119,22 +119,22 @@ get_txins() { ENABLE_SUBMIT_API="$(command -v cardano-submit-api >/dev/null 2>&1 && echo 1 || echo 0)" -if [ -e "$SCRIPT_DIR/shell_env" ]; then +if [ -e "${SCRIPT_DIR}/shell_env" ]; then # shellcheck disable=SC1090,SC1091 - source "$SCRIPT_DIR/shell_env" + source "${SCRIPT_DIR}/shell_env" fi rm -rf "$STATE_CLUSTER" mkdir -p "$STATE_CLUSTER"/{shelley,webserver,db-sync,create_staked} -cd "$STATE_CLUSTER/.." +cd "${STATE_CLUSTER}/.." cp "$SCRIPT_DIR"/cardano-node-* "$STATE_CLUSTER" -cp "$SCRIPT_DIR/run-cardano-submit-api" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/byron-params.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/dbsync-config.yaml" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/submit-api-config.json" "$STATE_CLUSTER" -cp "$SCRIPT_DIR/supervisor.conf" "$STATE_CLUSTER" -cp "$SCRIPT_DIR"/*genesis*.spec.json "$STATE_CLUSTER/create_staked/" +cp "${SCRIPT_DIR}/run-cardano-submit-api" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/byron-params.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/dbsync-config.yaml" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" +cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" +cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files @@ -148,7 +148,7 @@ fi case "${UTXO_BACKEND:=""}" in "" | mem | disk) - echo "$UTXO_BACKEND" > "$STATE_CLUSTER/utxo_backend" + echo "$UTXO_BACKEND" > "${STATE_CLUSTER}/utxo_backend" ;; *) echo "Unknown \`UTXO_BACKEND\`: '$UTXO_BACKEND', line $LINENO" >&2 @@ -158,20 +158,20 @@ esac # enable db-sync service if [ -n "${DBSYNC_REPO:-""}" ]; then - [ -e "$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync" ] || \ - { echo "The \`$DBSYNC_REPO/db-sync-node/bin/cardano-db-sync\` not 
found, line $LINENO" >&2; exit 1; } # assert + [ -e "${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync" ] || \ + { echo "The \`${DBSYNC_REPO}/db-sync-node/bin/cardano-db-sync\` not found, line $LINENO" >&2; exit 1; } # assert # create clean database if [ -z "${DRY_RUN:-""}" ]; then - "$SCRIPT_DIR/postgres-setup.sh" + "${SCRIPT_DIR}/postgres-setup.sh" fi - cat >> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" <> "$STATE_CLUSTER/supervisor.conf" <> "${STATE_CLUSTER}/supervisor.conf" < "$STATE_CLUSTER/cluster_start_time" +START_TIME_SHELLEY="$(date --utc +"%Y-%m-%dT%H:%M:%SZ" --date="5 seconds")" +START_TIME="$(date +%s --date="$START_TIME_SHELLEY")" +echo "$START_TIME" > "${STATE_CLUSTER}/cluster_start_time" cardano_cli_log byron genesis genesis \ --protocol-magic "$NETWORK_MAGIC" \ @@ -205,14 +205,14 @@ cardano_cli_log byron genesis genesis \ --delegate-share 1 \ --avvm-entry-count 0 \ --avvm-entry-balance 0 \ - --protocol-parameters-file "$STATE_CLUSTER/byron-params.json" \ - --genesis-output-dir "$STATE_CLUSTER/byron" \ + --protocol-parameters-file "${STATE_CLUSTER}/byron-params.json" \ + --genesis-output-dir "${STATE_CLUSTER}/byron" \ --start-time "$START_TIME" -mv "$STATE_CLUSTER/byron-params.json" "$STATE_CLUSTER/byron/params.json" +mv "${STATE_CLUSTER}/byron-params.json" "${STATE_CLUSTER}/byron/params.json" cardano_cli_log legacy genesis create-staked \ - --genesis-dir "$STATE_CLUSTER/create_staked" \ + --genesis-dir "${STATE_CLUSTER}/create_staked" \ --testnet-magic "$NETWORK_MAGIC" \ --gen-pools "$NUM_POOLS" \ --gen-utxo-keys 1 \ @@ -221,72 +221,72 @@ cardano_cli_log legacy genesis create-staked \ --supply-delegated "$DELEG_SUPPLY" \ --start-time "$START_TIME_SHELLEY" -mkdir -p "$STATE_CLUSTER/governance_data" +mkdir -p "${STATE_CLUSTER}/governance_data" # Create committee keys if [ -z "${NO_CC:-""}" ]; then for i in $(seq 1 "$NUM_CC"); do cardano_cli_log conway governance committee key-gen-cold \ - --cold-verification-key-file 
"$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --cold-signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.skey" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --cold-signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.skey" cardano_cli_log conway governance committee key-gen-hot \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --signing-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.skey" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --signing-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.skey" cardano_cli_log conway governance committee create-hot-key-authorization-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - --hot-verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_hot_auth.cert" + --cold-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + --hot-verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_hot_auth.cert" cardano_cli_log conway governance committee key-hash \ - --verification-key-file "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.vkey" \ - > "$STATE_CLUSTER/governance_data/cc_member${i}_committee_cold.hash" + --verification-key-file "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.vkey" \ + > "${STATE_CLUSTER}/governance_data/cc_member${i}_committee_cold.hash" done # Pre-register committee in genesis - KEY_HASH_JSON=$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ - 
"$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash) + KEY_HASH_JSON="$(jq -nR '[inputs | {("keyHash-" + .): 10000}] | add' \ + "$STATE_CLUSTER"/governance_data/cc_member*_committee_cold.hash)" jq \ --argjson keyHashJson "$KEY_HASH_JSON" \ '.committee.members = $keyHashJson | .committee.threshold = 0.6 | .committeeMinSize = 2' \ - "$STATE_CLUSTER/create_staked/genesis.conway.json" > "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" - cat "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" > "$STATE_CLUSTER/create_staked/genesis.conway.json" - rm -f "$STATE_CLUSTER/create_staked/genesis.conway.json_jq" + "${STATE_CLUSTER}/create_staked/genesis.conway.json" > "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" + cat "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" > "${STATE_CLUSTER}/create_staked/genesis.conway.json" + rm -f "${STATE_CLUSTER}/create_staked/genesis.conway.json_jq" fi -mv "$STATE_CLUSTER/create_staked/delegate-keys" "$STATE_CLUSTER/shelley/delegate-keys" -mv "$STATE_CLUSTER/create_staked/genesis-keys" "$STATE_CLUSTER/shelley/genesis-keys" +mv "${STATE_CLUSTER}/create_staked/delegate-keys" "${STATE_CLUSTER}/shelley/delegate-keys" +mv "${STATE_CLUSTER}/create_staked/genesis-keys" "${STATE_CLUSTER}/shelley/genesis-keys" jq \ --argjson max_supply "$MAX_SUPPLY" \ --argjson prot_ver "$PROTOCOL_VERSION" \ '.protocolParams.protocolVersion.major = $prot_ver | .maxLovelaceSupply = $max_supply' \ - "$STATE_CLUSTER/create_staked/genesis.json" > "$STATE_CLUSTER/shelley/genesis.json" -rm -f "$STATE_CLUSTER/create_staked/genesis.json" -mv "$STATE_CLUSTER"/create_staked/genesis*.json "$STATE_CLUSTER/shelley/" + "${STATE_CLUSTER}/create_staked/genesis.json" > "${STATE_CLUSTER}/shelley/genesis.json" +rm -f "${STATE_CLUSTER}/create_staked/genesis.json" +mv "$STATE_CLUSTER"/create_staked/genesis*.json "${STATE_CLUSTER}/shelley/" -mv "$STATE_CLUSTER/create_staked/utxo-keys/utxo1.skey" "$STATE_CLUSTER/shelley/genesis-utxo.skey" -mv 
"$STATE_CLUSTER/create_staked/utxo-keys/utxo1.vkey" "$STATE_CLUSTER/shelley/genesis-utxo.vkey" +mv "${STATE_CLUSTER}/create_staked/utxo-keys/utxo1.skey" "${STATE_CLUSTER}/shelley/genesis-utxo.skey" +mv "${STATE_CLUSTER}/create_staked/utxo-keys/utxo1.vkey" "${STATE_CLUSTER}/shelley/genesis-utxo.vkey" cardano_cli_log conway address build --payment-verification-key-file \ - "$STATE_CLUSTER/shelley/genesis-utxo.vkey" \ - --out-file "$STATE_CLUSTER/shelley/genesis-utxo.addr" \ + "${STATE_CLUSTER}/shelley/genesis-utxo.vkey" \ + --out-file "${STATE_CLUSTER}/shelley/genesis-utxo.addr" \ --testnet-magic "$NETWORK_MAGIC" -mv "$STATE_CLUSTER/create_staked/stake-delegator-keys" "$STATE_CLUSTER/shelley/stake-delegator-keys" +mv "${STATE_CLUSTER}/create_staked/stake-delegator-keys" "${STATE_CLUSTER}/shelley/stake-delegator-keys" KEY_DEPOSIT="$(jq '.protocolParams.keyDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.json")" + < "${STATE_CLUSTER}/shelley/genesis.json")" DREP_DEPOSIT="$(jq '.dRepDeposit' \ - < "$STATE_CLUSTER/shelley/genesis.conway.json")" + < "${STATE_CLUSTER}/shelley/genesis.conway.json")" BYRON_GENESIS_HASH="$(cardano_cli_log byron genesis print-genesis-hash --genesis-json \ - "$STATE_CLUSTER/byron/genesis.json")" + "${STATE_CLUSTER}/byron/genesis.json")" SHELLEY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.json")" + "${STATE_CLUSTER}/shelley/genesis.json")" ALONZO_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.alonzo.json")" + "${STATE_CLUSTER}/shelley/genesis.alonzo.json")" CONWAY_GENESIS_HASH="$(cardano_cli_log legacy genesis hash --genesis \ - "$STATE_CLUSTER/shelley/genesis.conway.json")" + "${STATE_CLUSTER}/shelley/genesis.conway.json")" for conf in "$SCRIPT_DIR"/config-*.json; do fname="${conf##*/}" @@ -301,7 +301,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .AlonzoGenesisHash = $alonzo_hash | .ConwayGenesisHash = $conway_hash | 
."LastKnownBlockVersion-Major" = $prot_ver' \ - "$conf" > "$STATE_CLUSTER/$fname" + "$conf" > "${STATE_CLUSTER}/${fname}" # enable P2P if [ -z "${ENABLE_LEGACY:-""}" ]; then @@ -318,7 +318,7 @@ for conf in "$SCRIPT_DIR"/config-*.json; do pool_num="${fname##*-pool}" pool_num="${pool_num%.json}" if [ "$((pool_num % 2))" != 0 ]; then - cp -f "$SCRIPT_DIR/topology-pool${pool_num}.json" "$STATE_CLUSTER" + cp -f "${SCRIPT_DIR}/topology-pool${pool_num}.json" "$STATE_CLUSTER" continue fi fi @@ -333,107 +333,107 @@ for conf in "$SCRIPT_DIR"/config-*.json; do | .TargetNumberOfActivePeers = 20 | .TraceBlockFetchClient = true | .TraceChainSyncClient = true' \ - "$STATE_CLUSTER/$fname" > "$STATE_CLUSTER/${fname}_jq" - cat "$STATE_CLUSTER/${fname}_jq" > "$STATE_CLUSTER/$fname" - rm -f "$STATE_CLUSTER/${fname}_jq" + "${STATE_CLUSTER}/${fname}" > "${STATE_CLUSTER}/${fname}_jq" + cat "${STATE_CLUSTER}/${fname}_jq" > "${STATE_CLUSTER}/${fname}" + rm -f "${STATE_CLUSTER}/${fname}_jq" fi done -for i in $(seq 1 $NUM_BFT_NODES); do - mkdir -p "$STATE_CLUSTER/nodes/node-bft$i" - BFT_PORT=$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" )) - echo "$BFT_PORT" > "$STATE_CLUSTER/nodes/node-bft$i/port" +for i in $(seq 1 "$NUM_BFT_NODES"); do + mkdir -p "${STATE_CLUSTER}/nodes/node-bft$i" + BFT_PORT="$(("%%NODE_PORT_BASE%%" + (i - 1) * "%%PORTS_PER_NODE%%" ))" + echo "$BFT_PORT" > "${STATE_CLUSTER}/nodes/node-bft${i}/port" done for i in $(seq 1 "$NUM_POOLS"); do - mkdir -p "$STATE_CLUSTER/nodes/node-pool$i" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/cold.skey" - mv "$STATE_CLUSTER/create_staked/pools/cold$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" + mkdir -p "${STATE_CLUSTER}/nodes/node-pool$i" + mv "${STATE_CLUSTER}/create_staked/pools/cold${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.skey" + mv "${STATE_CLUSTER}/create_staked/pools/cold${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" - mv 
"$STATE_CLUSTER/create_staked/pools/kes$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/kes.skey" - mv "$STATE_CLUSTER/create_staked/pools/kes$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/kes.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/kes${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/kes.skey" + mv "${STATE_CLUSTER}/create_staked/pools/kes${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/kes.vkey" - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.cert" "$STATE_CLUSTER/nodes/node-pool$i/op.cert" - mv "$STATE_CLUSTER/create_staked/pools/opcert$i.counter" "$STATE_CLUSTER/nodes/node-pool$i/cold.counter" + mv "${STATE_CLUSTER}/create_staked/pools/opcert${i}.cert" "${STATE_CLUSTER}/nodes/node-pool${i}/op.cert" + mv "${STATE_CLUSTER}/create_staked/pools/opcert${i}.counter" "${STATE_CLUSTER}/nodes/node-pool${i}/cold.counter" # stake reward keys - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/reward.skey" - mv "$STATE_CLUSTER/create_staked/pools/staking-reward$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/staking-reward${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/reward.skey" + mv "${STATE_CLUSTER}/create_staked/pools/staking-reward${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.skey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.skey" - mv "$STATE_CLUSTER/create_staked/pools/vrf$i.vkey" "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" + mv "${STATE_CLUSTER}/create_staked/pools/vrf${i}.skey" "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.skey" + mv "${STATE_CLUSTER}/create_staked/pools/vrf${i}.vkey" "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" echo "Generating Pool $i Secrets" # pool owner addresses and keys cardano_cli_log conway address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" + --signing-key-file 
"${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" cardano_cli_log conway stake-address key-gen \ - --signing-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.skey" \ - --verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" + --signing-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" # payment address cardano_cli_log conway address build \ - --payment-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-utxo.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-utxo.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner.addr" # stake address cardano_cli_log conway stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.addr" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.addr" # stake address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake.reg.cert" if [ -n "${PV10:-""}" ]; then # stake reward address registration and vote 
delegation cert cardano_cli_log conway stake-address registration-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --always-abstain \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" # owner stake address stake and vote delegation cert cardano_cli_log conway stake-address stake-and-vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ --always-abstain \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.deleg.cert" else # stake reward address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/stake-reward.reg.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/stake-reward.reg.cert" # owner stake address stake delegation cert cardano_cli_log conway stake-address stake-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ + 
--cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.deleg.cert" fi - POOL_NAME="TestPool$i" + POOL_NAME="TestPool${i}" POOL_DESC="Test Pool $i" - POOL_TICKER="TP$i" + POOL_TICKER="TP${i}" - cat > "$STATE_CLUSTER/webserver/pool$i.html" < "${STATE_CLUSTER}/webserver/pool${i}.html" < -$POOL_NAME +${POOL_NAME} -name: $POOL_NAME
-description: $POOL_DESC
-ticker: $POOL_TICKER
+name: ${POOL_NAME}
+description: ${POOL_DESC}
+ticker: ${POOL_TICKER}
EoF @@ -443,107 +443,107 @@ EoF --arg name "$POOL_NAME" \ --arg description "$POOL_DESC" \ --arg ticker "$POOL_TICKER" \ - --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool$i.html" \ + --arg homepage "http://localhost:%%WEBSERVER_PORT%%/pool${i}.html" \ '{"name": $name, "description": $description, "ticker": $ticker, "homepage": $homepage}' \ - > "$STATE_CLUSTER/webserver/pool$i.json" + > "${STATE_CLUSTER}/webserver/pool${i}.json" - METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool$i.json" - METADATA_HASH=$(cardano_cli_log conway stake-pool metadata-hash --pool-metadata-file \ - "$STATE_CLUSTER/webserver/pool$i.json") - POOL_PORT=$(("%%NODE_PORT_BASE%%" + ("$NUM_BFT_NODES" + i - 1) * "%%PORTS_PER_NODE%%")) - echo "$POOL_PORT" > "$STATE_CLUSTER/nodes/node-pool$i/port" - echo $POOL_PLEDGE > "$STATE_CLUSTER/nodes/node-pool$i/pledge" + METADATA_URL="http://localhost:%%WEBSERVER_PORT%%/pool${i}.json" + METADATA_HASH="$(cardano_cli_log conway stake-pool metadata-hash --pool-metadata-file \ + "${STATE_CLUSTER}/webserver/pool${i}.json")" + POOL_PORT="$(("%%NODE_PORT_BASE%%" + (NUM_BFT_NODES + i - 1) * "%%PORTS_PER_NODE%%"))" + echo "$POOL_PORT" > "${STATE_CLUSTER}/nodes/node-pool${i}/port" + echo "$POOL_PLEDGE" > "${STATE_CLUSTER}/nodes/node-pool${i}/pledge" cardano_cli_log conway stake-pool registration-certificate \ - --cold-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/cold.vkey" \ - --vrf-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/vrf.vkey" \ + --cold-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/cold.vkey" \ + --vrf-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/vrf.vkey" \ --pool-pledge "$POOL_PLEDGE" \ --pool-margin 0.35 \ --pool-cost "$POOL_COST" \ - --pool-reward-account-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/reward.vkey" \ - --pool-owner-stake-verification-key-file "$STATE_CLUSTER/nodes/node-pool$i/owner-stake.vkey" \ + --pool-reward-account-verification-key-file 
"${STATE_CLUSTER}/nodes/node-pool${i}/reward.vkey" \ + --pool-owner-stake-verification-key-file "${STATE_CLUSTER}/nodes/node-pool${i}/owner-stake.vkey" \ --metadata-url "$METADATA_URL" \ --metadata-hash "$METADATA_HASH" \ --pool-relay-port "$POOL_PORT" \ --pool-relay-ipv4 "127.0.0.1" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/nodes/node-pool$i/register.cert" + --out-file "${STATE_CLUSTER}/nodes/node-pool${i}/register.cert" done -rm -rf "$STATE_CLUSTER/shelley/create_staked" +rm -rf "${STATE_CLUSTER}/shelley/create_staked" for i in $(seq 1 "$NUM_DREPS"); do # DRep keys cardano_cli_log conway governance drep key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" # DRep registration cardano_cli_log conway governance drep registration-certificate \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ --key-reg-deposit-amt "$DREP_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep_reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep_reg.cert" # delegatee payment keys cardano_cli_log conway address key-gen \ - --signing-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" # delegatee stake keys cardano_cli_log conway stake-address key-gen \ - --signing-key-file 
"$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.skey" \ - --verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" + --signing-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.skey" \ + --verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" # delegatee payment address cardano_cli_log conway address build \ - --payment-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.vkey" \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --payment-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}.addr" # delegatee stake address cardano_cli_log conway stake-address build \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --testnet-magic "$NETWORK_MAGIC" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.addr" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.addr" # delegatee stake address registration cert cardano_cli_log conway stake-address registration-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ --key-reg-deposit-amt "$KEY_DEPOSIT" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.reg.cert" + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.reg.cert" # delegatee vote delegation cert cardano_cli_log conway 
stake-address vote-delegation-certificate \ - --stake-verification-key-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vkey" \ - --drep-verification-key-file "$STATE_CLUSTER/governance_data/default_drep_${i}_drep.vkey" \ - --out-file "$STATE_CLUSTER/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" + --stake-verification-key-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vkey" \ + --drep-verification-key-file "${STATE_CLUSTER}/governance_data/default_drep_${i}_drep.vkey" \ + --out-file "${STATE_CLUSTER}/governance_data/vote_stake_addr${i}_stake.vote_deleg.cert" done # create scripts for cluster starting / stopping -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "$STATE_CLUSTER/supervisorctl_start" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "$STATE_CLUSTER/supervisorctl_restart_nodes" -printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "$STATE_CLUSTER/supervisorctl" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% start all" > "${STATE_CLUSTER}/supervisorctl_start" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% restart nodes:" > "${STATE_CLUSTER}/supervisorctl_restart_nodes" +printf "#!/bin/sh\n\nsupervisorctl -s http://127.0.0.1:%%SUPERVISOR_PORT%% \"\$@\"" > "${STATE_CLUSTER}/supervisorctl" -cat > "$STATE_CLUSTER/supervisord_start" < "${STATE_CLUSTER}/supervisord_start" < "$STATE_CLUSTER/supervisord_stop" < "${STATE_CLUSTER}/supervisord_stop" < Date: Thu, 31 Oct 2024 17:39:23 +0100 Subject: [PATCH 058/168] feat: update node upgrade script and test - Add NETWORK_MAGIC export to node_upgrade_pytest.sh - Modify genesis file handling in step1 and step2 - Add delay for Tx submission and chain sync checks - Update test_node_upgrade.py to handle new serialization format --- .github/node_upgrade_pytest.sh | 77 +++++++++++-------- 
cardano_node_tests/tests/test_node_upgrade.py | 4 +- 2 files changed, 47 insertions(+), 34 deletions(-) diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index 43f399f25..575ebe50e 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -12,6 +12,9 @@ export COMMAND_ERA="$CLUSTER_ERA" CLUSTER_SCRIPTS_DIR="$WORKDIR/cluster0_${CLUSTER_ERA}" STATE_CLUSTER="${CARDANO_NODE_SOCKET_PATH_CI%/*}" +NETWORK_MAGIC="$(jq '.networkMagic' "$STATE_CLUSTER/shelley/genesis.json")" +export NETWORK_MAGIC + # init dir for step1 binaries STEP1_BIN="$WORKDIR/step1-bin" mkdir -p "$STEP1_BIN" @@ -69,9 +72,7 @@ if [ "$1" = "step1" ]; then # backup the original genesis files cp -f "$STATE_CLUSTER/shelley/genesis.alonzo.json" "$STATE_CLUSTER/shelley/genesis.alonzo.step1.json" - if [ -e "$STATE_CLUSTER/shelley/genesis.conway.json" ]; then - cp -f "$STATE_CLUSTER/shelley/genesis.conway.json" "$STATE_CLUSTER/shelley/genesis.conway.step1.json" - fi + cp -f "$STATE_CLUSTER/shelley/genesis.conway.json" "$STATE_CLUSTER/shelley/genesis.conway.step1.json" # run smoke tests pytest \ @@ -122,29 +123,32 @@ elif [ "$1" = "step2" ]; then DRY_RUN=1 \ "$CLUSTER_SCRIPTS_DIR/start-cluster" - # hashes of old and new Conway genesis files - CONWAY_GENESIS_HASH="$(jq -r ".ConwayGenesisHash" "$WORKDIR/dry_mixed/state-cluster0/config-bft1.json")" - CONWAY_GENESIS_STEP1_HASH="" - if [ -e "$STATE_CLUSTER/shelley/genesis.conway.json" ]; then - CONWAY_GENESIS_STEP1_HASH="$(jq -r ".ConwayGenesisHash" "$STATE_CLUSTER/config-bft1.json")" - fi - - # hashes of old and new Alonzo genesis files - ALONZO_GENESIS_HASH="$(jq -r ".AlonzoGenesisHash" "$WORKDIR/dry_mixed/state-cluster0/config-bft1.json")" - ALONZO_GENESIS_STEP1_HASH="$(jq -r ".AlonzoGenesisHash" "$STATE_CLUSTER/config-bft1.json")" - - # use the original genesis files - BYRON_GENESIS_HASH="$(jq -r ".ByronGenesisHash" "$STATE_CLUSTER/config-bft1.json")" - SHELLEY_GENESIS_HASH="$(jq -r ".ShelleyGenesisHash" 
"$STATE_CLUSTER/config-bft1.json")" - # copy newly generated topology files to the cluster state dir cp -f "$WORKDIR"/dry_mixed/state-cluster0/topology-*.json "$STATE_CLUSTER" - # copy newly generated Alonzo genesis to the cluster state dir - cp -f "$WORKDIR/dry_mixed/state-cluster0/shelley/genesis.alonzo.json" "$STATE_CLUSTER/shelley" + if [ -n "${REPLACE_GENESIS_STEP2:-""}" ]; then + # Copy newly generated Alonzo genesis to the cluster state dir + cp -f "$WORKDIR/dry_mixed/state-cluster0/shelley/genesis.alonzo.json" "$STATE_CLUSTER/shelley" + + # Copy newly generated Conway genesis file to the cluster state dir, use committee members from the original + # Conway genesis. + jq \ + --argfile src "$STATE_CLUSTER/shelley/genesis.conway.step1.json" \ + '.committee.members = $src.committee.members' \ + "$WORKDIR/dry_mixed/state-cluster0/shelley/genesis.conway.json" > "$STATE_CLUSTER/shelley/genesis.conway.json" + fi - # copy newly generated Conway genesis file to the cluster state dir - cp -f "$WORKDIR/dry_mixed/state-cluster0/shelley/genesis.conway.json" "$STATE_CLUSTER/shelley" + # use the original shelley and byron genesis files + BYRON_GENESIS_HASH="$(jq -r ".ByronGenesisHash" "$STATE_CLUSTER/config-bft1.json")" + SHELLEY_GENESIS_HASH="$(jq -r ".ShelleyGenesisHash" "$STATE_CLUSTER/config-bft1.json")" + # hashes of the original alonzo and conway genesis files + CONWAY_GENESIS_STEP1_HASH="$(jq -r ".ConwayGenesisHash" "$STATE_CLUSTER/config-bft1.json")" + ALONZO_GENESIS_STEP1_HASH="$(jq -r ".AlonzoGenesisHash" "$STATE_CLUSTER/config-bft1.json")" + # hashes of genesis files that were potentially replaced + ALONZO_GENESIS_HASH="$(cardano-cli legacy genesis hash --genesis \ + "$STATE_CLUSTER/shelley/genesis.alonzo.json")" + CONWAY_GENESIS_HASH="$(cardano-cli legacy genesis hash --genesis \ + "$STATE_CLUSTER/shelley/genesis.conway.json")" # copy newly generated config files to the cluster state dir for conf in "$WORKDIR"/dry_mixed/state-cluster0/config-*.json; do @@ 
-208,19 +212,13 @@ elif [ "$1" = "step2" ]; then exit 6 fi - # waiting for node to fully start - for _ in {1..10}; do - if [ -S "$CARDANO_NODE_SOCKET_PATH" ]; then - break - fi - sleep 5 - done - [ -S "$CARDANO_NODE_SOCKET_PATH" ] || { echo "Failed to start node" >&2; exit 6; } # assert + # Tx submission delay + sleep 60 # waiting to make sure the chain is synced - NETWORK_MAGIC="$(jq '.networkMagic' "$STATE_CLUSTER/shelley/genesis.json")" for _ in {1..10}; do - sync_progress="$(cardano-cli latest query tip --testnet-magic "$NETWORK_MAGIC" | jq -r '.syncProgress')" + sync_progress="$(cardano-cli latest query tip \ + --testnet-magic "$NETWORK_MAGIC" | jq -r '.syncProgress')" if [ "$sync_progress" = "100.00" ]; then break fi @@ -324,6 +322,21 @@ elif [ "$1" = "step3" ]; then exit 6 fi + # Tx submission delay + sleep 60 + + # waiting to make sure the chain on pool3 is synced + for _ in {1..10}; do + sync_progress="$(cardano-cli latest query tip \ + --testnet-magic "$NETWORK_MAGIC" \ + --socket-path "${STATE_CLUSTER}/pool3.socket" | jq -r '.syncProgress')" + if [ "$sync_progress" = "100.00" ]; then + break + fi + sleep 5 + done + [ "$sync_progress" = "100.00" ] || { echo "Failed to sync node" >&2; exit 6; } # assert + # Test for ignoring expected errors in log files. Run separately to make sure it runs first. pytest cardano_node_tests/tests/test_node_upgrade.py -k test_ignore_log_errors err_retval="$?" 
diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 5ba380ca5..1a40f72ba 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -94,11 +94,11 @@ def test_ignore_log_errors( cluster = cluster_singleton common.get_test_id(cluster) - if UPGRADE_REVISION >= version.parse("9.0.0") > BASE_REVISION: + if UPGRADE_REVISION >= version.parse("10.1.0") > BASE_REVISION: logfiles.add_ignore_rule( files_glob="*.stdout", regex="ChainDB:Error:.* Invalid snapshot DiskSnapshot .*DeserialiseFailure " - ".*expected list len or indef", + ".* expected change in the serialization format", ignore_file_id=worker_id, ) From 163e5b22ed35982fc24ebdbab81938d272a3abad Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 17:50:16 +0100 Subject: [PATCH 059/168] feat(tests): add check for updated PlutusV2 cost model Added a check in the test to skip if the PlutusV2 cost model has already been updated. This prevents redundant updates and ensures the test runs only when necessary. 
--- .../tests/tests_conway/test_update_plutusv2_builtins.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index d66387dc1..7a5654f27 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -97,6 +97,10 @@ def test_update_in_pv9( if not conway_common.is_in_bootstrap(cluster_obj=cluster): pytest.skip("Can run only during bootstrap period.") + init_cost_model = cluster.g_query.get_protocol_params()["costModels"]["PlutusV2"] + if len(init_cost_model) >= 185: + pytest.skip("PlutusV2 cost model was already updated.") + cost_proposal_file = DATA_DIR / "cost_models_list_185_v2_v3.json" def _update_cost_model() -> None: From b2f0ef8988b5ce7cecbec2eca88089ee1781e672 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 9 Oct 2024 14:22:01 +0200 Subject: [PATCH 060/168] feat: Add end-to-end tests for the newest batch of Plutus Core built-in funcitons --- .../v3/failingReadBitPolicyScriptV3_1.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_10.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_11.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_12.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_13.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_14.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_2.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_3.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_4.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_5.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_6.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_7.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_8.plutus | 5 + .../v3/failingReadBitPolicyScriptV3_9.plutus | 5 + ...ailingReplicateBytePolicyScriptV3_1.plutus | 5 + ...ailingReplicateBytePolicyScriptV3_2.plutus | 5 + ...ailingReplicateBytePolicyScriptV3_3.plutus | 5 + 
...ailingReplicateBytePolicyScriptV3_4.plutus | 5 + ...ailingReplicateBytePolicyScriptV3_5.plutus | 5 + ...ailingReplicateBytePolicyScriptV3_6.plutus | 5 + .../failingWriteBitsPolicyScriptV3_1.plutus | 5 + .../failingWriteBitsPolicyScriptV3_10.plutus | 5 + .../failingWriteBitsPolicyScriptV3_11.plutus | 5 + .../failingWriteBitsPolicyScriptV3_12.plutus | 5 + .../failingWriteBitsPolicyScriptV3_13.plutus | 5 + .../failingWriteBitsPolicyScriptV3_14.plutus | 5 + .../failingWriteBitsPolicyScriptV3_15.plutus | 5 + .../failingWriteBitsPolicyScriptV3_16.plutus | 5 + .../failingWriteBitsPolicyScriptV3_17.plutus | 5 + .../failingWriteBitsPolicyScriptV3_18.plutus | 5 + .../failingWriteBitsPolicyScriptV3_19.plutus | 5 + .../failingWriteBitsPolicyScriptV3_2.plutus | 5 + .../failingWriteBitsPolicyScriptV3_3.plutus | 5 + .../failingWriteBitsPolicyScriptV3_4.plutus | 5 + .../failingWriteBitsPolicyScriptV3_5.plutus | 5 + .../failingWriteBitsPolicyScriptV3_6.plutus | 5 + .../failingWriteBitsPolicyScriptV3_7.plutus | 5 + .../failingWriteBitsPolicyScriptV3_8.plutus | 5 + .../failingWriteBitsPolicyScriptV3_9.plutus | 5 + ...cceedingAndByteStringPolicyScriptV3.plutus | 5 + ...gComplementByteStringPolicyScriptV3.plutus | 5 + ...ucceedingCountSetBitsPolicyScriptV3.plutus | 5 + ...eedingFindFirstSetBitPolicyScriptV3.plutus | 5 + ...ucceedingOrByteStringPolicyScriptV3.plutus | 5 + .../v3/succeedingReadBitPolicyScriptV3.plutus | 5 + ...cceedingReplicateBytePolicyScriptV3.plutus | 5 + .../v3/succeedingRipemd_160Policy.plutus | 5 + ...edingRotateByteStringPolicyScriptV3.plutus | 5 + ...eedingShiftByteStringPolicyScriptV3.plutus | 5 + .../succeedingWriteBitsPolicyScriptV3.plutus | 5 + ...cceedingXorByteStringPolicyScriptV3.plutus | 5 + cardano_node_tests/tests/plutus_common.py | 182 ++++++++++++++++++ .../tests/tests_plutus/test_mint_build.py | 171 ++++++++++++++++ 53 files changed, 608 insertions(+) create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_1.plutus 
create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_10.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_11.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_12.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_13.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_14.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_2.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_4.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_5.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_6.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_7.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_8.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_9.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_1.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_2.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_4.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_5.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_6.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_1.plutus create mode 
100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_10.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_11.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_12.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_13.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_14.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_15.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_16.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_17.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_18.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_19.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_2.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_4.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_5.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_6.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_7.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_8.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_9.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingAndByteStringPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingComplementByteStringPolicyScriptV3.plutus 
create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingCountSetBitsPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingFindFirstSetBitPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingOrByteStringPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingReadBitPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingReplicateBytePolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingRipemd_160Policy.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingRotateByteStringPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingShiftByteStringPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingWriteBitsPolicyScriptV3.plutus create mode 100644 cardano_node_tests/tests/data/plutus/v3/succeedingXorByteStringPolicyScriptV3.plutus diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_1.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_1.plutus new file mode 100644 index 000000000..4fa7a2dfb --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_1.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5880587e0101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491010048000011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_10.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_10.plutus new file mode 100644 index 000000000..d56a81d2a --- /dev/null +++ 
b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_10.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58ad58ab0101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482c014011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_11.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_11.plutus new file mode 100644 index 000000000..8d98959a5 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_11.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58b558b30101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fbfffffffffffffffc04011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_12.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_12.plutus new file mode 100644 index 000000000..0f8d1d8c4 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_12.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58b558b30101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048202020202020202020008011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_13.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_13.plutus new file mode 100644 index 000000000..9a0e85231 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_13.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58b558b30101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fffffffffffffffffc04011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_14.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_14.plutus new file mode 100644 index 000000000..08c4bcdbd --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_14.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58b558b30101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048206020202020202020008011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git 
a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_2.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_2.plutus new file mode 100644 index 000000000..c763fb632 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_2.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5881587f0101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910100482c814011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_3.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_3.plutus new file mode 100644 index 000000000..958a2dcfb --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5880587e0101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491010048004011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_4.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_4.plutus new file mode 100644 index 000000000..8c65f99fc --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_4.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "588258800101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910101ff0048004011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git 
a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_5.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_5.plutus new file mode 100644 index 000000000..8c65f99fc --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_5.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "588258800101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910101ff0048004011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_6.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_6.plutus new file mode 100644 index 000000000..8c65f99fc --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_6.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "588258800101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910101ff0048004011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_7.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_7.plutus new file mode 100644 index 000000000..178e36f0b --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_7.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "588258800101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910101f40048030011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff 
--git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_8.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_8.plutus new file mode 100644 index 000000000..db232b190 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_8.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "588358810101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910102fff40048080011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_9.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_9.plutus new file mode 100644 index 000000000..d56a81d2a --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReadBitPolicyScriptV3_9.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58ad58ab0101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e66600491012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482c014011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_1.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_1.plutus new file mode 100644 index 000000000..fbcae22ff --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_1.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"5864586201010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c01045833351222335122335004333002480052000489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_2.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_2.plutus new file mode 100644 index 000000000..d3c46680c --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_2.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5864586201010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c01045833351222335122335004333002480052006489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_3.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_3.plutus new file mode 100644 index 000000000..ee2f382a8 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5864586201010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c01045833351222335122335004333002480092001489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_4.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_4.plutus new file mode 100644 index 000000000..f60ed4327 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_4.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"5864586201010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c01045833351222335122335004333002480212001489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_5.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_5.plutus new file mode 100644 index 000000000..bc4efc6f9 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_5.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5865586301010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c0104583335122233512233500433300248021208004489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_6.plutus b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_6.plutus new file mode 100644 index 000000000..7a9e1e715 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingReplicateBytePolicyScriptV3_6.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5867586501010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c010458333512223351223350043330024820a0005209a02489005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_1.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_1.plutus new file mode 100644 index 000000000..bdd7224fe --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_1.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58a358a10101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248810033500448001401401d22010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_10.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_10.plutus new file mode 100644 index 000000000..0ac82543b --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_10.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58aa58a80101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448040cd40112002500500748810050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_11.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_11.plutus new file mode 100644 index 000000000..dacf61e6f --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_11.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a558a30101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101000033500448005401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_12.plutus 
b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_12.plutus new file mode 100644 index 000000000..774709f66 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_12.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a558a30101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101000033500448041401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_13.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_13.plutus new file mode 100644 index 000000000..1c6ec24d2 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_13.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58dc58da0101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000335004482a014cd401120ac05335004482c01540140212210050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_14.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_14.plutus new file mode 100644 index 000000000..e1d57a476 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_14.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58dd58db0101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000335004482a014cd401120ac053350044820225e940140212210050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_15.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_15.plutus new file mode 100644 index 000000000..540acd6b2 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_15.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58d858d60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000335004483fbfffffffffffffffc05401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_16.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_16.plutus new file mode 100644 index 000000000..0d0d5de24 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_16.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58d858d60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500448202020202020202020009401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_17.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_17.plutus new file mode 100644 index 000000000..108ff9e8a --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_17.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58d858d60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000335004483fffffffffffffffffc05401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_18.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_18.plutus new file mode 100644 index 000000000..28cf05311 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_18.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58d858d60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500448206020202020202020009401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_19.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_19.plutus new file mode 100644 index 000000000..f152ad765 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_19.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58d858d60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500448202020202020202020011401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_2.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_2.plutus new file mode 100644 index 000000000..21da9f1fb --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_2.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58a358a10101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248810033500448079401401d22010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_3.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_3.plutus new file mode 100644 index 000000000..f3f3e5398 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a358a10101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248810033500448001401402122010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_4.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_4.plutus new file mode 100644 index 000000000..4d529cb75 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_4.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a858a60101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f5812233574000460080022323001001230022330020020013335122233351222335122335004333300248810033500448000cd40112002500500748810050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_5.plutus 
b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_5.plutus new file mode 100644 index 000000000..99af4acf2 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_5.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a558a30101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448005401401d22010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_6.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_6.plutus new file mode 100644 index 000000000..3b173205a --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_6.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58aa58a80101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448000cd40112001500500748810050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_7.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_7.plutus new file mode 100644 index 000000000..1f9b46809 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_7.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"58aa58a80101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448004cd40112000500500748810050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_8.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_8.plutus new file mode 100644 index 000000000..0000a5fb7 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_8.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58a558a30101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448041401401d22010050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_9.plutus b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_9.plutus new file mode 100644 index 000000000..9559cd7c9 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/failingWriteBitsPolicyScriptV3_9.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58aa58a80101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448008cd40112010500500748810050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingAndByteStringPolicyScriptV3.plutus 
b/cardano_node_tests/tests/data/plutus/v3/succeedingAndByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..ad5b1db8c --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingAndByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5903cb5903c801010033222300300232300100122590018a4d221900291112a999ab9a3371e666f2d6401229462940006004002226601001000a22c199a8911199a891119a89119a803999980100224500488101ff004881003350073333002004488101ff004881004881003350073333002004488101ff004881010000488101000033500733330020044881010000488101ff00488101000033500733330020044881024f0000488101f40048810144003350073333002005488100488101ff00488101ff003350073333002005488101ff00488100488101ff003350073333002005488101ff004881010000488101000033500733330020054881010000488101ff00488101000033500733330020054881024f0000488101f400488102440000335007333300200448812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b13808014808080189900422a0288ac0203640002540830091400164401020021093f210424001cc550821000335007333300200548812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048813213808014808080189900422a0288ac0203640002540830091400164401020021093f210424001cc55082101b55b625553af3003350073333002004488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048812b13808014808080189900422a0288ac0203640002540830091400164401020021093f210424001cc5508210003350073333002005488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b
9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048813213808014808080189900422a0288ac0203640002540830091400164401020021093f210424001cc55082101b55b625553af30050082222800802400e0050011094801400500080110914801c00a0021800091400c00a00212122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingComplementByteStringPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingComplementByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..b96170025 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingComplementByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "59016159015e01010033222300300232300100122590018a4d2219002912a999ab9a3371e6f3800800444cc01801800c458333512223351223350043300248810048810033500433002489010f00488101f0003350043300248902b00b004881024ff4003350043300248932db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af300488132246379e3675c2e6346d73dd5cd5551f5b08bfeec23b78cb2c3ffe9a834702d46b68050e95bf3e1322829a7e4aa49daaac50c003350043300248932246379e3675c2e6346d73dd5cd5551f5b08bfeec23b78cb2c3ffe9a834702d46b68050e95bf3e1322829a7e4aa49daaac50c00488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af300500522800801400442520050010c00048a006005001090914801c00a0021" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingCountSetBitsPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingCountSetBitsPolicyScriptV3.plutus new file mode 100644 index 000000000..91518b9c4 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingCountSetBitsPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"59016659016301010033222300300232300100122590018a4d2219002912a999ab9a3370e6f5000800444cc01801800c458333512223351223350043300248810048000cd4010cc00922010200000048000cd4010cc00922010201000048008cd4010cc00922010200010048008cd4010cc00922012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b0004831808cd4010cc00922012b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048000cd4010cc00922012b000000000000001000000000000000000000000000000000000000000000000000000000000000000000000048008cd4010cc00922012bffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00482c01540148a00200500110948014004300012280180140042424520070028009" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingFindFirstSetBitPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingFindFirstSetBitPolicyScriptV3.plutus new file mode 100644 index 000000000..bfad0eaef --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingFindFirstSetBitPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "59012e59012b01010033222300300232300100122590018a4d2219002912a999ab9a3370e6f5400800444cc01801800c458333512223351223350043300248810048004cd4010cc00922010200000048004cd4010cc00922010200020048008cd4010cc009220102fff20048008cd4010cc00922012b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048004cd4010cc00922012b000000000000000000000000000000000000000000000000000000000000000000000000000000000000010048000cd4010cc00922012b5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000482a01540148a00200500110948014004300012280180140042424520070028009" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingOrByteStringPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingOrByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..516616f0a 
--- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingOrByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5903cb5903c801010033222300300232300100122590018a4d221900291112a999ab9a3371e666f316401229462940006004002226601001000a22c199a8911199a891119a89119a803999980100224500488101ff004881003350073333002004488101ff004881004881003350073333002004488101ff004881010000488101ff0033500733330020044881010000488101ff00488101ff0033500733330020044881024f0000488101f400488101ff003350073333002005488100488101ff00488101ff003350073333002005488101ff00488100488101ff003350073333002005488101ff004881010000488101ff0033500733330020054881010000488101ff00488101ff0033500733330020054881024f0000488101f400488102ff0000335007333300200448812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812bfbdef6de9da7d1bfbde8db3b3bbebf6ecffdc1dffc6bf3dd7f613e57df8fd7ffdd7fefb6e7be5ecfdff7f800335007333300200548812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af300488132fbdef6de9da7d1bfbde8db3b3bbebf6ecffdc1dffc6bf3dd7f613e57df8fd7ffdd7fefb6e7be5ecfdff7f81b55b625553af3003350073333002004488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048812bfbdef6de9da7d1bfbde8db3b3bbebf6ecffdc1dffc6bf3dd7f613e57df8fd7ffdd7fefb6e7be5ecfdff7f8003350073333002005488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132fbdef6de9da7d1bfbde8db3b3bbebf6ecffdc1dffc6bf3dd7f613e57df
8fd7ffdd7fefb6e7be5ecfdff7f81b55b625553af30050082222800802400e0050011094801400500080110914801c00a0021800091400c00a00212122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingReadBitPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingReadBitPolicyScriptV3.plutus new file mode 100644 index 000000000..9cf25a50a --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingReadBitPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "5901a95901a60101003322230030023230010012322590018a4d22190029112a999ab9a3379e00600422b2003130070048b045640062d130070040466004004002666a2444666a244466a24466a00e666004910101f40048000010cd401cccc009220101f40048008010cd401cccc009220101f40048010014cd401cccc009220101f40048018010cd401cccc009220101f40048020014cd401cccc009220101f40048028014cd401cccc009220101f40048030014cd401cccc009220101f40048038014cd401cccc009220102f4ff0048050014cd401cccc00922012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048000010cd401cccc00922012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482a814014cd401cccc00922012b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482b814011402088a002007002800884a400a0028004008848a400e0050010c00048a006005001090914801c00a00201" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingReplicateBytePolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingReplicateBytePolicyScriptV3.plutus new file mode 100644 index 000000000..94739128b --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingReplicateBytePolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"59210a59210701010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4400c00800444cc01c01c010458333512223351223350043330024800120fe034890033500433300248320052046489642323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232300335004333002482020005209a02489ff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8
d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8dff8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d208d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d8d005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingRipemd_160Policy.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingRipemd_160Policy.plutus new file mode 100644 index 000000000..2fd605126 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingRipemd_160Policy.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "58ad58ab01010033222300300232300100122590018a4d2219002912a999ab9a3371e6f5800800444cc01801800c45833351222335122335004330024881004881149c1185a5c5e9fc54612808977ee8f548b2258d310033500433002489192e7ea84da4bc4d7cfb463e3f2c8647057afff3fbececa1d20000488114f18921115370b049e99dfdd49fc92b371dd7c7e900500522800801400442520050010c00048a006005001090914801c00a0021" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingRotateByteStringPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingRotateByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..e7cd5e114 --- /dev/null +++ 
b/cardano_node_tests/tests/data/plutus/v3/succeedingRotateByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "59051559051201010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4c00c00800444cc01c01c0104583335122233512233500433300248810048019221003350043330024881004800522100335004333002488102ebfc0048029221027f9d00335004333002488102ebfc004802522102e75f00335004333002488102ebfc004808122102ebfc00335004333002488102ebfc004807d22102ebfc00335004333002488102ebfc00480a9221027f9d00335004333002488102ebfc00480a522102e75f0033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000480012212b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048203fca52212b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fff8a52212b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000480b92212b6b42c2401dcee02d9d85ce5eb341f6e0673a15d84cabb09f220a8102b3ce9fb0d233d92e63ac51d819e1780033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482bbfca52212b6b42c2401dcee02d9d85ce5eb341f6e0673a15d84cabb09f220a8102b3ce9fb0d233d92e63ac51d819e1780033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048347d68c0d2212b6b42c2401dcee02d9d85ce5eb341f6e0673a15d84cabb09f220a8102b3ce9fb0d233d92e63ac51d819e1780033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fbfffffffffffffffc052212b41f6e0673a15d84cabb09f220a8102b3ce9fb0
d233d92e63ac51d819e1786b42c2401dcee02d9d85ce5eb30033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482020202020202020200092212b83edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b033c2f0d68584803b9dc05b3b0b9cbd660033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fffffffffffffffffc052212b44150205679d3f61a467b25cc758a3b033c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e0033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482060202020202020200092212b220a8102b3ce9fb0d233d92e63ac51d819e1786b42c2401dcee02d9d85ce5eb341f6e0673a15d84cabb09f005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingShiftByteStringPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingShiftByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..4d142869a --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingShiftByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"59046e59046b01010033222300300232300100122590018a4d22190029112a999ab9a3371e66f4800c00800444cc01c01c0104583335122233512233500433300248810048019221003350043330024881004801522100335004333002488102ebfc0048029221027f8000335004333002488102ebfc004802522102075f00335004333002488102ebfc00480812210200000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000480012212b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000480b92212b6b42c2401dcee02d9d85ce5eb341f6e0673a15d84cabb09f220a8102b3ce9fb0d233d92e63ac51d80000000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000480b52212b0000006785e1ad0b0900773b80b67617397acd07db819ce8576132aec27c882a040acf3a7ec348cf64b98e0033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b0004820225e92212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fe21e92212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fbfffffffffffffffc052212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482020202020202020200092212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433300248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000483fffffffffffffffffc052212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433300
248812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000482060202020202020200092212b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000005005222800801c00a00221290028008600024500300280084848a400e0050011" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingWriteBitsPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingWriteBitsPolicyScriptV3.plutus new file mode 100644 index 000000000..df995488c --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingWriteBitsPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": "59053a5905370101003323232223003002300222590018a4d221900291112a999ab9a3371e666f40010c02400d6400a2946294000222601000a22c1800914800d2f58122335740004600800223230010012300223300200200133351222333512223351223350043333002488101ff0033500448001401401d220101fe00335004333300248901ff0033500448009401401d220101fd00335004333300248901ff0033500448011401401d220101fb00335004333300248901ff0033500448019401401d220101f700335004333300248901ff0033500448021401401d220101ef00335004333300248901ff0033500448029401401d220101df00335004333300248901ff0033500448031401401d220101bf00335004333300248901ff0033500448039401401d2201017f003350043333002489010000335004480014014021220101010033500433330024890100003350044800940140212201010200335004333300248901000033500448011401402122010104003350043333002489010000335004480194014021220101080033500433330024890100003350044802140140212201011000335004333300248901000033500448029401402122010120003350043333002489010000335004480314014021220101400033500433330024890100003350044803940140212201018000335004333300248902f4ff0033500448051401401d220102f0ff00335004333300248902f4ff0033500448008cd401120145005007488102f0fd00335004333300248902f4ff0033500448050cd401120025005007488102f0fd00335004333300248902f4ff0033500448050cd4011200233500448050cd4011200233500448008cd4011200233500448008cd4011201433500448050cd4011201
433500448008cd4011200233500448008cd4011200233500448008cd4011200233500448050cd401120025005007488102f0fd00335004333300248902f4ff0033500448008cd4011201433500448008cd4011201433500448008cd4011201433500448008cd4011201433500448008cd4011201433500448008cd4011201433500448050cd4011201433500448050cd4011201633500448058cd401120125005007488102f0fd00335004333300248901ff00335004480014014021220101ff00335004333300248901000033500448001401401d220101000033500433330024892b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000335004482a014cd401120ac05335004482b81540140212212bd00000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433330024892b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000335004482a014cd401120ac05335004482b815401401d2212b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000033500433330024892b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000335004482a014cd401120ac05335004482b814cd401120ae05335004482b014cd401120a805335004482a014cd401120ae05335004482b014cd401120a805335004482a014cd401120a80550050084892bd00000000000000000000000000000000000000000000000000000000000000000000000000000000000000050052222800802400e0050011094801400430001228018014004242452007002800a0010022122900380140041" +} diff --git a/cardano_node_tests/tests/data/plutus/v3/succeedingXorByteStringPolicyScriptV3.plutus b/cardano_node_tests/tests/data/plutus/v3/succeedingXorByteStringPolicyScriptV3.plutus new file mode 100644 index 000000000..d510ca650 --- /dev/null +++ b/cardano_node_tests/tests/data/plutus/v3/succeedingXorByteStringPolicyScriptV3.plutus @@ -0,0 +1,5 @@ +{ + "type": "PlutusScriptV3", + "description": "", + "cborHex": 
"5903cb5903c801010033222300300232300100122590018a4d221900291112a999ab9a3371e666f356401229462940006004002226601001000a22c199a8911199a891119a89119a803999980100224500488101ff004881003350073333002004488101ff004881004881003350073333002004488101ff004881010000488101ff0033500733330020044881010000488101ff00488101ff0033500733330020044881024f0000488101f400488101bb003350073333002005488100488101ff00488101ff003350073333002005488101ff00488100488101ff003350073333002005488101ff004881010000488101ff0033500733330020054881010000488101ff00488101ff0033500733330020054881024f0000488101f400488102bb0000335007333300200448812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812be85e76ca1d2751a724e899113936136ccc99c1dda863c3d46b612813de8dd7ded440ceb2c3be420a8f75e800335007333300200548812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af300488132e85e76ca1d2751a724e899113936136ccc99c1dda863c3d46b612813de8dd7ded440ceb2c3be420a8f75e81b55b625553af3003350073333002004488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b00048812be85e76ca1d2751a724e899113936136ccc99c1dda863c3d46b612813de8dd7ded440ceb2c3be420a8f75e8003350073333002005488132db9c861c98a3d19cb928c22a32aaae0a4f740113dc48734d3c001657cb8fd2b9497faf16a40c1ecdd7d6581b55b625553af30048812b33c2f0d68584803b9dc05b3b0b9cbd6683edc0ce742bb09957613e44150205679d3f61a467b25cc758a3b000488132e85e76ca1d2751a724e899113936136ccc99c1dda863c3d46b612813de8dd7ded440ceb2c3be420a8f75e81b55b625553af30050082222800802400e0050011094801400500080110914801c00a0021800091400c00a00212122900380140041" +} diff --git 
a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index aecbb6097..0e9bab9c1 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -283,6 +283,188 @@ class PlutusScriptData: } +# ----- Succeeding bitwise tests ----- # + +UNKNOWN_FIXED_COST = 777_777 + +MINTING_ANDBYTESTRING_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingAndByteStringPolicyScriptV3.plutus" +MINTING_ANDBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_ANDBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost( + per_time=19269680, per_space=102266, fixed_cost=UNKNOWN_FIXED_COST + ), +) + +MINTING_ORBYTESTRING_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingOrByteStringPolicyScriptV3.plutus" +MINTING_ORBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_ORBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost( + per_time=19269680, per_space=102266, fixed_cost=UNKNOWN_FIXED_COST + ), +) + +MINTING_XORBYTESTRING_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingXorByteStringPolicyScriptV3.plutus" +MINTING_XORBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_XORBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost( + per_time=19269680, per_space=102266, fixed_cost=UNKNOWN_FIXED_COST + ), +) + +MINTING_COMPLEMENTBYTESTRING_PLUTUS_V3 = ( + SCRIPTS_V3_DIR / "succeedingComplementByteStringPolicyScriptV3.plutus" +) +MINTING_COMPLEMENTBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_COMPLEMENTBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=5863431, per_space=30027, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_COUNTSETBITS_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingCountSetBitsPolicyScriptV3.plutus" +MINTING_COUNTSETBITS_V3 = PlutusScriptData( + script_file=MINTING_COUNTSETBITS_PLUTUS_V3, + 
script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=9211420, per_space=45324, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_FINDFIRSTSET_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingFindFirstSetBitPolicyScriptV3.plutus" +MINTING_FINDFIRSTSET_V3 = PlutusScriptData( + script_file=MINTING_FINDFIRSTSET_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=8071583, per_space=40221, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_READBIT_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingReadBitPolicyScriptV3.plutus" +MINTING_READBIT_V3 = PlutusScriptData( + script_file=MINTING_READBIT_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=15272720, per_space=82724, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_REPLICATEBYTE_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingReplicateBytePolicyScriptV3.plutus" +MINTING_REPLICATEBYTE_V3 = PlutusScriptData( + script_file=MINTING_REPLICATEBYTE_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=4549650, per_space=22946, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_ROTATEBYTESTRING_PLUTUS_V3 = ( + SCRIPTS_V3_DIR / "succeedingRotateByteStringPolicyScriptV3.plutus" +) +MINTING_ROTATEBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_ROTATEBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost( + per_time=22778618, per_space=109004, fixed_cost=UNKNOWN_FIXED_COST + ), +) + +MINTING_SHIFTBYTESTRING_PLUTUS_V3 = ( + SCRIPTS_V3_DIR / "succeedingShiftByteStringPolicyScriptV3.plutus" +) +MINTING_SHIFTBYTESTRING_V3 = PlutusScriptData( + script_file=MINTING_SHIFTBYTESTRING_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=17922844, per_space=85787, fixed_cost=UNKNOWN_FIXED_COST), +) + +MINTING_WRITEBITS_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingWriteBitsPolicyScriptV3.plutus" +MINTING_WRITEBITS_V3 = 
PlutusScriptData( + script_file=MINTING_WRITEBITS_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost( + per_time=90646820, per_space=462457, fixed_cost=UNKNOWN_FIXED_COST + ), +) + +# ----- All succeeding bitwise tests ----- # + +SUCCEEDING_MINTING_BITWISE_SCRIPTS_V3 = ( + MINTING_ANDBYTESTRING_V3, + MINTING_ORBYTESTRING_V3, + MINTING_XORBYTESTRING_V3, + MINTING_COMPLEMENTBYTESTRING_V3, + MINTING_COUNTSETBITS_V3, + MINTING_FINDFIRSTSET_V3, + MINTING_READBIT_V3, + MINTING_REPLICATEBYTE_V3, + MINTING_ROTATEBYTESTRING_V3, + MINTING_SHIFTBYTESTRING_V3, + MINTING_WRITEBITS_V3, +) + + +# ----- All failing bitwise tests ----- # + +FAILING_BITWISE_SCRIPT_FILES_V3 = ( + "failingReadBitPolicyScriptV3_1.plutus", + "failingReadBitPolicyScriptV3_2.plutus", + "failingReadBitPolicyScriptV3_3.plutus", + "failingReadBitPolicyScriptV3_4.plutus", + "failingReadBitPolicyScriptV3_5.plutus", + "failingReadBitPolicyScriptV3_6.plutus", + "failingReadBitPolicyScriptV3_7.plutus", + "failingReadBitPolicyScriptV3_8.plutus", + "failingReadBitPolicyScriptV3_9.plutus", + "failingReadBitPolicyScriptV3_10.plutus", + "failingReadBitPolicyScriptV3_11.plutus", + "failingReadBitPolicyScriptV3_12.plutus", + "failingReadBitPolicyScriptV3_13.plutus", + "failingReadBitPolicyScriptV3_14.plutus", + "failingReplicateBytePolicyScriptV3_1.plutus", + "failingReplicateBytePolicyScriptV3_2.plutus", + "failingReplicateBytePolicyScriptV3_3.plutus", + "failingReplicateBytePolicyScriptV3_4.plutus", + "failingReplicateBytePolicyScriptV3_5.plutus", + "failingReplicateBytePolicyScriptV3_6.plutus", + "failingWriteBitsPolicyScriptV3_1.plutus", + "failingWriteBitsPolicyScriptV3_2.plutus", + "failingWriteBitsPolicyScriptV3_3.plutus", + "failingWriteBitsPolicyScriptV3_4.plutus", + "failingWriteBitsPolicyScriptV3_5.plutus", + "failingWriteBitsPolicyScriptV3_6.plutus", + "failingWriteBitsPolicyScriptV3_7.plutus", + "failingWriteBitsPolicyScriptV3_8.plutus", + 
"failingWriteBitsPolicyScriptV3_9.plutus", + "failingWriteBitsPolicyScriptV3_10.plutus", + "failingWriteBitsPolicyScriptV3_11.plutus", + "failingWriteBitsPolicyScriptV3_12.plutus", + "failingWriteBitsPolicyScriptV3_13.plutus", + "failingWriteBitsPolicyScriptV3_14.plutus", + "failingWriteBitsPolicyScriptV3_15.plutus", + "failingWriteBitsPolicyScriptV3_16.plutus", + "failingWriteBitsPolicyScriptV3_17.plutus", + "failingWriteBitsPolicyScriptV3_18.plutus", + "failingWriteBitsPolicyScriptV3_19.plutus", +) + +# We're not currently checking the costs (and it seems to be difficult when the +# script fails anyway), so the values here don't really matter. +UNDETERMINED_COST = ExecutionCost(per_time=1_000_000, per_space=100_000, fixed_cost=1234) + + +FAILING_MINTING_BITWISE_SCRIPTS_V3 = ( + PlutusScriptData( + script_file=SCRIPTS_V3_DIR / n, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=UNDETERMINED_COST, + ) + for n in FAILING_BITWISE_SCRIPT_FILES_V3 +) + +MINTING_RIPEMD_160_PLUTUS_V3 = SCRIPTS_V3_DIR / "succeedingRipemd_160Policy.plutus" +MINTING_RIPEMD_160_V3 = PlutusScriptData( + script_file=MINTING_RIPEMD_160_PLUTUS_V3, + script_type=clusterlib.ScriptTypes.PLUTUS_V3, + execution_cost=ExecutionCost(per_time=6598460, per_space=14710, fixed_cost=UNKNOWN_FIXED_COST), +) + +SUCCEEDING_MINTING_RIPEMD_160_SCRIPTS_V3 = (MINTING_RIPEMD_160_V3,) + + @dataclasses.dataclass(frozen=True, order=True) class PlutusOp: script_file: clusterlib.FileType diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index 589a87b39..c8e6811d7 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -1404,3 +1404,174 @@ def test_duplicated_collateral( assert ( return_collateral + total_collateral == collateral_utxos[0].amount ), "Return collateral amount is wrong" + + +@common.SKIPIF_PLUTUSV3_UNUSABLE +class 
TestPlutusBatch5V3Builtins: + """Tests for batch5 of Plutus Core built-in functions.""" + + success_scripts = ( + *plutus_common.SUCCEEDING_MINTING_RIPEMD_160_SCRIPTS_V3, + *plutus_common.SUCCEEDING_MINTING_BITWISE_SCRIPTS_V3, + ) + fail_scripts = plutus_common.FAILING_MINTING_BITWISE_SCRIPTS_V3 + + @pytest.fixture + def skip_bootstrap( + self, + cluster: clusterlib.ClusterLib, + ) -> None: + pparams = cluster.g_query.get_protocol_params() + if pparams["protocolVersion"]["major"] < 10 or len(pparams["costModels"]["PlutusV3"]) < 297: + pytest.skip("Needs to run on PV10+ with updated PlutusV3 cost model.") + + @pytest.fixture + def payment_addrs( + self, + skip_bootstrap: None, # noqa: ARG002 + cluster_manager: cluster_management.ClusterManager, + cluster: clusterlib.ClusterLib, + ) -> tp.List[clusterlib.AddressRecord]: + """Create new payment address.""" + test_id = common.get_test_id(cluster) + addrs = clusterlib_utils.create_payment_addr_records( + *[f"{test_id}_payment_addr_{i}" for i in range(2)], + cluster_obj=cluster, + ) + + # Fund source address + clusterlib_utils.fund_from_faucet( + addrs[0], + cluster_obj=cluster, + all_faucets=cluster_manager.cache.addrs_data, + amount=100_000_000, + ) + + return addrs + + def run_scenario( + self, + cluster_obj: clusterlib.ClusterLib, + payment_addrs: tp.List[clusterlib.AddressRecord], + plutus_v_record: plutus_common.PlutusScriptData, + success_expected: bool, + ): + """Run an e2e test for a Plutus builtin.""" + temp_template = common.get_test_id(cluster_obj) + + payment_addr = payment_addrs[0] + issuer_addr = payment_addrs[1] + + lovelace_amount = 2_000_000 + token_amount = 5 + script_fund = 20_000_000 + + minting_cost = plutus_common.compute_cost( + execution_cost=plutus_v_record.execution_cost, + protocol_params=cluster_obj.g_query.get_protocol_params(), + ) + + # Step 1: fund the token issuer and create UTXO for collaterals + + mint_utxos, collateral_utxos, tx_output_step1 = mint_build._fund_issuer( + 
cluster_obj=cluster_obj, + temp_template=temp_template, + payment_addr=payment_addr, + issuer_addr=issuer_addr, + minting_cost=minting_cost, + amount=script_fund, + ) + + # Step 2: mint the "qacoin" + + policyid = cluster_obj.g_transaction.get_policyid(plutus_v_record.script_file) + asset_name = f"qacoin{clusterlib.get_rand_str(4)}".encode().hex() + token = f"{policyid}.{asset_name}" + mint_txouts = [ + clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token) + ] + + plutus_mint_data = [ + clusterlib.Mint( + txouts=mint_txouts, + script_file=plutus_v_record.script_file, + collaterals=collateral_utxos, + redeemer_file=plutus_common.REDEEMER_42, + ) + ] + + tx_files_step2 = clusterlib.TxFiles( + signing_key_files=[issuer_addr.skey_file], + ) + txouts_step2 = [ + clusterlib.TxOut(address=issuer_addr.address, amount=lovelace_amount), + *mint_txouts, + ] + + try: + tx_output_step2 = clusterlib_utils.build_and_submit_tx( + cluster_obj=cluster_obj, + name_template=f"{temp_template}_step2", + src_address=payment_addr.address, + use_build_cmd=True, + tx_files=tx_files_step2, + txins=mint_utxos, + txouts=txouts_step2, + mint=plutus_mint_data, + ) + except clusterlib.CLIError as excp: + if success_expected: + raise + if "The machine terminated because of an error" in str(excp): + return + raise + + out_utxos = cluster_obj.g_query.get_utxo(tx_raw_output=tx_output_step2) + token_utxo = clusterlib.filter_utxos( + utxos=out_utxos, address=issuer_addr.address, coin=token + ) + assert token_utxo and token_utxo[0].amount == token_amount, "The token was not minted" + + @allure.link(helpers.get_vcs_link()) + @pytest.mark.parametrize( + "script", + success_scripts, + ids=(s.script_file.name for s in success_scripts), + ) + @pytest.mark.smoke + def test_plutus_success( + self, + skip_bootstrap: None, # noqa: ARG002 + cluster: clusterlib.ClusterLib, + payment_addrs: tp.List[clusterlib.AddressRecord], + script: plutus_common.PlutusScriptData, + ): + """Test scenarios 
that are supposed to succeed.""" + self.run_scenario( + cluster_obj=cluster, + payment_addrs=payment_addrs, + plutus_v_record=script, + success_expected=True, + ) + + @allure.link(helpers.get_vcs_link()) + @pytest.mark.parametrize( + "script", + fail_scripts, + ids=(s.script_file.name for s in fail_scripts), + ) + @pytest.mark.smoke + def test_plutus_fail( + self, + skip_bootstrap: None, # noqa: ARG002 + cluster: clusterlib.ClusterLib, + payment_addrs: tp.List[clusterlib.AddressRecord], + script: plutus_common.PlutusScriptData, + ): + """Test scenarios that are supposed to fail.""" + self.run_scenario( + cluster_obj=cluster, + payment_addrs=payment_addrs, + plutus_v_record=script, + success_expected=False, + ) From bf028dfe09ea3250154b0284c3d4d803e2e2e301 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 31 Oct 2024 18:57:51 +0100 Subject: [PATCH 061/168] feat: add PlutusV3 cost model for new builtins This cost model is used in protocol version 10. --- .../conway/genesis.conway.spec.pv10.json | 338 ++++++++++++++++++ .../cluster_scripts/conway/start-cluster | 6 + .../conway_fast/genesis.conway.spec.pv10.json | 338 ++++++++++++++++++ .../cluster_scripts/conway_fast/start-cluster | 6 + .../genesis.conway.spec.pv10.json | 338 ++++++++++++++++++ .../mainnet_fast/start-cluster | 6 + 6 files changed, 1032 insertions(+) create mode 100644 cardano_node_tests/cluster_scripts/conway/genesis.conway.spec.pv10.json create mode 100644 cardano_node_tests/cluster_scripts/conway_fast/genesis.conway.spec.pv10.json create mode 100644 cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.pv10.json diff --git a/cardano_node_tests/cluster_scripts/conway/genesis.conway.spec.pv10.json b/cardano_node_tests/cluster_scripts/conway/genesis.conway.spec.pv10.json new file mode 100644 index 000000000..5ce74cb95 --- /dev/null +++ b/cardano_node_tests/cluster_scripts/conway/genesis.conway.spec.pv10.json @@ -0,0 +1,338 @@ +{ + "poolVotingThresholds": { + "motionNoConfidence": 
0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "hardForkInitiation": 0.51, + "ppSecurityGroup": 0.51 + }, + "dRepVotingThresholds": { + "motionNoConfidence": 0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "updateToConstitution": 0.51, + "hardForkInitiation": 0.51, + "ppNetworkGroup": 0.51, + "ppEconomicGroup": 0.51, + "ppTechnicalGroup": 0.51, + "ppGovGroup": 0.51, + "treasuryWithdrawal": 0.51 + }, + "committeeMinSize": 0, + "committeeMaxTermLength": 11000, + "govActionLifetime": 2, + "govActionDeposit": 100000000, + "dRepDeposit": 2000000, + "dRepActivity": 100, + "minFeeRefScriptCostPerByte": 0, + "plutusV3CostModel": [ + 100788, + 420, + 1, + 1, + 1000, + 173, + 0, + 1, + 1000, + 59957, + 4, + 1, + 11183, + 32, + 201305, + 8356, + 4, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 100, + 100, + 16000, + 100, + 94375, + 32, + 132994, + 32, + 61462, + 4, + 72010, + 178, + 0, + 1, + 22151, + 32, + 91189, + 769, + 4, + 2, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 1000, + 42921, + 4, + 2, + 24548, + 29498, + 38, + 1, + 898148, + 27279, + 1, + 51775, + 558, + 1, + 39184, + 1000, + 60594, + 1, + 141895, + 32, + 83150, + 32, + 15299, + 32, + 76049, + 1, + 13169, + 4, + 22100, + 10, + 28999, + 74, + 1, + 28999, + 74, + 1, + 43285, + 552, + 1, + 44749, + 541, + 1, + 33852, + 32, + 68246, + 32, + 72362, + 32, + 7243, + 32, + 7391, + 32, + 11546, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 90434, + 519, + 0, + 1, + 74433, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 955506, + 213312, + 0, + 2, + 270652, + 22588, + 4, + 1457325, + 64566, + 4, + 20467, + 1, + 4, + 0, + 141992, + 32, + 100788, + 420, + 1, + 1, + 81663, + 32, + 59498, + 32, + 20142, + 32, + 24588, + 32, + 20744, + 32, + 25933, 
+ 32, + 24623, + 32, + 43053543, + 10, + 53384111, + 14333, + 10, + 43574283, + 26308, + 10, + 16000, + 100, + 16000, + 100, + 962335, + 18, + 2780678, + 6, + 442008, + 1, + 52538055, + 3756, + 18, + 267929, + 18, + 76433006, + 8868, + 18, + 52948122, + 18, + 1995836, + 36, + 3227919, + 12, + 901022, + 1, + 166917843, + 4307, + 36, + 284546, + 36, + 158221314, + 26549, + 36, + 74698472, + 36, + 333849714, + 1, + 254006273, + 72, + 2174038, + 72, + 2261318, + 64571, + 4, + 207616, + 8310, + 4, + 1293828, + 28716, + 63, + 0, + 1, + 1006041, + 43623, + 251, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 107878, + 680, + 0, + 1, + 95336, + 1, + 281145, + 18848, + 0, + 1, + 180194, + 159, + 1, + 1, + 158519, + 8942, + 0, + 1, + 159378, + 8813, + 0, + 1, + 107490, + 3298, + 1, + 106057, + 655, + 1, + 1964219, + 24520, + 3 + ], + "constitution": { + "anchor": { + "url": "", + "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" + } + }, + "committee": { + "members": { + }, + "threshold": 0.0 + } +} diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 4b15c39b0..59ed6718d 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -198,6 +198,12 @@ cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/shelley/" +if [ -n "${PV10:-""}" ]; then + mv \ + "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ + "${STATE_CLUSTER}/shelley/genesis.conway.spec.json" +fi + if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do diff --git a/cardano_node_tests/cluster_scripts/conway_fast/genesis.conway.spec.pv10.json 
b/cardano_node_tests/cluster_scripts/conway_fast/genesis.conway.spec.pv10.json new file mode 100644 index 000000000..5ce74cb95 --- /dev/null +++ b/cardano_node_tests/cluster_scripts/conway_fast/genesis.conway.spec.pv10.json @@ -0,0 +1,338 @@ +{ + "poolVotingThresholds": { + "motionNoConfidence": 0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "hardForkInitiation": 0.51, + "ppSecurityGroup": 0.51 + }, + "dRepVotingThresholds": { + "motionNoConfidence": 0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "updateToConstitution": 0.51, + "hardForkInitiation": 0.51, + "ppNetworkGroup": 0.51, + "ppEconomicGroup": 0.51, + "ppTechnicalGroup": 0.51, + "ppGovGroup": 0.51, + "treasuryWithdrawal": 0.51 + }, + "committeeMinSize": 0, + "committeeMaxTermLength": 11000, + "govActionLifetime": 2, + "govActionDeposit": 100000000, + "dRepDeposit": 2000000, + "dRepActivity": 100, + "minFeeRefScriptCostPerByte": 0, + "plutusV3CostModel": [ + 100788, + 420, + 1, + 1, + 1000, + 173, + 0, + 1, + 1000, + 59957, + 4, + 1, + 11183, + 32, + 201305, + 8356, + 4, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 100, + 100, + 16000, + 100, + 94375, + 32, + 132994, + 32, + 61462, + 4, + 72010, + 178, + 0, + 1, + 22151, + 32, + 91189, + 769, + 4, + 2, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 1000, + 42921, + 4, + 2, + 24548, + 29498, + 38, + 1, + 898148, + 27279, + 1, + 51775, + 558, + 1, + 39184, + 1000, + 60594, + 1, + 141895, + 32, + 83150, + 32, + 15299, + 32, + 76049, + 1, + 13169, + 4, + 22100, + 10, + 28999, + 74, + 1, + 28999, + 74, + 1, + 43285, + 552, + 1, + 44749, + 541, + 1, + 33852, + 32, + 68246, + 32, + 72362, + 32, + 7243, + 32, + 7391, + 32, + 11546, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 90434, + 519, + 0, + 1, + 74433, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 85848, 
+ 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 955506, + 213312, + 0, + 2, + 270652, + 22588, + 4, + 1457325, + 64566, + 4, + 20467, + 1, + 4, + 0, + 141992, + 32, + 100788, + 420, + 1, + 1, + 81663, + 32, + 59498, + 32, + 20142, + 32, + 24588, + 32, + 20744, + 32, + 25933, + 32, + 24623, + 32, + 43053543, + 10, + 53384111, + 14333, + 10, + 43574283, + 26308, + 10, + 16000, + 100, + 16000, + 100, + 962335, + 18, + 2780678, + 6, + 442008, + 1, + 52538055, + 3756, + 18, + 267929, + 18, + 76433006, + 8868, + 18, + 52948122, + 18, + 1995836, + 36, + 3227919, + 12, + 901022, + 1, + 166917843, + 4307, + 36, + 284546, + 36, + 158221314, + 26549, + 36, + 74698472, + 36, + 333849714, + 1, + 254006273, + 72, + 2174038, + 72, + 2261318, + 64571, + 4, + 207616, + 8310, + 4, + 1293828, + 28716, + 63, + 0, + 1, + 1006041, + 43623, + 251, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 107878, + 680, + 0, + 1, + 95336, + 1, + 281145, + 18848, + 0, + 1, + 180194, + 159, + 1, + 1, + 158519, + 8942, + 0, + 1, + 159378, + 8813, + 0, + 1, + 107490, + 3298, + 1, + 106057, + 655, + 1, + 1964219, + 24520, + 3 + ], + "constitution": { + "anchor": { + "url": "", + "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" + } + }, + "committee": { + "members": { + }, + "threshold": 0.0 + } +} diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index f6cac4976..63ce1e2c3 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -136,6 +136,12 @@ cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" +if [ -n "${PV10:-""}" ]; then + mv \ + "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ + 
"${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" +fi + if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.pv10.json b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.pv10.json new file mode 100644 index 000000000..5ce74cb95 --- /dev/null +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/genesis.conway.spec.pv10.json @@ -0,0 +1,338 @@ +{ + "poolVotingThresholds": { + "motionNoConfidence": 0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "hardForkInitiation": 0.51, + "ppSecurityGroup": 0.51 + }, + "dRepVotingThresholds": { + "motionNoConfidence": 0.51, + "committeeNormal": 0.51, + "committeeNoConfidence": 0.51, + "updateToConstitution": 0.51, + "hardForkInitiation": 0.51, + "ppNetworkGroup": 0.51, + "ppEconomicGroup": 0.51, + "ppTechnicalGroup": 0.51, + "ppGovGroup": 0.51, + "treasuryWithdrawal": 0.51 + }, + "committeeMinSize": 0, + "committeeMaxTermLength": 11000, + "govActionLifetime": 2, + "govActionDeposit": 100000000, + "dRepDeposit": 2000000, + "dRepActivity": 100, + "minFeeRefScriptCostPerByte": 0, + "plutusV3CostModel": [ + 100788, + 420, + 1, + 1, + 1000, + 173, + 0, + 1, + 1000, + 59957, + 4, + 1, + 11183, + 32, + 201305, + 8356, + 4, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 16000, + 100, + 100, + 100, + 16000, + 100, + 94375, + 32, + 132994, + 32, + 61462, + 4, + 72010, + 178, + 0, + 1, + 22151, + 32, + 91189, + 769, + 4, + 2, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 1000, + 42921, + 4, + 2, + 24548, + 29498, + 38, + 1, + 898148, + 27279, + 1, + 51775, + 558, + 1, + 39184, + 1000, + 60594, + 1, + 141895, + 32, + 83150, + 32, + 15299, + 32, + 76049, + 1, + 13169, + 4, + 22100, + 10, + 28999, + 74, + 1, + 28999, + 74, + 1, + 43285, + 552, + 1, + 44749, + 541, + 1, + 33852, + 32, + 
68246, + 32, + 72362, + 32, + 7243, + 32, + 7391, + 32, + 11546, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 90434, + 519, + 0, + 1, + 74433, + 32, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 1, + 85848, + 123203, + 7305, + -900, + 1716, + 549, + 57, + 85848, + 0, + 1, + 955506, + 213312, + 0, + 2, + 270652, + 22588, + 4, + 1457325, + 64566, + 4, + 20467, + 1, + 4, + 0, + 141992, + 32, + 100788, + 420, + 1, + 1, + 81663, + 32, + 59498, + 32, + 20142, + 32, + 24588, + 32, + 20744, + 32, + 25933, + 32, + 24623, + 32, + 43053543, + 10, + 53384111, + 14333, + 10, + 43574283, + 26308, + 10, + 16000, + 100, + 16000, + 100, + 962335, + 18, + 2780678, + 6, + 442008, + 1, + 52538055, + 3756, + 18, + 267929, + 18, + 76433006, + 8868, + 18, + 52948122, + 18, + 1995836, + 36, + 3227919, + 12, + 901022, + 1, + 166917843, + 4307, + 36, + 284546, + 36, + 158221314, + 26549, + 36, + 74698472, + 36, + 333849714, + 1, + 254006273, + 72, + 2174038, + 72, + 2261318, + 64571, + 4, + 207616, + 8310, + 4, + 1293828, + 28716, + 63, + 0, + 1, + 1006041, + 43623, + 251, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 100181, + 726, + 719, + 0, + 1, + 107878, + 680, + 0, + 1, + 95336, + 1, + 281145, + 18848, + 0, + 1, + 180194, + 159, + 1, + 1, + 158519, + 8942, + 0, + 1, + 159378, + 8813, + 0, + 1, + 107490, + 3298, + 1, + 106057, + 655, + 1, + 1964219, + 24520, + 3 + ], + "constitution": { + "anchor": { + "url": "", + "dataHash": "0000000000000000000000000000000000000000000000000000000000000000" + } + }, + "committee": { + "members": { + }, + "threshold": 0.0 + } +} diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index f6cac4976..63ce1e2c3 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -136,6 +136,12 
@@ cp "${SCRIPT_DIR}/submit-api-config.json" "$STATE_CLUSTER" cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" +if [ -n "${PV10:-""}" ]; then + mv \ + "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ + "${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" +fi + if [ -z "${ENABLE_LEGACY:-""}" ]; then # use P2P topology files for tconf in "$SCRIPT_DIR"/p2p-topology-*.json; do From 04e1d4566ab00b5ee89d51fbe805db845eb589f3 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 1 Nov 2024 12:12:08 +0100 Subject: [PATCH 062/168] fix(tests): update script file identifiers - Change FAILING_MINTING_BITWISE_SCRIPTS_V3 to tuple instead of generator - Update pytest parametrize ids to use script_file.stem instead of name This ensures better readability and consistency in test identifiers. --- cardano_node_tests/tests/plutus_common.py | 2 +- cardano_node_tests/tests/tests_plutus/test_mint_build.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 0e9bab9c1..824abf89a 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -446,7 +446,7 @@ class PlutusScriptData: UNDETERMINED_COST = ExecutionCost(per_time=1_000_000, per_space=100_000, fixed_cost=1234) -FAILING_MINTING_BITWISE_SCRIPTS_V3 = ( +FAILING_MINTING_BITWISE_SCRIPTS_V3 = tuple( PlutusScriptData( script_file=SCRIPTS_V3_DIR / n, script_type=clusterlib.ScriptTypes.PLUTUS_V3, diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index c8e6811d7..8eac3d7c1 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -1536,7 +1536,7 @@ def run_scenario( @pytest.mark.parametrize( "script", success_scripts, - ids=(s.script_file.name for s in 
success_scripts), + ids=(s.script_file.stem for s in success_scripts), ) @pytest.mark.smoke def test_plutus_success( @@ -1558,7 +1558,7 @@ def test_plutus_success( @pytest.mark.parametrize( "script", fail_scripts, - ids=(s.script_file.name for s in fail_scripts), + ids=(s.script_file.stem for s in fail_scripts), ) @pytest.mark.smoke def test_plutus_fail( From d53814d547d0b94fdf259a3051345b55b855aac5 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 1 Nov 2024 12:18:54 +0100 Subject: [PATCH 063/168] feat: add team_plutus marker to tests and workflows - Added `team_plutus` marker to the regression-dbsync and regression workflows. - Marked relevant tests in `test_mint_build.py` with `@pytest.mark.team_plutus`. - Updated `pyproject.toml` to include the new `team_plutus` marker. --- .github/workflows/regression-dbsync.yaml | 1 + .github/workflows/regression.yaml | 1 + cardano_node_tests/tests/tests_plutus/test_mint_build.py | 2 ++ pyproject.toml | 1 + 4 files changed, 5 insertions(+) diff --git a/.github/workflows/regression-dbsync.yaml b/.github/workflows/regression-dbsync.yaml index 627b29b23..2c7114058 100644 --- a/.github/workflows/regression-dbsync.yaml +++ b/.github/workflows/regression-dbsync.yaml @@ -33,6 +33,7 @@ on: - smoke - plutus - plutus and smoke + - team_plutus - not long - conway only - dbsync and smoke diff --git a/.github/workflows/regression.yaml b/.github/workflows/regression.yaml index e61abea03..12323335c 100644 --- a/.github/workflows/regression.yaml +++ b/.github/workflows/regression.yaml @@ -29,6 +29,7 @@ on: - smoke - plutus - plutus and smoke + - team_plutus - not long - conway only default: all diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index 8eac3d7c1..28cbb7fdc 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -1538,6 +1538,7 @@ def run_scenario( success_scripts, 
ids=(s.script_file.stem for s in success_scripts), ) + @pytest.mark.team_plutus @pytest.mark.smoke def test_plutus_success( self, @@ -1560,6 +1561,7 @@ def test_plutus_success( fail_scripts, ids=(s.script_file.stem for s in fail_scripts), ) + @pytest.mark.team_plutus @pytest.mark.smoke def test_plutus_fail( self, diff --git a/pyproject.toml b/pyproject.toml index eb3fb1b5d..92f96778d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -135,6 +135,7 @@ markers = [ "smoke: fast test(s) under 1 minute", "upgrade: test(s) for upgrade testing", "plutus: test(s) for plutus", + "team_plutus: test(s) from Plutus dev team", "disabled: temporarily disabled test(s)", "noop: placeholder marker", ] From 74690845dc3346d4f2d67812fd05aa2f2e75a4b5 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 1 Nov 2024 16:58:51 +0100 Subject: [PATCH 064/168] feat(docs): add test results for tag 10.1.1 - Added new file `tag_10_1_1.rst` with regression and other test results - Updated `tag_tests.rst` to include the new test results file - Included links to release notes, tag commits, and detailed test results - Documented new issues and breaking changes in the release --- .../source/test_results/node/tag_10_1_1.rst | 70 +++++++++++++++++++ src_docs/source/test_results/tag_tests.rst | 1 + 2 files changed, 71 insertions(+) create mode 100644 src_docs/source/test_results/node/tag_10_1_1.rst diff --git a/src_docs/source/test_results/node/tag_10_1_1.rst b/src_docs/source/test_results/node/tag_10_1_1.rst new file mode 100644 index 000000000..2473b8931 --- /dev/null +++ b/src_docs/source/test_results/node/tag_10_1_1.rst @@ -0,0 +1,70 @@ +10.1.1 +====== + +* Release notes - +* Tag commits - + + +Regression testing on a local cluster +------------------------------------- + +.. list-table:: Regression Testsuite + :widths: 64 7 + :header-rows: 0 + + * - P2P ON - `Conway PV9 `__ + - |:heavy_check_mark:| + * - Mix P2P and Legacy - `Conway PV10 `__ + - |:heavy_check_mark:| + +.. 
list-table:: Other Testing + :widths: 64 7 + :header-rows: 0 + + * - Upgrade testing (9.2.1 to 10.1.1) + - |:heavy_check_mark:| + * - Rollback testing + - |:heavy_check_mark:| + * - Reconnection testing + - |:heavy_check_mark:| + * - Block production testing on network with 10 pools, 5 of them P2P, 5 of them Legacy - `results (sqlite db) `__ + - |:heavy_check_mark:| + * - Sanity checks of the submit-api REST service + - |:heavy_check_mark:| + * - P2P Dynamic Block Production testing + - |:heavy_check_mark:| + + +Release testing checklist +------------------------- + +.. list-table:: + :widths: 64 7 + :header-rows: 0 + + * - `10.1.1` pushed to `preview` + - |:heavy_check_mark:| + * - Regression testing against `preview` + - |:hourglass_flowing_sand:| + * - `Sync testing ran against Mainnet (Linux) `__ + - |:hourglass_flowing_sand:| + * - DB re-validation testing (ledger snapshots compatibility) + - |:heavy_check_mark:| + * - Backward compatibility testing (Node with version N-1) + - |:heavy_check_mark:| + * - Check build instructions changes + - |:hourglass_flowing_sand:| + + +New functionalities in this tag +------------------------------- + + +New issues +---------- + +* `Era agnostic queries missing from top level in cardano-cli. `__ + + +Breaking changes +---------------- diff --git a/src_docs/source/test_results/tag_tests.rst b/src_docs/source/test_results/tag_tests.rst index 476a5441c..52171d616 100644 --- a/src_docs/source/test_results/tag_tests.rst +++ b/src_docs/source/test_results/tag_tests.rst @@ -4,6 +4,7 @@ Tag Testing .. toctree:: :maxdepth: 4 + node/tag_10_1_1.rst node/tag_9_0_0.rst node/tag_8_11_0.rst node/tag_8_9_3.rst From 4ffe92e75f7aab942f546ac8f571b70672016786 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 1 Nov 2024 17:14:50 +0100 Subject: [PATCH 065/168] feat(docs): add test results for node tag 10.1.2 - Added new file `tag_10_1_2.rst` with test results for node tag 10.1.2. 
- Updated `tag_tests.rst` to include the new test results file. - Documented regression testing, other testing, and release testing checklist for the new tag. --- .../source/test_results/node/tag_10_1_2.rst | 73 +++++++++++++++++++ src_docs/source/test_results/tag_tests.rst | 1 + 2 files changed, 74 insertions(+) create mode 100644 src_docs/source/test_results/node/tag_10_1_2.rst diff --git a/src_docs/source/test_results/node/tag_10_1_2.rst b/src_docs/source/test_results/node/tag_10_1_2.rst new file mode 100644 index 000000000..f6708ec87 --- /dev/null +++ b/src_docs/source/test_results/node/tag_10_1_2.rst @@ -0,0 +1,73 @@ +10.1.2 +====== + +* Release notes - +* Tag commits - + + +This is a minor update to 10.1.1 that fixes `cardano-cli issue `__ where era agnostic queries were missing from top level. + +Testing done for 10.1.1 release is valid for 10.1.2 as well. + + +Regression testing on a local cluster +------------------------------------- + +.. list-table:: Regression Testsuite + :widths: 64 7 + :header-rows: 0 + + * - P2P ON - `Conway PV9 `__ + - |:heavy_check_mark:| + * - P2P ON - `Conway PV10 `__ + - |:heavy_check_mark:| + +.. list-table:: Other Testing + :widths: 64 7 + :header-rows: 0 + + * - Upgrade testing (10.1.1 to 10.1.2) + - |:hourglass_flowing_sand:| + * - Rollback testing + - |:hourglass_flowing_sand:| + * - Reconnection testing + - |:hourglass_flowing_sand:| + * - Block production testing on network with 10 pools, 5 of them P2P, 5 of them Legacy - `results (sqlite db) `__ + - |:hourglass_flowing_sand:| + * - Sanity checks of the submit-api REST service + - |:heavy_check_mark:| + * - P2P Dynamic Block Production testing + - |:heavy_check_mark:| + + +Release testing checklist +------------------------- + +.. 
list-table:: + :widths: 64 7 + :header-rows: 0 + + * - `10.1.2` pushed to `preview` + - |:heavy_check_mark:| + * - Regression testing against `preview` + - |:hourglass_flowing_sand:| + * - `Sync testing ran against Mainnet (Linux) `__ + - |:hourglass_flowing_sand:| + * - DB re-validation testing (ledger snapshots compatibility) + - |:hourglass_flowing_sand:| + * - Backward compatibility testing (Node with version N-1) + - |:hourglass_flowing_sand:| + * - Check build instructions changes + - |:hourglass_flowing_sand:| + + +New functionalities in this tag +------------------------------- + + +New issues +---------- + + +Breaking changes +---------------- diff --git a/src_docs/source/test_results/tag_tests.rst b/src_docs/source/test_results/tag_tests.rst index 52171d616..bcead3c71 100644 --- a/src_docs/source/test_results/tag_tests.rst +++ b/src_docs/source/test_results/tag_tests.rst @@ -4,6 +4,7 @@ Tag Testing .. toctree:: :maxdepth: 4 + node/tag_10_1_2.rst node/tag_10_1_1.rst node/tag_9_0_0.rst node/tag_8_11_0.rst From 57043084070b3372ed35f55f09f0d9b65c5338f6 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 1 Nov 2024 18:21:04 +0100 Subject: [PATCH 066/168] fix(dbsync): filter tx collaterals by Lovelace Updated the `check_tx_collaterals` function to filter transaction collaterals by Lovelace. This change ensures that only collateral UTxOs with Lovelace are considered, while multi-assets in collateral inputs are not yet supported. TODO: Add support for multi-assets in collateral inputs. 
--- cardano_node_tests/utils/dbsync_check_tx.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/utils/dbsync_check_tx.py b/cardano_node_tests/utils/dbsync_check_tx.py index aebfb0ba6..c2e7c2db6 100644 --- a/cardano_node_tests/utils/dbsync_check_tx.py +++ b/cardano_node_tests/utils/dbsync_check_tx.py @@ -351,7 +351,9 @@ def check_tx_collaterals( *tx_raw_output.script_withdrawals, ) ] - tx_collaterals = set(itertools.chain.from_iterable(tx_collaterals_nested)) + tx_collaterals_flat = set(itertools.chain.from_iterable(tx_collaterals_nested)) + # TODO: support multi-assets in collateral inputs + tx_collaterals = {r for r in tx_collaterals_flat if r.coin == clusterlib.DEFAULT_COIN} db_collaterals = {utxorecord2utxodata(utxorecord=r) for r in response.collaterals} assert ( From cb00576e3962d7f562f5e4b11865978d9d6b1502 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 11:18:04 +0100 Subject: [PATCH 067/168] feat(cluster): change mv to cp for genesis spec files Changed the command from mv to cp for genesis.conway.spec.pv10.json in start-cluster scripts. This ensures the original file remains intact while copying it to the target directory. This change affects the conway, conway_fast, and mainnet_fast cluster scripts. 
--- cardano_node_tests/cluster_scripts/conway/start-cluster | 2 +- cardano_node_tests/cluster_scripts/conway_fast/start-cluster | 2 +- cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 59ed6718d..7b6d3a740 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -199,7 +199,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/shelley/" if [ -n "${PV10:-""}" ]; then - mv \ + cp \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ "${STATE_CLUSTER}/shelley/genesis.conway.spec.json" fi diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 63ce1e2c3..8062b097b 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -137,7 +137,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -n "${PV10:-""}" ]; then - mv \ + cp \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ "${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" fi diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 63ce1e2c3..8062b097b 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -137,7 +137,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -n "${PV10:-""}" ]; then - mv \ + cp \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ 
"${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" fi From f98e88b7bb88c0ecf9b9766faa4dff6a45635deb Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 12:22:19 +0100 Subject: [PATCH 068/168] refactor(tests): make `test_expired_kes` to run only with HF shortcut --- cardano_node_tests/tests/test_kes.py | 76 ++++++++++++---------------- 1 file changed, 32 insertions(+), 44 deletions(-) diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index 0d5381d1d..d5e8b6f6c 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -29,28 +29,19 @@ LOGGER = logging.getLogger(__name__) -# number of epochs traversed during local cluster startup -# NOTE: must be kept up-to-date -if VERSIONS.cluster_era == VERSIONS.BABBAGE: - NUM_OF_EPOCHS = 8 # PV7 + PV8 -elif VERSIONS.cluster_era == VERSIONS.CONWAY: - NUM_OF_EPOCHS = 9 -else: - msg = f"Unsupported era '{VERSIONS.cluster_era_name}'" - raise AssertionError(msg) - - pytestmark = common.SKIPIF_WRONG_ERA - # TODO: It would be better to use `cluster_nodes.get_cluster_type().uses_shortcut`, but we would # need to get a cluster instance first. That would be too expensive in this module, as we are using # custom startup scripts. 
-SKIPIF_HF_SHORTCUT = pytest.mark.skipif( - "_fast" in configuration.SCRIPTS_DIRNAME, - reason="cannot run on local cluster with HF shortcut", +SKIPIF_NOT_HF_SHORTCUT = pytest.mark.skipif( + "_fast" not in configuration.SCRIPTS_DIRNAME, + reason="Runs only on local cluster with HF shortcut.", ) +# Slot number where KES certificate expires when using `cluster_kes` +KES_EXPIRE_SLOT = 2100 + @pytest.fixture(scope="module") def short_kes_start_cluster() -> pl.Path: @@ -58,12 +49,12 @@ def short_kes_start_cluster() -> pl.Path: shared_tmp = temptools.get_pytest_shared_tmp() max_kes_evolutions = 10 - # need to lock because this same fixture can run on several workers in parallel + # Need to lock because this same fixture can run on several workers in parallel with locking.FileLockIfXdist(f"{shared_tmp}/startup_files_short_kes.lock"): destdir = shared_tmp / "startup_files_short_kes" destdir.mkdir(exist_ok=True) - # return existing script if it is already generated by other worker + # Return existing script if it is already generated by other worker destdir_ls = list(destdir.glob("start-cluster*")) if destdir_ls: return destdir_ls[0] @@ -77,14 +68,7 @@ def short_kes_start_cluster() -> pl.Path: # KES needs to be valid at least until the local cluster is fully started. # We need to calculate how many slots there is from the start of Shelley epoch # until the cluster is fully started. - # Assume k=10, i.e. k * 10 = 100 slots in Byron era. - # Subtract one Byron epoch and current (last) epoch when calculating slots in - # Shelley epochs. 
- epoch_length = genesis_spec["epochLength"] - cluster_start_time_slots = int((NUM_OF_EPOCHS - 2) * epoch_length + 100) - exact_kes_period_slots = int(cluster_start_time_slots / max_kes_evolutions) - - genesis_spec["slotsPerKESPeriod"] = int(exact_kes_period_slots * 1.2) # add buffer + genesis_spec["slotsPerKESPeriod"] = int(KES_EXPIRE_SLOT / max_kes_evolutions) genesis_spec["maxKESEvolutions"] = max_kes_evolutions with open(startup_files.genesis_spec, "w", encoding="utf-8") as fp_out: @@ -153,12 +137,9 @@ class TestKES: MAX_INT_VAL = 2**64 @allure.link(helpers.get_vcs_link()) - @SKIPIF_HF_SHORTCUT - # It takes long time to setup the cluster instance (when starting from Byron). - # We mark the tests as "long" and set the highest priority, so the setup is done at the - # beginning of the testrun, instead of needing to respin a cluster that is already running. - @common.ORDER5_BYRON - @common.LONG_BYRON + @SKIPIF_NOT_HF_SHORTCUT + @pytest.mark.order(5) + @pytest.mark.long def test_expired_kes( self, cluster_kes: clusterlib.ClusterLib, @@ -182,7 +163,7 @@ def test_expired_kes( kes_period_info_errors_list = [] temp_template = common.get_test_id(cluster) - expire_timeout = 200 + expire_slot = KES_EXPIRE_SLOT + 100 expire_node_name = "pool1" expire_pool_num = 1 expire_pool_name = f"node-{expire_node_name}" @@ -192,13 +173,13 @@ def test_expired_kes( ) expire_pool_id_dec = helpers.decode_bech32(expire_pool_id) - # refresh opcert on all pools except of pool1, so KES doesn't expire on those pools and - # the pools keep minting blocks + # Refresh opcert on all pools except of pool1, so KES doesn't expire on those pools and + # the pools keep minting blocks. 
refreshed_nodes = [ f"pool{i}" for i in range(2, len(cluster_management.Resources.ALL_POOLS) + 1) ] - # use socket of pool2 for this test - once bft1 KES expires, bft1 stops syncing + # Use socket of pool2 for this test - once bft1 KES expires, bft1 stops syncing cluster_nodes.set_cluster_env( instance_num=cluster_nodes.get_instance_num(), socket_file_name="pool2.socket" ) @@ -221,6 +202,7 @@ def _save_all_period_info(temp_template: str) -> None: ) def _refresh_opcerts() -> tp.Dict[str, int]: + """Refresh opcert on pools that are not supposed to expire.""" refreshed_nodes_kes_period = {} for n in refreshed_nodes: @@ -249,10 +231,12 @@ def _refresh_opcerts() -> tp.Dict[str, int]: _save_all_metrics(temp_template=f"{temp_template}_{this_epoch}_before_refresh") _save_all_period_info(temp_template=f"{temp_template}_{this_epoch}_before_refresh") + # Refresh opcerts on pools that are not supposed to expire + cluster.wait_for_epoch(epoch_no=1) _refresh_opcerts() expected_err_regexes = ["KESKeyAlreadyPoisoned", "KESCouldNotEvolve"] - # ignore expected errors in bft1 node log file, as bft1 opcert will not get refreshed + # Ignore expected errors in bft1 node log file, as bft1 opcert will not get refreshed logfiles.add_ignore_rule( files_glob="bft1.stdout", regex="|".join(expected_err_regexes), @@ -263,21 +247,21 @@ def _refresh_opcerts() -> tp.Dict[str, int]: regex="TraceNoLedgerView", ignore_file_id=worker_id, ) - # ignore `TraceNoLedgerView` in pool1 node log file as well + # Ignore `TraceNoLedgerView` in pool1 node log file as well logfiles.add_ignore_rule( files_glob="pool1.stdout", regex="TraceNoLedgerView", ignore_file_id=worker_id, ) - # search for expected errors only in log file corresponding to pool with expired KES + # Search for expected errors only in log file corresponding to pool with expired KES expected_errors = [(f"{expire_node_name}.stdout", err) for err in expected_err_regexes] with logfiles.expect_errors(expected_errors, worker_id=worker_id): 
LOGGER.info( f"{datetime.datetime.now(tz=datetime.timezone.utc)}: " - f"Waiting for {expire_timeout} sec for KES expiration." + f"Waiting for slot no {expire_slot} for KES expiration." ) - time.sleep(expire_timeout) + cluster.wait_for_slot(slot=expire_slot) LOGGER.info( f"{datetime.datetime.now(tz=datetime.timezone.utc)}: " f"KES expired (?); tip: '{cluster.g_query.get_tip()}'." @@ -286,6 +270,10 @@ def _refresh_opcerts() -> tp.Dict[str, int]: _save_all_metrics(temp_template=f"{temp_template}_after_expire") _save_all_period_info(temp_template=f"{temp_template}_after_expire") + # Prevent KES from expiring before reaching next epoch on pools that are not + # supposed to expire. + _refresh_opcerts() + this_epoch, is_minting = _check_block_production( cluster_obj=cluster, temp_template=temp_template, @@ -296,12 +284,12 @@ def _refresh_opcerts() -> tp.Dict[str, int]: _save_all_metrics(temp_template=f"{temp_template}_{this_epoch}_before_refresh") _save_all_period_info(temp_template=f"{temp_template}_{this_epoch}_before_refresh") - # check that the pool is not minting any blocks + # Check that the expired pool is not minting any blocks assert ( not is_minting ), f"The pool '{expire_pool_name}' has minted blocks in epoch {this_epoch}" - # refresh opcerts one more time + # Refresh opcerts on pools that are not supposed to expire one more time refreshed_nodes_kes_period = _refresh_opcerts() LOGGER.info( @@ -313,7 +301,7 @@ def _refresh_opcerts() -> tp.Dict[str, int]: _save_all_metrics(temp_template=f"{temp_template}_{this_epoch}_after_refresh") _save_all_period_info(temp_template=f"{temp_template}_{this_epoch}_after_refresh") - # check kes-period-info with an operational certificate with KES expired + # Check kes-period-info with an operational certificate with KES expired kes_info_expired = cluster.g_query.get_kes_period_info( opcert_file=expire_pool_rec["pool_operational_cert"] ) @@ -329,7 +317,7 @@ def _refresh_opcerts() -> tp.Dict[str, int]: ) ) - # check kes-period-info 
with valid operational certificates + # Check kes-period-info with valid operational certificates for idx, n in enumerate(refreshed_nodes): refreshed_pool_rec = cluster_manager.cache.addrs_data[f"node-{n}"] kes_info_valid = cluster.g_query.get_kes_period_info( From 12f80ef2732ae216f3bef85090a146de414d9a97 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 14:49:32 +0100 Subject: [PATCH 069/168] refactor(tests): remove redundant `SKIPIF_NOT_HF_SHORTCUT` Removed the redundant `SKIPIF_NOT_HF_SHORTCUT` decorator and replaced it with an inline skipif condition in the `TestKES` class. Removed unused `MAX_INT_VAL` constant. --- cardano_node_tests/tests/test_kes.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index d5e8b6f6c..a665a5bd5 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -31,14 +31,6 @@ pytestmark = common.SKIPIF_WRONG_ERA -# TODO: It would be better to use `cluster_nodes.get_cluster_type().uses_shortcut`, but we would -# need to get a cluster instance first. That would be too expensive in this module, as we are using -# custom startup scripts. -SKIPIF_NOT_HF_SHORTCUT = pytest.mark.skipif( - "_fast" not in configuration.SCRIPTS_DIRNAME, - reason="Runs only on local cluster with HF shortcut.", -) - # Slot number where KES certificate expires when using `cluster_kes` KES_EXPIRE_SLOT = 2100 @@ -134,10 +126,14 @@ def _check_block_production( class TestKES: """Basic tests for KES period.""" - MAX_INT_VAL = 2**64 - @allure.link(helpers.get_vcs_link()) - @SKIPIF_NOT_HF_SHORTCUT + # It would be better to use `cluster_nodes.get_cluster_type().uses_shortcut`, but we + # would need to get a cluster instance first. That would be too expensive in this test, + # as we are using custom startup scripts. 
+ @pytest.mark.skipif( + "_fast" not in configuration.SCRIPTS_DIRNAME, + reason="Runs only on local cluster with HF shortcut.", + ) @pytest.mark.order(5) @pytest.mark.long def test_expired_kes( @@ -148,7 +144,6 @@ def test_expired_kes( ): """Test expired KES. - * start local cluster instance configured with short KES period and low number of key evolutions, so KES expires soon on all pools * refresh opcert on 2 of the 3 pools, so KES doesn't expire on those 2 pools and the pools keep minting blocks From cb5493d34961f563b2137aa11be7d041549483d4 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 15:00:48 +0100 Subject: [PATCH 070/168] fix(tests): update KES expiration comment Updated the comment in test_kes.py to clarify the slot number where KES expires when using `cluster_kes`. This improves the readability and understanding of the code. --- cardano_node_tests/tests/test_kes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index a665a5bd5..42a11f638 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -31,7 +31,7 @@ pytestmark = common.SKIPIF_WRONG_ERA -# Slot number where KES certificate expires when using `cluster_kes` +# Slot number where KES expires when using `cluster_kes` KES_EXPIRE_SLOT = 2100 From 8a0c924cba25e47ae2f4beb210dd141443490539 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 16:10:20 +0100 Subject: [PATCH 071/168] refactor: update current protocol params query method Updated the method for querying protocol parameters from `g_conway_governance.query.gov_state()["currentPParams"]` to `g_query.get_protocol_params()` across multiple test files. It is cheaper to get just protocol parameters than complete governance state. 
--- cardano_node_tests/tests/tests_conway/conway_common.py | 4 +--- cardano_node_tests/tests/tests_conway/test_committee.py | 2 +- cardano_node_tests/tests/tests_conway/test_pparam_update.py | 6 +++--- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/conway_common.py b/cardano_node_tests/tests/tests_conway/conway_common.py index a87bb1a90..38c585bc8 100644 --- a/cardano_node_tests/tests/tests_conway/conway_common.py +++ b/cardano_node_tests/tests/tests_conway/conway_common.py @@ -29,9 +29,7 @@ def is_in_bootstrap( cluster_obj: clusterlib.ClusterLib, ) -> bool: """Check if the cluster is in bootstrap period.""" - pv = cluster_obj.g_conway_governance.query.gov_state()["currentPParams"]["protocolVersion"][ - "major" - ] + pv = cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] return bool(pv == 9) diff --git a/cardano_node_tests/tests/tests_conway/test_committee.py b/cardano_node_tests/tests/tests_conway/test_committee.py index b9dd2a715..bb43e04cb 100644 --- a/cardano_node_tests/tests/tests_conway/test_committee.py +++ b/cardano_node_tests/tests/tests_conway/test_committee.py @@ -340,7 +340,7 @@ def test_add_rm_committee_members( # noqa: C901 # undelegated stake is treated as Abstain. If undelegated stake was treated as No, it # would not be possible to approve any action. 
delegated_stake = governance_utils.get_delegated_stake(cluster_obj=cluster) - cur_pparams = cluster.g_conway_governance.query.gov_state()["currentPParams"] + cur_pparams = cluster.g_query.get_protocol_params() drep_constitution_threshold = cur_pparams["dRepVotingThresholds"]["committeeNormal"] spo_constitution_threshold = cur_pparams["poolVotingThresholds"]["committeeNormal"] is_drep_total_below_threshold = ( diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index 4c72cd074..c2175cc1e 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -264,7 +264,7 @@ def test_pparam_update( # noqa: C901 # undelegated stake is treated as Abstain. If undelegated stake was treated as Yes, then # missing votes would approve the action. delegated_stake = governance_utils.get_delegated_stake(cluster_obj=cluster) - cur_pparams = cluster.g_conway_governance.query.gov_state()["currentPParams"] + cur_pparams = cluster.g_query.get_protocol_params() drep_constitution_threshold = cur_pparams["dRepVotingThresholds"]["ppGovGroup"] spo_constitution_threshold = cur_pparams["poolVotingThresholds"]["ppSecurityGroup"] is_drep_total_below_threshold = ( @@ -590,7 +590,7 @@ def test_pparam_update( # noqa: C901 ] # Hand-picked parameters and values that can stay changed even for other tests - cur_pparams = cluster.g_conway_governance.query.gov_state()["currentPParams"] + cur_pparams = cluster.g_query.get_protocol_params() fin_update_proposals = [ # From network group clusterlib_utils.UpdateProposal( @@ -1353,7 +1353,7 @@ def test_pparam_keys( _url = helpers.get_vcs_link() [r.start(url=_url) for r in (reqc.cip075, reqc.cip076, reqc.cip077, reqc.cip078)] - cur_pparam = cluster.g_conway_governance.query.gov_state()["currentPParams"] + cur_pparam = cluster.g_query.get_protocol_params() cur_pparam_keys = set(cur_pparam.keys()) 
known_pparam_keys = set().union( NETWORK_GROUP_PPARAMS, From b2a97aa43c47bef8ef880f18f05b5064c356e08b Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 16:11:54 +0100 Subject: [PATCH 072/168] fix(tests): simplify pparams extraction in test_update_plutusv2_builtins Simplified the extraction of `pparams` by removing redundant checks for `curPParams`, which is not needed for enacted state. --- .../tests/tests_conway/test_update_plutusv2_builtins.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index 7a5654f27..5f6694e92 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -163,9 +163,7 @@ def _update_cost_model() -> None: conway_common.save_gov_state( gov_state=enact_gov_state, name_template=f"{temp_template}_enact_{enact_epoch}" ) - pparams = ( - enact_gov_state.get("curPParams") or enact_gov_state.get("currentPParams") or {} - ) + pparams = enact_gov_state.get("currentPParams") or {} assert len(pparams["costModels"]["PlutusV2"]) == 185 # Check that Plutus script fails as expected in PV9 From 4fdedd67f53ad6884fee7262e7b3e8eac940c69b Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 16:18:02 +0100 Subject: [PATCH 073/168] refactor: make `check_conway_param_update_enactment` accept pparams Updated `check_conway_param_update_enactment` to accept pparams instead of full gov state for better efficiency. Updated function parameters in `check_conway_param_update_enactment` and `map_params_to_db_convention` for better clarity and consistency. 
--- .../tests/tests_conway/test_pparam_update.py | 4 +- cardano_node_tests/utils/dbsync_utils.py | 75 +++++++++---------- 2 files changed, 40 insertions(+), 39 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index c2175cc1e..763256fcd 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -1245,7 +1245,9 @@ def _check_state(state: dict): # db-sync check try: reqc.db024.start(url=helpers.get_vcs_link()) - dbsync_utils.check_conway_param_update_enactment(enact_gov_state, enact_epoch) + dbsync_utils.check_conway_param_update_enactment( + pparams=enact_gov_state["currentPParams"], epoch_no=enact_epoch + ) reqc.db024.success() except AssertionError as exc: db_errors_final.append(f"db-sync params enactment error: {exc}") diff --git a/cardano_node_tests/utils/dbsync_utils.py b/cardano_node_tests/utils/dbsync_utils.py index 846251525..baf3bf041 100644 --- a/cardano_node_tests/utils/dbsync_utils.py +++ b/cardano_node_tests/utils/dbsync_utils.py @@ -864,43 +864,43 @@ def _get_float_pparam(pparam: tp.Any) -> tp.Optional[float]: return float(pparam) -def map_params_to_db_convention(pp: dict) -> tp.Dict[str, tp.Any]: +def map_params_to_db_convention(pparams: dict) -> tp.Dict[str, tp.Any]: # Get the prices of memory and steps - prices = pp.get("executionUnitPrices", {}) + prices = pparams.get("executionUnitPrices", {}) price_mem = _get_float_pparam(prices.get("priceMemory")) price_steps = _get_float_pparam(prices.get("priceSteps")) - dvt = pp.get("dRepVotingThresholds", {}) - pvt = pp.get("poolVotingThresholds", {}) + dvt = pparams.get("dRepVotingThresholds", {}) + pvt = pparams.get("poolVotingThresholds", {}) params_mapping = { # Network proposals group - "max_block_size": pp.get("maxBlockBodySize"), - "max_tx_size": pp.get("maxTxSize"), - "max_bh_size": pp.get("maxBlockHeaderSize"), - 
"max_val_size": pp.get("maxValueSize"), - "max_tx_ex_mem": pp.get("maxTxExecutionUnits", {}).get("memory"), - "max_tx_ex_steps": pp.get("maxTxExecutionUnits", {}).get("steps"), - "max_block_ex_mem": pp.get("maxBlockExecutionUnits", {}).get("memory"), - "max_block_ex_steps": pp.get("maxBlockExecutionUnits", {}).get("steps"), - "max_collateral_inputs": pp.get("maxCollateralInputs"), + "max_block_size": pparams.get("maxBlockBodySize"), + "max_tx_size": pparams.get("maxTxSize"), + "max_bh_size": pparams.get("maxBlockHeaderSize"), + "max_val_size": pparams.get("maxValueSize"), + "max_tx_ex_mem": pparams.get("maxTxExecutionUnits", {}).get("memory"), + "max_tx_ex_steps": pparams.get("maxTxExecutionUnits", {}).get("steps"), + "max_block_ex_mem": pparams.get("maxBlockExecutionUnits", {}).get("memory"), + "max_block_ex_steps": pparams.get("maxBlockExecutionUnits", {}).get("steps"), + "max_collateral_inputs": pparams.get("maxCollateralInputs"), # Economic proposals group - "min_fee_a": pp.get("txFeePerByte"), - "min_fee_b": pp.get("txFeeFixed"), - "key_deposit": pp.get("stakeAddressDeposit"), - "pool_deposit": pp.get("stakePoolDeposit"), - "monetary_expand_rate": _get_float_pparam(pp.get("monetaryExpansion")), - "treasury_growth_rate": _get_float_pparam(pp.get("treasuryCut")), - "min_pool_cost": pp.get("minPoolCost"), - "coins_per_utxo_size": pp.get("utxoCostPerByte"), - "min_fee_ref_script_cost_per_byte": pp.get("minFeeRefScriptCostPerByte"), + "min_fee_a": pparams.get("txFeePerByte"), + "min_fee_b": pparams.get("txFeeFixed"), + "key_deposit": pparams.get("stakeAddressDeposit"), + "pool_deposit": pparams.get("stakePoolDeposit"), + "monetary_expand_rate": _get_float_pparam(pparams.get("monetaryExpansion")), + "treasury_growth_rate": _get_float_pparam(pparams.get("treasuryCut")), + "min_pool_cost": pparams.get("minPoolCost"), + "coins_per_utxo_size": pparams.get("utxoCostPerByte"), + "min_fee_ref_script_cost_per_byte": pparams.get("minFeeRefScriptCostPerByte"), "price_mem": 
price_mem, "price_step": price_steps, # Technical proposals group - "influence": _get_float_pparam(pp.get("poolPledgeInfluence")), - "max_epoch": pp.get("poolRetireMaxEpoch"), - "optimal_pool_count": pp.get("stakePoolTargetNum"), - "collateral_percent": pp.get("collateralPercentage"), + "influence": _get_float_pparam(pparams.get("poolPledgeInfluence")), + "max_epoch": pparams.get("poolRetireMaxEpoch"), + "optimal_pool_count": pparams.get("stakePoolTargetNum"), + "collateral_percent": pparams.get("collateralPercentage"), # Governance proposal group # - DReps "dvt_committee_no_confidence": _get_float_pparam(dvt.get("committeeNoConfidence")), @@ -920,12 +920,12 @@ def map_params_to_db_convention(pp: dict) -> tp.Dict[str, tp.Any]: "pvt_motion_no_confidence": _get_float_pparam(pvt.get("motionNoConfidence")), "pvtpp_security_group": _get_float_pparam(pvt.get("ppSecurityGroup")), # General - "gov_action_lifetime": pp.get("govActionLifetime"), - "gov_action_deposit": pp.get("govActionDeposit"), - "drep_deposit": pp.get("dRepDeposit"), - "drep_activity": pp.get("dRepActivity"), - "committee_min_size": pp.get("committeeMinSize"), - "committee_max_term_length": pp.get("committeeMaxTermLength"), + "gov_action_lifetime": pparams.get("govActionLifetime"), + "gov_action_deposit": pparams.get("govActionDeposit"), + "drep_deposit": pparams.get("dRepDeposit"), + "drep_activity": pparams.get("dRepActivity"), + "committee_min_size": pparams.get("committeeMinSize"), + "committee_max_term_length": pparams.get("committeeMaxTermLength"), } return params_mapping @@ -956,7 +956,7 @@ def check_conway_param_update_proposal( return None param_proposal_db = dbsync_queries.query_param_proposal() - params_map = map_params_to_db_convention(param_proposal_ledger) + params_map = map_params_to_db_convention(pparams=param_proposal_ledger) failures = [] # Get cost models @@ -975,16 +975,15 @@ def check_conway_param_update_proposal( def check_conway_param_update_enactment( - gov_state: dict, epoch_no: 
int + pparams: dict, epoch_no: int ) -> tp.Optional[dbsync_queries.EpochParamDBRow]: """Check params enactment between ledger and epoch param in db-sync.""" if not configuration.HAS_DBSYNC: return None - curr_params_db = dbsync_queries.query_epoch_param(epoch_no) - curr_params_ledger = gov_state["currentPParams"] - params_map = map_params_to_db_convention(curr_params_ledger) - failures = _check_param_proposal(curr_params_db, params_map) + curr_params_db = dbsync_queries.query_epoch_param(epoch_no=epoch_no) + params_map = map_params_to_db_convention(pparams=pparams) + failures = _check_param_proposal(param_proposal_db=curr_params_db, params_map=params_map) if failures: failures_str = "\n".join(failures) From 56b0ed879c57b47172e0927b065988a2a8ba8101 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 4 Nov 2024 18:21:53 +0100 Subject: [PATCH 074/168] feat(tests): add MIR certificates tests for Conway+ eras - Introduce `TestMIRCerts` class for testing MIR certificates. - Parameterize `test_mir_certificates` to test different MIR cert scenarios. - Update assertions to check for specific error messages related to MIR certs. 
--- .../tests_conway/test_treasury_withdrawals.py | 132 +++++++++++++++--- 1 file changed, 109 insertions(+), 23 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py index 362bf3b45..26b1c8c3f 100644 --- a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py +++ b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py @@ -12,6 +12,7 @@ from cardano_node_tests.tests import common from cardano_node_tests.tests import reqs_conway as reqc from cardano_node_tests.tests.tests_conway import conway_common +from cardano_node_tests.utils import cluster_nodes from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import configuration from cardano_node_tests.utils import dbsync_utils @@ -685,41 +686,126 @@ def test_expire_treasury_withdrawals( [r.success() for r in (reqc.cip032ex, reqc.cip069ex)] + +class TestMIRCerts: + """Tests for MIR certificates.""" + + @pytest.fixture + def payment_addr( + self, + cluster_manager: cluster_management.ClusterManager, + cluster: clusterlib.ClusterLib, + ) -> clusterlib.AddressRecord: + """Create new payment address.""" + test_id = common.get_test_id(cluster) + with cluster_manager.cache_fixture() as fixture_cache: + if fixture_cache.value: + return fixture_cache.value # type: ignore + + addr = clusterlib_utils.create_payment_addr_records( + f"{test_id}_payment_addr_0", + cluster_obj=cluster, + )[0] + fixture_cache.value = addr + + # Fund source addresses + clusterlib_utils.fund_from_faucet( + addr, + cluster_obj=cluster, + all_faucets=cluster_manager.cache.addrs_data, + amount=4_000_000, + ) + + return addr + @allure.link(helpers.get_vcs_link()) - @pytest.mark.parametrize("mir_cert", ("treasury", "rewards", "stake_addr")) - @pytest.mark.disabled # TODO: test that the certs cannot be submitted + @pytest.mark.parametrize( + "mir_cert", ("to_treasury", "to_rewards", 
"treasury_to_addr", "reserves_to_addr") + ) @pytest.mark.smoke def test_mir_certificates( self, cluster: clusterlib.ClusterLib, + payment_addr: clusterlib.AddressRecord, mir_cert: str, ): - """Try to withdraw funds from the treasury using MIR certificates. + """Try to use MIR certificates in Conway+ eras. Expect failure. + + * try and fail to build the Tx using `transaction build` + * successfully build the Tx as Babbage Tx using `transaction build-raw` + * try and fail to submit the Babbage Tx """ temp_template = common.get_test_id(cluster) - amount = 1_000_000_000_000 + amount = 1_500_000 + cluster_babbage = cluster_nodes.get_cluster_type().get_cluster_obj(command_era="babbage") reqc.cip070.start(url=helpers.get_vcs_link()) + + if mir_cert == "to_treasury": + cert_file = cluster.g_governance.gen_mir_cert_to_treasury( + transfer=amount, + tx_name=temp_template, + ) + elif mir_cert == "to_rewards": + cert_file = cluster.g_governance.gen_mir_cert_to_rewards( + transfer=amount, + tx_name=temp_template, + ) + elif mir_cert == "treasury_to_addr": + cert_file = cluster.g_governance.gen_mir_cert_stake_addr( + tx_name=temp_template, + stake_addr="stake_test1uzy5myemjnne3gr0jp7yhtznxx2lvx4qgv730jktsu46v5gaw7rmt", + reward=amount, + use_treasury=True, + ) + elif mir_cert == "reserves_to_addr": + cert_file = cluster.g_governance.gen_mir_cert_stake_addr( + tx_name=temp_template, + stake_addr="stake_test1uzy5myemjnne3gr0jp7yhtznxx2lvx4qgv730jktsu46v5gaw7rmt", + reward=amount, + use_treasury=False, + ) + else: + _verr = f"Unknown MIR cert scenario: {mir_cert}" + raise ValueError(_verr) + + tx_files = clusterlib.TxFiles( + certificate_files=[cert_file], + signing_key_files=[ + payment_addr.skey_file, + *cluster.g_genesis.genesis_keys.delegate_skeys, + ], + ) + + # The Tx cannot be build in Conway using `build` with pytest.raises(clusterlib.CLIError) as excinfo: - if mir_cert == "treasury": - cluster.g_governance.gen_mir_cert_to_treasury( - transfer=amount, - 
tx_name=temp_template, - ) - elif mir_cert == "rewards": - cluster.g_governance.gen_mir_cert_to_rewards( - transfer=amount, - tx_name=temp_template, - ) - else: - cluster.g_governance.gen_mir_cert_stake_addr( - tx_name=temp_template, - stake_addr="stake_test1uzy5myemjnne3gr0jp7yhtznxx2lvx4qgv730jktsu46v5gaw7rmt", - reward=amount, - use_treasury=True, - ) - err_str = str(excinfo.value) - assert "Invalid argument `create-mir-certificate'" in err_str, err_str + cluster.g_transaction.build_tx( + tx_name=temp_template, + src_address=payment_addr.address, + tx_files=tx_files, + ) + err_build = str(excinfo.value) + assert "TextEnvelope type error:" in err_build, err_build + + # The Tx can be build as Babbage Tx using `build-raw`, but cannot be submitted + tx_output = cluster_babbage.g_transaction.build_raw_tx( + tx_name=temp_template, + src_address=payment_addr.address, + fee=400_000, + tx_files=tx_files, + ) + + out_file_signed = cluster.g_transaction.sign_tx( + tx_body_file=tx_output.out_file, + signing_key_files=tx_files.signing_key_files, + tx_name=temp_template, + ) + + with pytest.raises(clusterlib.CLIError) as excinfo: + cluster.g_transaction.submit_tx(tx_file=out_file_signed, txins=tx_output.txins) + err_submit = str(excinfo.value) + assert "Error: The era of the node and the tx do not match." in err_submit, err_submit + reqc.cip070.success() From 1f7ac92a4936fd8a4d2d85374d3e99bf0e8bde1f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 10:29:47 +0100 Subject: [PATCH 075/168] fix(cluster-scripts): add force flag to cp command Added the -f (force) flag to the cp command in the start-cluster scripts for conway, conway_fast, and mainnet_fast. This ensures that the genesis.conway.spec.pv10.json file is copied even if it already exists in the destination directory. 
--- cardano_node_tests/cluster_scripts/conway/start-cluster | 2 +- cardano_node_tests/cluster_scripts/conway_fast/start-cluster | 2 +- cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/cluster_scripts/conway/start-cluster b/cardano_node_tests/cluster_scripts/conway/start-cluster index 7b6d3a740..90397aacf 100644 --- a/cardano_node_tests/cluster_scripts/conway/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway/start-cluster @@ -199,7 +199,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/shelley/" if [ -n "${PV10:-""}" ]; then - cp \ + cp -f \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ "${STATE_CLUSTER}/shelley/genesis.conway.spec.json" fi diff --git a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster index 8062b097b..dc80b52aa 100644 --- a/cardano_node_tests/cluster_scripts/conway_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/conway_fast/start-cluster @@ -137,7 +137,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -n "${PV10:-""}" ]; then - cp \ + cp -f \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ "${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" fi diff --git a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster index 8062b097b..dc80b52aa 100644 --- a/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster +++ b/cardano_node_tests/cluster_scripts/mainnet_fast/start-cluster @@ -137,7 +137,7 @@ cp "${SCRIPT_DIR}/supervisor.conf" "$STATE_CLUSTER" cp "$SCRIPT_DIR"/*genesis*.spec.json "${STATE_CLUSTER}/create_staked/" if [ -n "${PV10:-""}" ]; then - cp \ + cp -f \ "${SCRIPT_DIR}/genesis.conway.spec.pv10.json" \ 
"${STATE_CLUSTER}/create_staked/genesis.conway.spec.json" fi From 505ccb3d5a02f44e0a41f582278e7243ab0fa067 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 10:55:37 +0100 Subject: [PATCH 076/168] feat(cluster): add conway genesis file symlink Added a symlink for the genesis-conway.json file in the start-cluster script to ensure it is correctly referenced in the Shelley directory. --- cardano_node_tests/cluster_scripts/testnets/start-cluster | 1 + 1 file changed, 1 insertion(+) diff --git a/cardano_node_tests/cluster_scripts/testnets/start-cluster b/cardano_node_tests/cluster_scripts/testnets/start-cluster index 8312dfa2a..09924de50 100644 --- a/cardano_node_tests/cluster_scripts/testnets/start-cluster +++ b/cardano_node_tests/cluster_scripts/testnets/start-cluster @@ -45,6 +45,7 @@ cp "$TESTNET_CONF_DIR"/genesis-*.json "$STATE_CLUSTER" ln -rs "$STATE_CLUSTER/genesis-byron.json" "$STATE_CLUSTER/byron/genesis.json" ln -rs "$STATE_CLUSTER/genesis-shelley.json" "$STATE_CLUSTER/shelley/genesis.json" ln -rs "$STATE_CLUSTER/genesis-alonzo.json" "$STATE_CLUSTER/shelley/genesis.alonzo.json" +ln -rs "$STATE_CLUSTER/genesis-conway.json" "$STATE_CLUSTER/shelley/genesis.conway.json" # edit port numbers in configuration RELAY1_EKG="%%EKG_PORT_RELAY1%%" From 4bc3f4d8f2e39f0e8bc9a3ccabb75c1cbf91e934 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 11:02:59 +0100 Subject: [PATCH 077/168] feat(scripts): add test_node_ipc_shutdown.sh script Add a new script `test_node_ipc_shutdown.sh` to test the IPC shutdown mechanism of the Cardano node. This script is intended to run on long running testnets such as Preview. 
--- scripts/test_node_ipc_shutdown.sh | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100755 scripts/test_node_ipc_shutdown.sh diff --git a/scripts/test_node_ipc_shutdown.sh b/scripts/test_node_ipc_shutdown.sh new file mode 100755 index 000000000..7edf75783 --- /dev/null +++ b/scripts/test_node_ipc_shutdown.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# Meant to run on long running testnet such as Preview. + +if [ -z "$CARDANO_NODE_SOCKET_PATH" ]; then + echo "CARDANO_NODE_SOCKET_PATH is not set" >&2 + exit 1 +fi + +BOOTSRAP_DIR="${CARDANO_NODE_SOCKET_PATH%/*}" +cd "$BOOTSRAP_DIR" || exit 1 + +rm -f nodefifo +mkfifo nodefifo + +( + exec 3 relay1.log 2>&1 +) & +bpid=$! +echo "$bpid" + +exec 4>nodefifo # open fifo for writing +sleep 120 # let node start and run for a while + +exec 4>&- # close the pipe, node should shutdown +echo Node should be shutting down +sleep 20 From 66b4ebce39356c9f131a94ccb14016cb369d036a Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 12:37:19 +0100 Subject: [PATCH 078/168] feat(governance): add DRep stake distribution check Added a check for DRep stake distribution in the governance setup process. This ensures that the DRep stake distribution is properly setup when the protocol version is 10 or higher. --- cardano_node_tests/utils/governance_setup.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 3e0fe3235..49d44ff8f 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -232,9 +232,16 @@ def setup( # When using "fast" cluster, we need to wait for at least epoch 1 for DReps # to be usable. DReps don't vote in PV9. 
- if cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10: + if ( + drep_reg_records + and cluster_obj.g_query.get_protocol_params()["protocolVersion"]["major"] >= 10 + ): cluster_obj.wait_for_epoch(epoch_no=1, padding_seconds=5) - # TODO: check `cardano-cli conway query drep-stake-distribution` + + drep1_rec = cluster_obj.g_conway_governance.query.drep_stake_distribution( + drep_vkey_file=drep_reg_records[0].key_pair.vkey_file + ) + assert drep1_rec, "DRep stake distribution not found" return gov_data From bfa11e4fc28e9ce3b3a01c1bb60b6da2866ec831 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 14:16:33 +0100 Subject: [PATCH 079/168] refactor: do not depend on revision variables The commit removes variables `BASE_REVISION` and `UPGRADE_REVISION` from the `test_node_upgrade.py` file. The revisions don't need to be version strings, but also branches or commit hashes. Handling the revisions as version strings can be problematic. --- cardano_node_tests/tests/test_node_upgrade.py | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 1a40f72ba..90bbd5fca 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -8,7 +8,6 @@ import allure import pytest from cardano_clusterlib import clusterlib -from packaging import version from cardano_node_tests.cluster_management import cluster_management from cardano_node_tests.tests import common @@ -20,9 +19,6 @@ LOGGER = logging.getLogger(__name__) UPGRADE_TESTS_STEP = int(os.environ.get("UPGRADE_TESTS_STEP") or 0) -BASE_REVISION = version.parse(os.environ.get("BASE_REVISION") or "0.0.0") -UPGRADE_REVISION = version.parse(os.environ.get("UPGRADE_REVISION") or "0.0.0") -GOV_DATA_DIR = "governance_data" pytestmark = [ pytest.mark.skipif(not UPGRADE_TESTS_STEP, reason="not upgrade testing"), @@ -91,16 +87,14 
@@ def test_ignore_log_errors( worker_id: str, ): """Ignore selected errors in log right after node upgrade.""" - cluster = cluster_singleton - common.get_test_id(cluster) - - if UPGRADE_REVISION >= version.parse("10.1.0") > BASE_REVISION: - logfiles.add_ignore_rule( - files_glob="*.stdout", - regex="ChainDB:Error:.* Invalid snapshot DiskSnapshot .*DeserialiseFailure " - ".* expected change in the serialization format", - ignore_file_id=worker_id, - ) + common.get_test_id(cluster_singleton) + + logfiles.add_ignore_rule( + files_glob="*.stdout", + regex="ChainDB:Error:.* Invalid snapshot DiskSnapshot .*DeserialiseFailure " + ".* expected change in the serialization format", + ignore_file_id=worker_id, + ) @pytest.mark.upgrade From 846b71c104cb02c3264d078b7fde33b9df847240 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 17:49:21 +0100 Subject: [PATCH 080/168] feat(tests): add PlutusV3 cost model update test - Added a new test `test_update_cost_models` to verify the update of PlutusV3 cost models. - Introduced a new JSON file `cost_models_pv10.json` containing the cost models for PlutusV3. - Updated the `node_upgrade_pytest.sh` script to include the new test for updating cost models. - Added necessary imports and constants in `test_node_upgrade.py`. --- .github/node_upgrade_pytest.sh | 3 + .../tests/data/cost_models_pv10.json | 301 ++++++++++++++++++ cardano_node_tests/tests/test_node_upgrade.py | 127 ++++++++ 3 files changed, 431 insertions(+) create mode 100644 cardano_node_tests/tests/data/cost_models_pv10.json diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index 575ebe50e..ef3802ad2 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -230,6 +230,9 @@ elif [ "$1" = "step2" ]; then pytest cardano_node_tests/tests/test_node_upgrade.py -k test_ignore_log_errors err_retval="$?" + # Update PlutusV3 cost models. 
+ pytest cardano_node_tests/tests/test_node_upgrade.py -k test_update_cost_models || exit 2 + # run smoke tests pytest \ cardano_node_tests \ diff --git a/cardano_node_tests/tests/data/cost_models_pv10.json b/cardano_node_tests/tests/data/cost_models_pv10.json new file mode 100644 index 000000000..88a2059b6 --- /dev/null +++ b/cardano_node_tests/tests/data/cost_models_pv10.json @@ -0,0 +1,301 @@ +{ + "PlutusV3": { + "addInteger-cpu-arguments-intercept": 100788, + "addInteger-cpu-arguments-slope": 420, + "addInteger-memory-arguments-intercept": 1, + "addInteger-memory-arguments-slope": 1, + "appendByteString-cpu-arguments-intercept": 1000, + "appendByteString-cpu-arguments-slope": 173, + "appendByteString-memory-arguments-intercept": 0, + "appendByteString-memory-arguments-slope": 1, + "appendString-cpu-arguments-intercept": 1000, + "appendString-cpu-arguments-slope": 59957, + "appendString-memory-arguments-intercept": 4, + "appendString-memory-arguments-slope": 1, + "bData-cpu-arguments": 11183, + "bData-memory-arguments": 32, + "blake2b_256-cpu-arguments-intercept": 201305, + "blake2b_256-cpu-arguments-slope": 8356, + "blake2b_256-memory-arguments": 4, + "cekApplyCost-exBudgetCPU": 16000, + "cekApplyCost-exBudgetMemory": 100, + "cekBuiltinCost-exBudgetCPU": 16000, + "cekBuiltinCost-exBudgetMemory": 100, + "cekConstCost-exBudgetCPU": 16000, + "cekConstCost-exBudgetMemory": 100, + "cekDelayCost-exBudgetCPU": 16000, + "cekDelayCost-exBudgetMemory": 100, + "cekForceCost-exBudgetCPU": 16000, + "cekForceCost-exBudgetMemory": 100, + "cekLamCost-exBudgetCPU": 16000, + "cekLamCost-exBudgetMemory": 100, + "cekStartupCost-exBudgetCPU": 100, + "cekStartupCost-exBudgetMemory": 100, + "cekVarCost-exBudgetCPU": 16000, + "cekVarCost-exBudgetMemory": 100, + "chooseData-cpu-arguments": 94375, + "chooseData-memory-arguments": 32, + "chooseList-cpu-arguments": 132994, + "chooseList-memory-arguments": 32, + "chooseUnit-cpu-arguments": 61462, + "chooseUnit-memory-arguments": 4, + 
"consByteString-cpu-arguments-intercept": 72010, + "consByteString-cpu-arguments-slope": 178, + "consByteString-memory-arguments-intercept": 0, + "consByteString-memory-arguments-slope": 1, + "constrData-cpu-arguments": 22151, + "constrData-memory-arguments": 32, + "decodeUtf8-cpu-arguments-intercept": 91189, + "decodeUtf8-cpu-arguments-slope": 769, + "decodeUtf8-memory-arguments-intercept": 4, + "decodeUtf8-memory-arguments-slope": 2, + "divideInteger-cpu-arguments-constant": 85848, + "divideInteger-cpu-arguments-model-arguments-c00": 123203, + "divideInteger-cpu-arguments-model-arguments-c01": 7305, + "divideInteger-cpu-arguments-model-arguments-c02": -900, + "divideInteger-cpu-arguments-model-arguments-c10": 1716, + "divideInteger-cpu-arguments-model-arguments-c11": 549, + "divideInteger-cpu-arguments-model-arguments-c20": 57, + "divideInteger-cpu-arguments-model-arguments-minimum": 85848, + "divideInteger-memory-arguments-intercept": 0, + "divideInteger-memory-arguments-minimum": 1, + "divideInteger-memory-arguments-slope": 1, + "encodeUtf8-cpu-arguments-intercept": 1000, + "encodeUtf8-cpu-arguments-slope": 42921, + "encodeUtf8-memory-arguments-intercept": 4, + "encodeUtf8-memory-arguments-slope": 2, + "equalsByteString-cpu-arguments-constant": 24548, + "equalsByteString-cpu-arguments-intercept": 29498, + "equalsByteString-cpu-arguments-slope": 38, + "equalsByteString-memory-arguments": 1, + "equalsData-cpu-arguments-intercept": 898148, + "equalsData-cpu-arguments-slope": 27279, + "equalsData-memory-arguments": 1, + "equalsInteger-cpu-arguments-intercept": 51775, + "equalsInteger-cpu-arguments-slope": 558, + "equalsInteger-memory-arguments": 1, + "equalsString-cpu-arguments-constant": 39184, + "equalsString-cpu-arguments-intercept": 1000, + "equalsString-cpu-arguments-slope": 60594, + "equalsString-memory-arguments": 1, + "fstPair-cpu-arguments": 141895, + "fstPair-memory-arguments": 32, + "headList-cpu-arguments": 83150, + "headList-memory-arguments": 32, + 
"iData-cpu-arguments": 15299, + "iData-memory-arguments": 32, + "ifThenElse-cpu-arguments": 76049, + "ifThenElse-memory-arguments": 1, + "indexByteString-cpu-arguments": 13169, + "indexByteString-memory-arguments": 4, + "lengthOfByteString-cpu-arguments": 22100, + "lengthOfByteString-memory-arguments": 10, + "lessThanByteString-cpu-arguments-intercept": 28999, + "lessThanByteString-cpu-arguments-slope": 74, + "lessThanByteString-memory-arguments": 1, + "lessThanEqualsByteString-cpu-arguments-intercept": 28999, + "lessThanEqualsByteString-cpu-arguments-slope": 74, + "lessThanEqualsByteString-memory-arguments": 1, + "lessThanEqualsInteger-cpu-arguments-intercept": 43285, + "lessThanEqualsInteger-cpu-arguments-slope": 552, + "lessThanEqualsInteger-memory-arguments": 1, + "lessThanInteger-cpu-arguments-intercept": 44749, + "lessThanInteger-cpu-arguments-slope": 541, + "lessThanInteger-memory-arguments": 1, + "listData-cpu-arguments": 33852, + "listData-memory-arguments": 32, + "mapData-cpu-arguments": 68246, + "mapData-memory-arguments": 32, + "mkCons-cpu-arguments": 72362, + "mkCons-memory-arguments": 32, + "mkNilData-cpu-arguments": 7243, + "mkNilData-memory-arguments": 32, + "mkNilPairData-cpu-arguments": 7391, + "mkNilPairData-memory-arguments": 32, + "mkPairData-cpu-arguments": 11546, + "mkPairData-memory-arguments": 32, + "modInteger-cpu-arguments-constant": 85848, + "modInteger-cpu-arguments-model-arguments-c00": 123203, + "modInteger-cpu-arguments-model-arguments-c01": 7305, + "modInteger-cpu-arguments-model-arguments-c02": -900, + "modInteger-cpu-arguments-model-arguments-c10": 1716, + "modInteger-cpu-arguments-model-arguments-c11": 549, + "modInteger-cpu-arguments-model-arguments-c20": 57, + "modInteger-cpu-arguments-model-arguments-minimum": 85848, + "modInteger-memory-arguments-intercept": 0, + "modInteger-memory-arguments-slope": 1, + "multiplyInteger-cpu-arguments-intercept": 90434, + "multiplyInteger-cpu-arguments-slope": 519, + 
"multiplyInteger-memory-arguments-intercept": 0, + "multiplyInteger-memory-arguments-slope": 1, + "nullList-cpu-arguments": 74433, + "nullList-memory-arguments": 32, + "quotientInteger-cpu-arguments-constant": 85848, + "quotientInteger-cpu-arguments-model-arguments-c00": 123203, + "quotientInteger-cpu-arguments-model-arguments-c01": 7305, + "quotientInteger-cpu-arguments-model-arguments-c02": -900, + "quotientInteger-cpu-arguments-model-arguments-c10": 1716, + "quotientInteger-cpu-arguments-model-arguments-c11": 549, + "quotientInteger-cpu-arguments-model-arguments-c20": 57, + "quotientInteger-cpu-arguments-model-arguments-minimum": 85848, + "quotientInteger-memory-arguments-intercept": 0, + "quotientInteger-memory-arguments-minimum": 1, + "quotientInteger-memory-arguments-slope": 1, + "remainderInteger-cpu-arguments-constant": 85848, + "remainderInteger-cpu-arguments-model-arguments-c00": 123203, + "remainderInteger-cpu-arguments-model-arguments-c01": 7305, + "remainderInteger-cpu-arguments-model-arguments-c02": -900, + "remainderInteger-cpu-arguments-model-arguments-c10": 1716, + "remainderInteger-cpu-arguments-model-arguments-c11": 549, + "remainderInteger-cpu-arguments-model-arguments-c20": 57, + "remainderInteger-cpu-arguments-model-arguments-minimum": 85848, + "remainderInteger-memory-arguments-intercept": 0, + "remainderInteger-memory-arguments-slope": 1, + "serialiseData-cpu-arguments-intercept": 955506, + "serialiseData-cpu-arguments-slope": 213312, + "serialiseData-memory-arguments-intercept": 0, + "serialiseData-memory-arguments-slope": 2, + "sha2_256-cpu-arguments-intercept": 270652, + "sha2_256-cpu-arguments-slope": 22588, + "sha2_256-memory-arguments": 4, + "sha3_256-cpu-arguments-intercept": 1457325, + "sha3_256-cpu-arguments-slope": 64566, + "sha3_256-memory-arguments": 4, + "sliceByteString-cpu-arguments-intercept": 20467, + "sliceByteString-cpu-arguments-slope": 1, + "sliceByteString-memory-arguments-intercept": 4, + 
"sliceByteString-memory-arguments-slope": 0, + "sndPair-cpu-arguments": 141992, + "sndPair-memory-arguments": 32, + "subtractInteger-cpu-arguments-intercept": 100788, + "subtractInteger-cpu-arguments-slope": 420, + "subtractInteger-memory-arguments-intercept": 1, + "subtractInteger-memory-arguments-slope": 1, + "tailList-cpu-arguments": 81663, + "tailList-memory-arguments": 32, + "trace-cpu-arguments": 59498, + "trace-memory-arguments": 32, + "unBData-cpu-arguments": 20142, + "unBData-memory-arguments": 32, + "unConstrData-cpu-arguments": 24588, + "unConstrData-memory-arguments": 32, + "unIData-cpu-arguments": 20744, + "unIData-memory-arguments": 32, + "unListData-cpu-arguments": 25933, + "unListData-memory-arguments": 32, + "unMapData-cpu-arguments": 24623, + "unMapData-memory-arguments": 32, + "verifyEcdsaSecp256k1Signature-cpu-arguments": 43053543, + "verifyEcdsaSecp256k1Signature-memory-arguments": 10, + "verifyEd25519Signature-cpu-arguments-intercept": 53384111, + "verifyEd25519Signature-cpu-arguments-slope": 14333, + "verifyEd25519Signature-memory-arguments": 10, + "verifySchnorrSecp256k1Signature-cpu-arguments-intercept": 43574283, + "verifySchnorrSecp256k1Signature-cpu-arguments-slope": 26308, + "verifySchnorrSecp256k1Signature-memory-arguments": 10, + "cekConstrCost-exBudgetCPU": 16000, + "cekConstrCost-exBudgetMemory": 100, + "cekCaseCost-exBudgetCPU": 16000, + "cekCaseCost-exBudgetMemory": 100, + "bls12_381_G1_add-cpu-arguments": 962335, + "bls12_381_G1_add-memory-arguments": 18, + "bls12_381_G1_compress-cpu-arguments": 2780678, + "bls12_381_G1_compress-memory-arguments": 6, + "bls12_381_G1_equal-cpu-arguments": 442008, + "bls12_381_G1_equal-memory-arguments": 1, + "bls12_381_G1_hashToGroup-cpu-arguments-intercept": 52538055, + "bls12_381_G1_hashToGroup-cpu-arguments-slope": 3756, + "bls12_381_G1_hashToGroup-memory-arguments": 18, + "bls12_381_G1_neg-cpu-arguments": 267929, + "bls12_381_G1_neg-memory-arguments": 18, + 
"bls12_381_G1_scalarMul-cpu-arguments-intercept": 76433006, + "bls12_381_G1_scalarMul-cpu-arguments-slope": 8868, + "bls12_381_G1_scalarMul-memory-arguments": 18, + "bls12_381_G1_uncompress-cpu-arguments": 52948122, + "bls12_381_G1_uncompress-memory-arguments": 18, + "bls12_381_G2_add-cpu-arguments": 1995836, + "bls12_381_G2_add-memory-arguments": 36, + "bls12_381_G2_compress-cpu-arguments": 3227919, + "bls12_381_G2_compress-memory-arguments": 12, + "bls12_381_G2_equal-cpu-arguments": 901022, + "bls12_381_G2_equal-memory-arguments": 1, + "bls12_381_G2_hashToGroup-cpu-arguments-intercept": 166917843, + "bls12_381_G2_hashToGroup-cpu-arguments-slope": 4307, + "bls12_381_G2_hashToGroup-memory-arguments": 36, + "bls12_381_G2_neg-cpu-arguments": 284546, + "bls12_381_G2_neg-memory-arguments": 36, + "bls12_381_G2_scalarMul-cpu-arguments-intercept": 158221314, + "bls12_381_G2_scalarMul-cpu-arguments-slope": 26549, + "bls12_381_G2_scalarMul-memory-arguments": 36, + "bls12_381_G2_uncompress-cpu-arguments": 74698472, + "bls12_381_G2_uncompress-memory-arguments": 36, + "bls12_381_finalVerify-cpu-arguments": 333849714, + "bls12_381_finalVerify-memory-arguments": 1, + "bls12_381_millerLoop-cpu-arguments": 254006273, + "bls12_381_millerLoop-memory-arguments": 72, + "bls12_381_mulMlResult-cpu-arguments": 2174038, + "bls12_381_mulMlResult-memory-arguments": 72, + "keccak_256-cpu-arguments-intercept": 2261318, + "keccak_256-cpu-arguments-slope": 64571, + "keccak_256-memory-arguments": 4, + "blake2b_224-cpu-arguments-intercept": 207616, + "blake2b_224-cpu-arguments-slope": 8310, + "blake2b_224-memory-arguments": 4, + "integerToByteString-cpu-arguments-c0": 1293828, + "integerToByteString-cpu-arguments-c1": 28716, + "integerToByteString-cpu-arguments-c2": 63, + "integerToByteString-memory-arguments-intercept": 0, + "integerToByteString-memory-arguments-slope": 1, + "byteStringToInteger-cpu-arguments-c0": 1006041, + "byteStringToInteger-cpu-arguments-c1": 43623, + 
"byteStringToInteger-cpu-arguments-c2": 251, + "byteStringToInteger-memory-arguments-intercept": 0, + "byteStringToInteger-memory-arguments-slope": 1, + "andByteString-cpu-arguments-intercept": 100181, + "andByteString-cpu-arguments-slope1": 726, + "andByteString-cpu-arguments-slope2": 719, + "andByteString-memory-arguments-intercept": 0, + "andByteString-memory-arguments-slope": 1, + "orByteString-cpu-arguments-intercept": 100181, + "orByteString-cpu-arguments-slope1": 726, + "orByteString-cpu-arguments-slope2": 719, + "orByteString-memory-arguments-intercept": 0, + "orByteString-memory-arguments-slope": 1, + "xorByteString-cpu-arguments-intercept": 100181, + "xorByteString-cpu-arguments-slope1": 726, + "xorByteString-cpu-arguments-slope2": 719, + "xorByteString-memory-arguments-intercept": 0, + "xorByteString-memory-arguments-slope": 1, + "complementByteString-cpu-arguments-intercept": 107878, + "complementByteString-cpu-arguments-slope": 680, + "complementByteString-memory-arguments-intercept": 0, + "complementByteString-memory-arguments-slope": 1, + "readBit-cpu-arguments": 95336, + "readBit-memory-arguments": 1, + "writeBits-cpu-arguments-intercept": 281145, + "writeBits-cpu-arguments-slope": 18848, + "writeBits-memory-arguments-intercept": 0, + "writeBits-memory-arguments-slope": 1, + "replicateByte-cpu-arguments-intercept": 180194, + "replicateByte-cpu-arguments-slope": 159, + "replicateByte-memory-arguments-intercept": 1, + "replicateByte-memory-arguments-slope": 1, + "shiftByteString-cpu-arguments-intercept": 158519, + "shiftByteString-cpu-arguments-slope": 8942, + "shiftByteString-memory-arguments-intercept": 0, + "shiftByteString-memory-arguments-slope": 1, + "rotateByteString-cpu-arguments-intercept": 159378, + "rotateByteString-cpu-arguments-slope": 8813, + "rotateByteString-memory-arguments-intercept": 0, + "rotateByteString-memory-arguments-slope": 1, + "countSetBits-cpu-arguments-intercept": 107490, + "countSetBits-cpu-arguments-slope": 3298, + 
"countSetBits-memory-arguments": 1, + "findFirstSetBit-cpu-arguments-intercept": 106057, + "findFirstSetBit-cpu-arguments-slope": 655, + "findFirstSetBit-memory-arguments": 1, + "ripemd_160-cpu-arguments-intercept": 1964219, + "ripemd_160-cpu-arguments-slope": 24520, + "ripemd_160-memory-arguments": 3 + } +} diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 90bbd5fca..b5669c211 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -1,7 +1,9 @@ """Tests for node upgrade.""" +import json import logging import os +import pathlib as pl import shutil import typing as tp @@ -11,13 +13,17 @@ from cardano_node_tests.cluster_management import cluster_management from cardano_node_tests.tests import common +from cardano_node_tests.tests.tests_conway import conway_common from cardano_node_tests.utils import clusterlib_utils +from cardano_node_tests.utils import governance_setup +from cardano_node_tests.utils import governance_utils from cardano_node_tests.utils import helpers from cardano_node_tests.utils import logfiles from cardano_node_tests.utils import temptools LOGGER = logging.getLogger(__name__) +DATA_DIR = pl.Path(__file__).parent / "data" UPGRADE_TESTS_STEP = int(os.environ.get("UPGRADE_TESTS_STEP") or 0) pytestmark = [ @@ -79,6 +85,20 @@ class TestSetup: Special tests that run outside of normal test run. 
""" + @pytest.fixture + def pool_user_singleton( + self, + cluster_manager: cluster_management.ClusterManager, + cluster_singleton: clusterlib.ClusterLib, + ) -> clusterlib.PoolUser: + """Create a pool user for singleton.""" + name_template = common.get_test_id(cluster_singleton) + return conway_common.get_registered_pool_user( + cluster_manager=cluster_manager, + name_template=name_template, + cluster_obj=cluster_singleton, + ) + @allure.link(helpers.get_vcs_link()) @pytest.mark.skipif(UPGRADE_TESTS_STEP < 2, reason="runs only on step >= 2 of upgrade testing") def test_ignore_log_errors( @@ -96,6 +116,113 @@ def test_ignore_log_errors( ignore_file_id=worker_id, ) + @allure.link(helpers.get_vcs_link()) + @pytest.mark.skipif(UPGRADE_TESTS_STEP != 2, reason="runs only on step 2 of upgrade testing") + def test_update_cost_models( + self, + cluster_manager: cluster_management.ClusterManager, + cluster_singleton: clusterlib.ClusterLib, + pool_user_singleton: clusterlib.PoolUser, + ): + """Test cost model update.""" + cluster = cluster_singleton + temp_template = common.get_test_id(cluster) + cost_proposal_file = DATA_DIR / "cost_models_pv10.json" + + governance_data = governance_setup.get_default_governance( + cluster_manager=cluster_manager, cluster_obj=cluster + ) + governance_utils.wait_delayed_ratification(cluster_obj=cluster) + + proposals = [ + clusterlib_utils.UpdateProposal( + arg="--cost-model-file", + value=str(cost_proposal_file), + name="", # costModels + ), + ] + + with open(cost_proposal_file, encoding="utf-8") as fp: + cost_models_in = json.load(fp) + + prev_action_rec = governance_utils.get_prev_action( + action_type=governance_utils.PrevGovActionIds.PPARAM_UPDATE, + gov_state=cluster.g_conway_governance.query.gov_state(), + ) + + def _propose_pparams_update( + name_template: str, + proposals: tp.List[clusterlib_utils.UpdateProposal], + ) -> conway_common.PParamPropRec: + anchor_url = f"http://www.pparam-action-{clusterlib.get_rand_str(4)}.com" + 
anchor_data_hash = cluster.g_conway_governance.get_anchor_data_hash(text=anchor_url) + return conway_common.propose_pparams_update( + cluster_obj=cluster, + name_template=name_template, + anchor_url=anchor_url, + anchor_data_hash=anchor_data_hash, + pool_user=pool_user_singleton, + proposals=proposals, + prev_action_rec=prev_action_rec, + ) + + def _check_models(cost_models: dict): + for m in ("PlutusV1", "PlutusV2", "PlutusV3"): + if m not in cost_models_in: + continue + assert len(cost_models_in[m]) == len(cost_models[m]), f"Unexpected length for {m}" + + # Make sure we have enough time to submit the proposal and vote in one epoch + clusterlib_utils.wait_for_epoch_interval( + cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER + ) + init_epoch = cluster.g_query.get_epoch() + + # Propose the action + prop_rec = _propose_pparams_update(name_template=temp_template, proposals=proposals) + _check_models(prop_rec.future_pparams["costModels"]) + + # Vote & approve the action by CC + conway_common.cast_vote( + cluster_obj=cluster, + governance_data=governance_data, + name_template=f"{temp_template}_cc", + payment_addr=pool_user_singleton.payment, + action_txid=prop_rec.action_txid, + action_ix=prop_rec.action_ix, + approve_cc=True, + ) + + assert ( + cluster.g_query.get_epoch() == init_epoch + ), "Epoch changed and it would affect other checks" + + # Check ratification + rat_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 1, padding_seconds=5) + rat_gov_state = cluster.g_conway_governance.query.gov_state() + conway_common.save_gov_state( + gov_state=rat_gov_state, name_template=f"{temp_template}_rat_{rat_epoch}" + ) + + rat_action = governance_utils.lookup_ratified_actions( + gov_state=rat_gov_state, action_txid=prop_rec.action_txid + ) + assert rat_action, "Action not found in ratified actions" + + next_rat_state = rat_gov_state["nextRatifyState"] + _check_models(next_rat_state["nextEnactState"]["curPParams"]["costModels"]) + assert not 
next_rat_state["ratificationDelayed"], "Ratification is delayed unexpectedly" + + # Check enactment + enact_epoch = cluster.wait_for_epoch( + epoch_no=init_epoch + 2, padding_seconds=5, future_is_ok=False + ) + enact_gov_state = cluster.g_conway_governance.query.gov_state() + conway_common.save_gov_state( + gov_state=enact_gov_state, name_template=f"{temp_template}_enact_{enact_epoch}" + ) + _check_models(enact_gov_state["currentPParams"]["costModels"]) + @pytest.mark.upgrade class TestUpgrade: From 40dca24a9df8df9c07f04527e35888cf08bea5ae Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 17:50:59 +0100 Subject: [PATCH 081/168] feat: add hard fork test for PV10 - Added a new test `test_hardfork` to `test_node_upgrade.py` to handle the hard fork to protocol version 10. - Updated `node_upgrade_pytest.sh` to include the new hard fork test step. - Ensured the test runs only on step 3 of upgrade testing. --- .github/node_upgrade_pytest.sh | 3 + cardano_node_tests/tests/test_node_upgrade.py | 117 ++++++++++++++++++ 2 files changed, 120 insertions(+) diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index ef3802ad2..5f5683f66 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -344,6 +344,9 @@ elif [ "$1" = "step3" ]; then pytest cardano_node_tests/tests/test_node_upgrade.py -k test_ignore_log_errors err_retval="$?" + # Hard fork to PV10. 
+ pytest cardano_node_tests/tests/test_node_upgrade.py -k test_hardfork || exit 2 + # Run smoke tests pytest \ cardano_node_tests \ diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index b5669c211..3380b6b60 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -223,6 +223,123 @@ def _check_models(cost_models: dict): ) _check_models(enact_gov_state["currentPParams"]["costModels"]) + @allure.link(helpers.get_vcs_link()) + @pytest.mark.skipif(UPGRADE_TESTS_STEP != 3, reason="runs only on step 3 of upgrade testing") + def test_hardfork( + self, + cluster_manager: cluster_management.ClusterManager, + cluster_singleton: clusterlib.ClusterLib, + pool_user_singleton: clusterlib.PoolUser, + ): + """Test hard fork.""" + cluster = cluster_singleton + temp_template = common.get_test_id(cluster) + + governance_data = governance_setup.get_default_governance( + cluster_manager=cluster_manager, cluster_obj=cluster + ) + governance_utils.wait_delayed_ratification(cluster_obj=cluster) + + # Create an action + deposit_amt = cluster.conway_genesis["govActionDeposit"] + anchor_url = "http://www.hardfork-upgrade-pv10.com" + anchor_data_hash = "5d372dca1a4cc90d7d16d966c48270e33e3aa0abcb0e78f0d5ca7ff330d2245d" + prev_action_rec = governance_utils.get_prev_action( + action_type=governance_utils.PrevGovActionIds.HARDFORK, + gov_state=cluster.g_conway_governance.query.gov_state(), + ) + + hardfork_action = cluster.g_conway_governance.action.create_hardfork( + action_name=temp_template, + deposit_amt=deposit_amt, + anchor_url=anchor_url, + anchor_data_hash=anchor_data_hash, + protocol_major_version=10, + protocol_minor_version=0, + prev_action_txid=prev_action_rec.txid, + prev_action_ix=prev_action_rec.ix, + deposit_return_stake_vkey_file=pool_user_singleton.stake.vkey_file, + ) + + tx_files_action = clusterlib.TxFiles( + proposal_files=[hardfork_action.action_file], + 
signing_key_files=[ + pool_user_singleton.payment.skey_file, + ], + ) + + # Make sure we have enough time to submit the proposal and the votes in one epoch + clusterlib_utils.wait_for_epoch_interval( + cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - 20 + ) + init_epoch = cluster.g_query.get_epoch() + + tx_output_action = clusterlib_utils.build_and_submit_tx( + cluster_obj=cluster, + name_template=f"{temp_template}_action", + src_address=pool_user_singleton.payment.address, + use_build_cmd=True, + tx_files=tx_files_action, + ) + + action_txid = cluster.g_transaction.get_txid(tx_body_file=tx_output_action.out_file) + action_gov_state = cluster.g_conway_governance.query.gov_state() + action_epoch = cluster.g_query.get_epoch() + conway_common.save_gov_state( + gov_state=action_gov_state, name_template=f"{temp_template}_action_{action_epoch}" + ) + prop_action = governance_utils.lookup_proposal( + gov_state=action_gov_state, action_txid=action_txid + ) + assert prop_action, "Hardfork action not found" + assert ( + prop_action["proposalProcedure"]["govAction"]["tag"] + == governance_utils.ActionTags.HARDFORK_INIT.value + ), "Incorrect action tag" + + action_ix = prop_action["actionId"]["govActionIx"] + + # Vote & approve the action + conway_common.cast_vote( + cluster_obj=cluster, + governance_data=governance_data, + name_template=f"{temp_template}_yes", + payment_addr=pool_user_singleton.payment, + action_txid=action_txid, + action_ix=action_ix, + approve_cc=True, + approve_spo=True, + ) + + assert ( + cluster.g_query.get_epoch() == init_epoch + ), "Epoch changed and it would affect other checks" + + # Check ratification + rat_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 1, padding_seconds=5) + rat_gov_state = cluster.g_conway_governance.query.gov_state() + conway_common.save_gov_state( + gov_state=rat_gov_state, name_template=f"{temp_template}_rat_{rat_epoch}" + ) + rat_action = governance_utils.lookup_ratified_actions( + 
gov_state=rat_gov_state, action_txid=action_txid + ) + assert rat_action, "Action not found in ratified actions" + + assert ( + rat_gov_state["currentPParams"]["protocolVersion"]["major"] == 9 + ), "Incorrect major version" + + # Check enactment + enact_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 2, padding_seconds=5) + enact_gov_state = cluster.g_conway_governance.query.gov_state() + conway_common.save_gov_state( + gov_state=enact_gov_state, name_template=f"{temp_template}_enact_{enact_epoch}" + ) + assert ( + enact_gov_state["currentPParams"]["protocolVersion"]["major"] == 10 + ), "Incorrect major version" + @pytest.mark.upgrade class TestUpgrade: From 41365629af4424a601c30c3b7aa1ba49ed9b16c1 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 5 Nov 2024 17:51:42 +0100 Subject: [PATCH 082/168] feat(env): update BASE_TAR_URL to latest release Updated the BASE_TAR_URL in the env_nightly_upgrade file to point to the latest release version 10.1.2. This ensures that the nightly upgrade process uses the most recent released version of the cardano-node. --- .github/env_nightly_upgrade | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/env_nightly_upgrade b/.github/env_nightly_upgrade index bcf9ea6df..6c32d406e 100644 --- a/.github/env_nightly_upgrade +++ b/.github/env_nightly_upgrade @@ -1,2 +1,2 @@ -BASE_TAR_URL=https://github.com/IntersectMBO/cardano-node/releases/download/9.1.1/cardano-node-9.1.1-linux.tar.gz +BASE_TAR_URL=BASE_TAR_URL=https://github.com/IntersectMBO/cardano-node/releases/download/10.1.2/cardano-node-10.1.2-linux.tar.gz CI_BYRON_CLUSTER=true From 40d4700963b40eaaface26a6a585122f01731a11 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 6 Nov 2024 16:11:13 +0100 Subject: [PATCH 083/168] fix(env): correct BASE_TAR_URL in env_nightly_upgrade The BASE_TAR_URL was incorrectly duplicated in the env_nightly_upgrade file. This commit removes the duplicate and ensures the URL is correctly formatted. 
--- .github/env_nightly_upgrade | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/env_nightly_upgrade b/.github/env_nightly_upgrade index 6c32d406e..edd5da95a 100644 --- a/.github/env_nightly_upgrade +++ b/.github/env_nightly_upgrade @@ -1,2 +1,2 @@ -BASE_TAR_URL=BASE_TAR_URL=https://github.com/IntersectMBO/cardano-node/releases/download/10.1.2/cardano-node-10.1.2-linux.tar.gz +BASE_TAR_URL=https://github.com/IntersectMBO/cardano-node/releases/download/10.1.2/cardano-node-10.1.2-linux.tar.gz CI_BYRON_CLUSTER=true From 55c258b26b8bcff34c5ceb6ac1eb9a63c3bb315a Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 6 Nov 2024 16:23:31 +0100 Subject: [PATCH 084/168] chore: update .gitignore paths for consistency Updated the .gitignore file to ensure all paths are consistent by adding leading slashes where missing. This change helps in maintaining a uniform structure and improves readability. - Added leading slashes to testing artifacts directories. - Added leading slashes to workdir directories. - Added new entries for local files and environment variables. 
--- .gitignore | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/.gitignore b/.gitignore index 808f18294..73a0a3d11 100644 --- a/.gitignore +++ b/.gitignore @@ -1,17 +1,16 @@ # Testing artifacts -.artifacts/ -.cli_coverage/ -.reports/ +/.artifacts/ +/.cli_coverage/ +/.reports/ # Workdir for running tests -run_workdir/ -dev_workdir/ +/run_workdir/ +/dev_workdir/ -# Status files of cluster instances -/state-cluster*/ - -# Local temporary files +# Local files /tmp*/ +/.bin*/ +/.patches/ # Byte-compiled / optimized / DLL files __pycache__/ @@ -20,6 +19,13 @@ __pycache__/ # mypy .mypy_cache/ +.dmypy.json + +# Exuberant Ctags tag file +/tags + +# Env variables +/.source* # Distribution / packaging .Python @@ -68,9 +74,9 @@ coverage.xml *.pot # Sphinx documentation -src_docs/_build/ +/src_docs/_build/ # files generated by sphinx-apidoc -src_docs/source/cardano_node_tests*.rst +/src_docs/source/cardano_node_tests*.rst # PyBuilder target/ From 8d7aef2c4e69d13320a93694823472b16e1758f6 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 7 Nov 2024 18:11:31 +0100 Subject: [PATCH 085/168] feat(tests): add step-specific markers for upgrade tests - Added step-specific markers for upgrade tests in `node_upgrade_pytest.sh` - Updated `test_node_upgrade.py` to include new markers - Modified `test_info.py` and `test_treasury_withdrawals.py` to use new markers - Updated `pyproject.toml` with new markers - Documented changes in `nightly_system_tests.rst` --- .github/node_upgrade_pytest.sh | 13 ++++++++----- cardano_node_tests/tests/test_node_upgrade.py | 7 ++++++- cardano_node_tests/tests/tests_conway/test_info.py | 1 + .../tests/tests_conway/test_treasury_withdrawals.py | 1 + .../tests_conway/test_update_plutusv2_builtins.py | 2 +- pyproject.toml | 4 +++- .../source/test_results/nightly_system_tests.rst | 6 +++++- 7 files changed, 25 insertions(+), 9 deletions(-) diff --git a/.github/node_upgrade_pytest.sh 
b/.github/node_upgrade_pytest.sh index 5f5683f66..c934801d5 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -75,10 +75,11 @@ if [ "$1" = "step1" ]; then cp -f "$STATE_CLUSTER/shelley/genesis.conway.json" "$STATE_CLUSTER/shelley/genesis.conway.step1.json" # run smoke tests + printf "STEP1 tests: %(%H:%M:%S)T\n" -1 pytest \ cardano_node_tests \ -n "$TEST_THREADS" \ - -m "smoke or upgrade" \ + -m "smoke or upgrade_step1" \ --artifacts-base-dir="$ARTIFACTS_DIR" \ --cli-coverage-dir="$COVERAGE_DIR" \ --alluredir="$REPORTS_DIR" \ @@ -231,13 +232,14 @@ elif [ "$1" = "step2" ]; then err_retval="$?" # Update PlutusV3 cost models. - pytest cardano_node_tests/tests/test_node_upgrade.py -k test_update_cost_models || exit 2 + pytest cardano_node_tests/tests/test_node_upgrade.py -k test_update_cost_models || exit 6 # run smoke tests + printf "STEP2 tests: %(%H:%M:%S)T\n" -1 pytest \ cardano_node_tests \ -n "$TEST_THREADS" \ - -m "smoke or upgrade" \ + -m "smoke or upgrade_step2" \ --artifacts-base-dir="$ARTIFACTS_DIR" \ --cli-coverage-dir="$COVERAGE_DIR" \ --alluredir="$REPORTS_DIR" \ @@ -345,13 +347,14 @@ elif [ "$1" = "step3" ]; then err_retval="$?" # Hard fork to PV10. 
- pytest cardano_node_tests/tests/test_node_upgrade.py -k test_hardfork || exit 2 + pytest cardano_node_tests/tests/test_node_upgrade.py -k test_hardfork || exit 6 # Run smoke tests + printf "STEP3 tests: %(%H:%M:%S)T\n" -1 pytest \ cardano_node_tests \ -n "$TEST_THREADS" \ - -m "smoke or upgrade" \ + -m "smoke or upgrade_step3" \ --artifacts-base-dir="$ARTIFACTS_DIR" \ --cli-coverage-dir="$COVERAGE_DIR" \ --alluredir="$REPORTS_DIR" \ diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 3380b6b60..8ad71ddc5 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -341,13 +341,15 @@ def test_hardfork( ), "Incorrect major version" -@pytest.mark.upgrade class TestUpgrade: """Tests for node upgrade testing.""" @allure.link(helpers.get_vcs_link()) @pytest.mark.skipif(UPGRADE_TESTS_STEP > 2, reason="doesn't run on step > 2 of upgrade testing") @pytest.mark.order(-1) + @pytest.mark.upgrade_step1 + @pytest.mark.upgrade_step2 + @pytest.mark.upgrade_step3 @common.PARAM_USE_BUILD_CMD @pytest.mark.parametrize( "for_step", @@ -447,6 +449,9 @@ def test_prepare_tx( @allure.link(helpers.get_vcs_link()) @pytest.mark.skipif(UPGRADE_TESTS_STEP < 2, reason="runs only on step >= 2 of upgrade testing") @pytest.mark.order(5) + @pytest.mark.upgrade_step1 + @pytest.mark.upgrade_step2 + @pytest.mark.upgrade_step3 @common.PARAM_USE_BUILD_CMD @pytest.mark.parametrize( "for_step", diff --git a/cardano_node_tests/tests/tests_conway/test_info.py b/cardano_node_tests/tests/tests_conway/test_info.py index 96e4d8578..3839f7ef7 100644 --- a/cardano_node_tests/tests/tests_conway/test_info.py +++ b/cardano_node_tests/tests/tests_conway/test_info.py @@ -55,6 +55,7 @@ class TestInfo: @allure.link(helpers.get_vcs_link()) @pytest.mark.long @pytest.mark.dbsync + @pytest.mark.upgrade_step1 def test_info( self, cluster_use_governance: governance_utils.GovClusterT, diff --git 
a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py index 26b1c8c3f..a756efe66 100644 --- a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py +++ b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py @@ -93,6 +93,7 @@ class TestTreasuryWithdrawals: @allure.link(helpers.get_vcs_link()) @pytest.mark.dbsync @pytest.mark.long + @pytest.mark.upgrade_step3 def test_enact_treasury_withdrawals( self, cluster_use_governance_lock_treasury: governance_utils.GovClusterT, diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index 5f6694e92..5def7002a 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -75,7 +75,7 @@ class TestUpdateBuiltIns: @allure.link(helpers.get_vcs_link()) @pytest.mark.skipif(not configuration.HAS_CC, reason="Runs only on setup with CC") @pytest.mark.long - @pytest.mark.upgrade + @pytest.mark.upgrade_step1 def test_update_in_pv9( self, cluster_lock_governance: governance_utils.GovClusterT, diff --git a/pyproject.toml b/pyproject.toml index 92f96778d..3c790c841 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,7 +133,9 @@ markers = [ "testnets: test(s) can run on testnets, like Shelley_qa", "long: test(s) run for a long time on testnets", "smoke: fast test(s) under 1 minute", - "upgrade: test(s) for upgrade testing", + "upgrade_step1: test(s) for upgrade testing in step1", + "upgrade_step2: test(s) for upgrade testing in step2", + "upgrade_step3: test(s) for upgrade testing in step3", "plutus: test(s) for plutus", "team_plutus: test(s) from Plutus dev team", "disabled: temporarily disabled test(s)", diff --git a/src_docs/source/test_results/nightly_system_tests.rst b/src_docs/source/test_results/nightly_system_tests.rst 
index 07bdb0145..8faf092d2 100644 --- a/src_docs/source/test_results/nightly_system_tests.rst +++ b/src_docs/source/test_results/nightly_system_tests.rst @@ -57,14 +57,18 @@ Nightly upgrade testing * Constitutional Commitee has 5 members * default (legacy) network topology * smoke tests + * governance info action test * `Step 2 `__: |nightly-upgrade-step2-badge| - * upgrade all nodes except one to latest cardano-node master + * upgrade all nodes except one to the latest cardano-node master * mixed network topology (half nodes P2P, half nodes legacy topology) + * update PlutusV3 cost models * smoke tests * `Step 3 `__: |nightly-upgrade-step3-badge| * upgrade the last remaining node to latest cardano-node master * P2P network topology + * hard fork to Conway protocol version 10 * smoke tests + * governance treasury withdrawal action test .. |nightly-badge| image:: https://img.shields.io/endpoint?url=https%3A%2F%2Fcardano-tests-reports-3-74-115-22.nip.io%2Fcardano-node-tests-nightly%2Fbadge.json :target: https://cardano-tests-reports-3-74-115-22.nip.io/cardano-node-tests-nightly/ From 623bd7a9b9c72a8027cb6e25b3173306c307a76d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 7 Nov 2024 18:19:31 +0100 Subject: [PATCH 086/168] fix(tests): remove redundant epoch interval wait Removed unnecessary epoch interval wait before submitting proposal and voting in test_node_upgrade.py. The `conway_common.propose_pparams_update` waits for the epoch interval. 
--- cardano_node_tests/tests/test_node_upgrade.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 8ad71ddc5..fa8ff13fa 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -172,12 +172,6 @@ def _check_models(cost_models: dict): continue assert len(cost_models_in[m]) == len(cost_models[m]), f"Unexpected length for {m}" - # Make sure we have enough time to submit the proposal and vote in one epoch - clusterlib_utils.wait_for_epoch_interval( - cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - ) - init_epoch = cluster.g_query.get_epoch() - # Propose the action prop_rec = _propose_pparams_update(name_template=temp_template, proposals=proposals) _check_models(prop_rec.future_pparams["costModels"]) @@ -192,13 +186,10 @@ def _check_models(cost_models: dict): action_ix=prop_rec.action_ix, approve_cc=True, ) - - assert ( - cluster.g_query.get_epoch() == init_epoch - ), "Epoch changed and it would affect other checks" + vote_epoch = cluster.g_query.get_epoch() # Check ratification - rat_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 1, padding_seconds=5) + rat_epoch = cluster.wait_for_epoch(epoch_no=vote_epoch + 1, padding_seconds=5) rat_gov_state = cluster.g_conway_governance.query.gov_state() conway_common.save_gov_state( gov_state=rat_gov_state, name_template=f"{temp_template}_rat_{rat_epoch}" @@ -215,7 +206,7 @@ def _check_models(cost_models: dict): # Check enactment enact_epoch = cluster.wait_for_epoch( - epoch_no=init_epoch + 2, padding_seconds=5, future_is_ok=False + epoch_no=vote_epoch + 2, padding_seconds=5, future_is_ok=False ) enact_gov_state = cluster.g_conway_governance.query.gov_state() conway_common.save_gov_state( From 2b09e2c9b40b121cecb833320088517e0188fe8b Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 12:57:55 +0100 
Subject: [PATCH 087/168] feat: add group for script setup in CI scripts This commit adds the "Script setup" group in the `node_upgrade.sh` and `regression.sh` scripts. This change improves the readability of the CI logs by clearly delineating the setup phase from the rest of the script execution. --- .github/node_upgrade.sh | 26 +++--- .github/regression.sh | 94 +++++++++++----------- .github/workflows/regression_reusable.yaml | 5 +- .github/workflows/upgrade_reusable.yaml | 7 +- 4 files changed, 71 insertions(+), 61 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 250796ffa..17993f446 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -55,6 +55,14 @@ mkdir -p "$COVERAGE_DIR" export SCHEDULING_LOG=scheduling.log true > "$SCHEDULING_LOG" +export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" +unset ENABLE_LEGACY MIXED_P2P + +echo "::endgroup::" # end group for "Script setup" + +echo "::group::Nix env setup" +printf "start: %(%H:%M:%S)T\n" -1 + # shellcheck disable=SC1090,SC1091 . .github/nix_override_cardano_node.sh @@ -70,12 +78,6 @@ else NODE_OVERRIDE=$(node_override) fi -export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" -unset ENABLE_LEGACY MIXED_P2P - -echo "::group::Nix env setup" -printf "start: %(%H:%M:%S)T\n" -1 - set +e # shellcheck disable=SC2086 nix flake update --accept-flake-config $NODE_OVERRIDE @@ -89,7 +91,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup" - echo "::group::Pytest step1" + echo "::group::-> PYTEST STEP1 <-" df -h . 
# prepare scripts for stating cluster instance, start cluster instance, run smoke tests ./.github/node_upgrade_pytest.sh step1 @@ -104,8 +106,8 @@ fi # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we don't want to continue [ "$retval" -le 1 ] || exit "$retval" -echo "::endgroup::" # end group for "Pytest step1" -echo "::group::Pytest step2" +echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" +echo "::group::-> PYTEST STEP2 <-" # update cardano-node to specified branch and/or revision, or to the latest available revision if [ -n "${UPGRADE_REVISION:-""}" ]; then @@ -130,14 +132,14 @@ nix develop --accept-flake-config .#venv --command bash -c ' retval="$?" # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we dont want to continue [ "$retval" -le 1 ] || exit "$retval" - echo "::endgroup::" # end group for "Pytest step2" + echo "::endgroup::" # end group for "-> PYTEST STEP2 <-" - echo "::group::Pytest step3" + echo "::group::-> PYTEST STEP3 <-" df -h . # update to Conway, run smoke tests ./.github/node_upgrade_pytest.sh step3 retval="$?" - echo "::endgroup::" # end group for "Pytest step3" + echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" echo "::group::Cluster teardown & artifacts" # teardown cluster diff --git a/.github/regression.sh b/.github/regression.sh index eadafc6c9..20e89c68d 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -26,48 +26,6 @@ mkdir -p "$WORKDIR" export TMPDIR="$WORKDIR/tmp" mkdir -p "$TMPDIR" -# setup dbsync (disabled by default) -case "${DBSYNC_REV:-""}" in - "" ) - ;; - "none" ) - unset DBSYNC_REV - ;; - * ) - # shellcheck disable=SC1090,SC1091 - . .github/source_dbsync.sh - df -h . - ;; -esac - -# Setup plutus-apps (disabled by default). -# The "plutus-apps" repo is needed for the `create-script-context` tool, which is used by the -# Plutus tests that are testing script context. 
-# TODO: The `create-script-context` tool is broken for a very long time, hence disabled. -# See https://github.com/IntersectMBO/plutus-apps/issues/1107 -case "${PLUTUS_APPS_REV:="none"}" in - "none" ) - unset PLUTUS_APPS_REV - ;; - * ) - # shellcheck disable=SC1090,SC1091 - . .github/source_plutus_apps.sh - ;; -esac - -# setup cardano-cli (use the built-in version by default) -case "${CARDANO_CLI_REV:-""}" in - "" ) - ;; - "none" ) - unset CARDANO_CLI_REV - ;; - * ) - # shellcheck disable=SC1090,SC1091 - . .github/source_cardano_cli.sh - ;; -esac - if [ "${CI_TOPOLOGY:-""}" = "legacy" ]; then export ENABLE_LEGACY=1 elif [ "${CI_TOPOLOGY:-""}" = "mixed" ]; then @@ -127,9 +85,49 @@ if [ -n "${BOOTSTRAP_DIR:-""}" ]; then export MAKE_TARGET="${MAKE_TARGET:-"testnets"}" fi -# function to update cardano-node to specified branch and/or revision, or to the latest available -# shellcheck disable=SC1090,SC1091 -. .github/nix_override_cardano_node.sh +echo "::endgroup::" # end group for "Script setup" + +# setup dbsync (disabled by default) +case "${DBSYNC_REV:-""}" in + "" ) + ;; + "none" ) + unset DBSYNC_REV + ;; + * ) + # shellcheck disable=SC1090,SC1091 + . .github/source_dbsync.sh + df -h . + ;; +esac + +# Setup plutus-apps (disabled by default). +# The "plutus-apps" repo is needed for the `create-script-context` tool, which is used by the +# Plutus tests that are testing script context. +# TODO: The `create-script-context` tool is broken for a very long time, hence disabled. +# See https://github.com/IntersectMBO/plutus-apps/issues/1107 +case "${PLUTUS_APPS_REV:="none"}" in + "none" ) + unset PLUTUS_APPS_REV + ;; + * ) + # shellcheck disable=SC1090,SC1091 + . .github/source_plutus_apps.sh + ;; +esac + +# setup cardano-cli (use the built-in version by default) +case "${CARDANO_CLI_REV:-""}" in + "" ) + ;; + "none" ) + unset CARDANO_CLI_REV + ;; + * ) + # shellcheck disable=SC1090,SC1091 + . 
.github/source_cardano_cli.sh + ;; +esac _cleanup() { # stop all running cluster instances @@ -176,6 +174,10 @@ trap 'set +e; _interrupted; exit 130' SIGINT echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 +# function to update cardano-node to specified branch and/or revision, or to the latest available +# shellcheck disable=SC1090,SC1091 +. .github/nix_override_cardano_node.sh + # run tests and generate report set +e # shellcheck disable=SC2046,SC2119 @@ -190,7 +192,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup" - echo "::group::Pytest run" + echo "::group::-> PYTEST RUN <-" export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" export CARDANO_NODE_SOCKET_PATH="$CARDANO_NODE_SOCKET_PATH_CI" make "${MAKE_TARGET:-"tests"}" diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index cf1259f8d..f5bc4cb12 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -108,7 +108,10 @@ jobs: fi - name: Run CLI regression tests id: testing-step - run: .github/regression.sh + run: | + # Regression Tests + echo "::group::Script setup" + .github/regression.sh - name: Report test results if: (success() || failure()) && inputs.testrun_name run: | diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index cca9fef62..d9c60cfc2 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -43,9 +43,12 @@ jobs: cat .github_ci_env cat .github_ci_env >> $GITHUB_ENV echo "GITHUB_TOKEN=${{ secrets.GH_TOKEN }}" >> $GITHUB_ENV - - name: Run CLI regression tests + - name: Run upgrade tests id: testing-step - run: ./.github/node_upgrade.sh + run: | + # Upgrade Tests + echo "::group::Script setup" + ./.github/node_upgrade.sh - name: Upload testing artifacts on failure uses: 
actions/upload-artifact@v4 if: failure() From c80e2994de1a8692773195f328f738df6fa70a3a Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 16:00:12 +0100 Subject: [PATCH 088/168] refactor: update group names in shell scripts Updated the group names in various shell scripts to provide more clarity and consistency. This includes changes in node_upgrade.sh, regression.sh, source_cardano_cli.sh, source_dbsync.sh, and source_plutus_apps.sh. The new group names better reflect the steps being performed in each section. --- .github/node_upgrade.sh | 20 ++++++++++++-------- .github/regression.sh | 6 ++++++ .github/source_cardano_cli.sh | 4 ---- .github/source_dbsync.sh | 4 ---- .github/source_plutus_apps.sh | 4 ---- 5 files changed, 18 insertions(+), 20 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 17993f446..379e88b1a 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -60,7 +60,7 @@ unset ENABLE_LEGACY MIXED_P2P echo "::endgroup::" # end group for "Script setup" -echo "::group::Nix env setup" +echo "::group::Nix env setup step1" printf "start: %(%H:%M:%S)T\n" -1 # shellcheck disable=SC1090,SC1091 @@ -85,11 +85,11 @@ nix flake update --accept-flake-config $NODE_OVERRIDE nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step1" printf "finish: %(%H:%M:%S)T\n" -1 - echo "::endgroup::" # end group for "Nix env setup" + echo "::endgroup::" # end group for "Nix env setup step1" - echo "::group::Python venv setup" + echo "::group::Python venv setup step1" . .github/setup_venv.sh clean - echo "::endgroup::" # end group for "Python venv setup" + echo "::endgroup::" # end group for "Python venv setup step1" echo "::group::-> PYTEST STEP1 <-" df -h . 
@@ -107,7 +107,8 @@ fi [ "$retval" -le 1 ] || exit "$retval" echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" -echo "::group::-> PYTEST STEP2 <-" +echo "::group::Nix env setup steps 2 & 3" +printf "start: %(%H:%M:%S)T\n" -1 # update cardano-node to specified branch and/or revision, or to the latest available revision if [ -n "${UPGRADE_REVISION:-""}" ]; then @@ -121,12 +122,15 @@ nix flake update --accept-flake-config $NODE_OVERRIDE # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step2" - df -h . + printf "finish: %(%H:%M:%S)T\n" -1 + echo "::endgroup::" # end group for "Nix env setup steps 2 & 3" - echo "::group::Python venv setup" + echo "::group::Python venv setup steps 2 & 3" . .github/setup_venv.sh clean - echo "::endgroup::" # end group for "Python venv setup" + echo "::endgroup::" # end group for "Python venv setup steps 2 & 3" + echo "::group::-> PYTEST STEP2 <-" + df -h . # update cluster nodes, run smoke tests ./.github/node_upgrade_pytest.sh step2 retval="$?" diff --git a/.github/regression.sh b/.github/regression.sh index 20e89c68d..f2aae0590 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -95,9 +95,11 @@ case "${DBSYNC_REV:-""}" in unset DBSYNC_REV ;; * ) + echo "::group::db-sync setup" # shellcheck disable=SC1090,SC1091 . .github/source_dbsync.sh df -h . + echo "::endgroup::" ;; esac @@ -111,8 +113,10 @@ case "${PLUTUS_APPS_REV:="none"}" in unset PLUTUS_APPS_REV ;; * ) + echo "::group::plutus-apps setup" # shellcheck disable=SC1090,SC1091 . .github/source_plutus_apps.sh + echo "::endgroup::" ;; esac @@ -124,8 +128,10 @@ case "${CARDANO_CLI_REV:-""}" in unset CARDANO_CLI_REV ;; * ) + echo "::group::cardano-cli setup" # shellcheck disable=SC1090,SC1091 . 
.github/source_cardano_cli.sh + echo "::endgroup::" ;; esac diff --git a/.github/source_cardano_cli.sh b/.github/source_cardano_cli.sh index 0b410ea92..65fd231ed 100644 --- a/.github/source_cardano_cli.sh +++ b/.github/source_cardano_cli.sh @@ -1,7 +1,5 @@ #!/bin/bash -echo "::group::cardano-cli setup" - pushd "$WORKDIR" || exit 1 case "${CARDANO_CLI_REV:-""}" in @@ -39,5 +37,3 @@ nix build --accept-flake-config .#cardano-cli -o cardano-cli-build || exit 1 [ -e cardano-cli-build/bin/cardano-cli ] || exit 1 pushd "$REPODIR" || exit 1 - -echo "::endgroup::" diff --git a/.github/source_dbsync.sh b/.github/source_dbsync.sh index 120c10308..6f2b28ba0 100644 --- a/.github/source_dbsync.sh +++ b/.github/source_dbsync.sh @@ -1,7 +1,5 @@ #!/bin/bash -echo "::group::db-sync setup" - TEST_THREADS="${TEST_THREADS:-15}" CLUSTERS_COUNT="${CLUSTERS_COUNT:-4}" export TEST_THREADS CLUSTERS_COUNT @@ -121,5 +119,3 @@ export PGPORT=5432 # start and setup postgres ./scripts/postgres-start.sh "$WORKDIR/postgres" -k - -echo "::endgroup::" diff --git a/.github/source_plutus_apps.sh b/.github/source_plutus_apps.sh index 56f04e1be..4895a920d 100644 --- a/.github/source_plutus_apps.sh +++ b/.github/source_plutus_apps.sh @@ -1,7 +1,5 @@ #!/bin/bash -echo "::group::plutus-apps setup" - pushd "$WORKDIR" || exit 1 case "${PLUTUS_APPS_REV:-""}" in @@ -43,5 +41,3 @@ PATH="$(readlink -m create-script-context-build/bin)":"$PATH" export PATH pushd "$REPODIR" || exit 1 - -echo "::endgroup::" From a98f9136be3e922b842d8b2955ac6f117b531049 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 17:40:13 +0100 Subject: [PATCH 089/168] feat(tests): add delayed ratification wait in guardrails Added a call to governance_utils.wait_delayed_ratification in the cluster_with_constitution function to ensure proper handling of delayed ratification in the test_guardrails.py file. 
--- cardano_node_tests/tests/tests_conway/test_guardrails.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index c201bb86e..d00917b5d 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -141,6 +141,8 @@ def _enact_script_constitution(): constitution_url = "http://www.const-with-plutus.com" constitution_hash = "0000000000000000000000000000000000000000000000000000000000000000" + governance_utils.wait_delayed_ratification(cluster_obj=cluster) + _, action_txid, action_ix = conway_common.propose_change_constitution( cluster_obj=cluster, name_template=temp_template, From bb8f0d9b5a30e622d70c7a0a711522539b657066 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 17:43:48 +0100 Subject: [PATCH 090/168] fix(test_committee): ensure proposal and CC members in one epoch Moved the wait_for_epoch_interval call to ensure proposal is submitted and CC members data created within one epoch. This change ensures that the actions_epoch is correctly set before calculating the expiration of new CC members. 
--- .../tests/tests_conway/test_committee.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_committee.py b/cardano_node_tests/tests/tests_conway/test_committee.py index bb43e04cb..f292bd684 100644 --- a/cardano_node_tests/tests/tests_conway/test_committee.py +++ b/cardano_node_tests/tests/tests_conway/test_committee.py @@ -377,8 +377,14 @@ def test_add_rm_committee_members( # noqa: C901 [r.success() for r in (reqc.cli003, reqc.cli004, reqc.cli005, reqc.cli006)] + # Make sure we have enough time to submit the proposals in one epoch + clusterlib_utils.wait_for_epoch_interval( + cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - 10 + ) + actions_epoch = cluster.g_query.get_epoch() + # New CC members to be added - cc_member1_expire = cluster.g_query.get_epoch() + 3 + cc_member1_expire = actions_epoch + 3 cc_members = [ clusterlib.CCMember( epoch=cc_member1_expire, @@ -707,12 +713,6 @@ def _check_resign_dbsync(res_member: clusterlib.CCMember) -> None: cc_member_cold_key=res_member.cold_vkey_hash ) - # Make sure we have enough time to submit the proposals in one epoch - clusterlib_utils.wait_for_epoch_interval( - cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - 10 - ) - actions_epoch = cluster.g_query.get_epoch() - # Create an action to add new CC members add_cc_action, action_add_txid, action_add_ix = _add_members() From fcde18bf90a8c1c1de5fe9c1904f87851a3812dc Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 18:28:09 +0100 Subject: [PATCH 091/168] refactor(tests): extract KES period info check to function Extracted the KES period info check logic into a separate function to improve code readability and maintainability. Check KES period info right after expiration / renewal, not after logging timeout. 
--- cardano_node_tests/tests/test_kes.py | 92 +++++++++++++++------------- 1 file changed, 51 insertions(+), 41 deletions(-) diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index 42a11f638..bb205aa9a 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -155,7 +155,6 @@ def test_expired_kes( """ # pylint: disable=too-many-statements,too-many-locals cluster = cluster_kes - kes_period_info_errors_list = [] temp_template = common.get_test_id(cluster) expire_slot = KES_EXPIRE_SLOT + 100 @@ -217,6 +216,49 @@ def _refresh_opcerts() -> tp.Dict[str, int]: cluster_nodes.restart_all_nodes(delay=5) return refreshed_nodes_kes_period + def _check_kes_period_info( + refreshed_nodes_kes_period: tp.Dict[str, int], + ) -> tp.List[str]: + errors = [] + # Check kes-period-info with an operational certificate with KES expired + kes_info_expired = cluster.g_query.get_kes_period_info( + opcert_file=expire_pool_rec["pool_operational_cert"] + ) + with open(f"{temp_template}_kes_period_info_1.json", "w", encoding="utf-8") as out_fp: + json.dump(kes_info_expired, out_fp, indent=2) + errors.extend( + kes.check_kes_period_info_result( + cluster_obj=cluster, + kes_output=kes_info_expired, + expected_scenario=kes.KesScenarios.INVALID_KES_PERIOD, + check_id="1", + pool_num=expire_pool_num, + ) + ) + + # Check kes-period-info with valid operational certificates + for idx, n in enumerate(refreshed_nodes): + refreshed_pool_rec = cluster_manager.cache.addrs_data[f"node-{n}"] + kes_info_valid = cluster.g_query.get_kes_period_info( + opcert_file=refreshed_pool_rec["pool_operational_cert"] + ) + check_id = str(2 + idx) + with open( + f"{temp_template}_kes_period_info_{check_id}.json", "w", encoding="utf-8" + ) as out_fp: + json.dump(kes_info_valid, out_fp, indent=2) + errors.extend( + kes.check_kes_period_info_result( + cluster_obj=cluster, + kes_output=kes_info_valid, + expected_scenario=kes.KesScenarios.ALL_VALID, + 
check_id=check_id, + expected_start_kes=refreshed_nodes_kes_period[n], + ) + ) + + return errors + this_epoch = cluster.g_query.get_epoch() clusterlib_utils.save_ledger_state( cluster_obj=cluster, @@ -284,54 +326,22 @@ def _refresh_opcerts() -> tp.Dict[str, int]: not is_minting ), f"The pool '{expire_pool_name}' has minted blocks in epoch {this_epoch}" - # Refresh opcerts on pools that are not supposed to expire one more time + # Refresh opcerts one more time on pools that are not supposed to expire refreshed_nodes_kes_period = _refresh_opcerts() - LOGGER.info( - f"{datetime.datetime.now(tz=datetime.timezone.utc)}: " - "Waiting 120 secs to make sure the expected errors make it to log files." - ) - time.sleep(120) - + this_epoch = cluster.g_query.get_epoch() _save_all_metrics(temp_template=f"{temp_template}_{this_epoch}_after_refresh") _save_all_period_info(temp_template=f"{temp_template}_{this_epoch}_after_refresh") - # Check kes-period-info with an operational certificate with KES expired - kes_info_expired = cluster.g_query.get_kes_period_info( - opcert_file=expire_pool_rec["pool_operational_cert"] - ) - with open(f"{temp_template}_kes_period_info_1.json", "w", encoding="utf-8") as out_fp: - json.dump(kes_info_expired, out_fp, indent=2) - kes_period_info_errors_list.extend( - kes.check_kes_period_info_result( - cluster_obj=cluster, - kes_output=kes_info_expired, - expected_scenario=kes.KesScenarios.INVALID_KES_PERIOD, - check_id="1", - pool_num=expire_pool_num, + kes_period_info_errors_list = _check_kes_period_info( + refreshed_nodes_kes_period=refreshed_nodes_kes_period ) - ) - # Check kes-period-info with valid operational certificates - for idx, n in enumerate(refreshed_nodes): - refreshed_pool_rec = cluster_manager.cache.addrs_data[f"node-{n}"] - kes_info_valid = cluster.g_query.get_kes_period_info( - opcert_file=refreshed_pool_rec["pool_operational_cert"] - ) - check_id = str(2 + idx) - with open( - f"{temp_template}_kes_period_info_{check_id}.json", "w", 
encoding="utf-8" - ) as out_fp: - json.dump(kes_info_valid, out_fp, indent=2) - kes_period_info_errors_list.extend( - kes.check_kes_period_info_result( - cluster_obj=cluster, - kes_output=kes_info_valid, - expected_scenario=kes.KesScenarios.ALL_VALID, - check_id=check_id, - expected_start_kes=refreshed_nodes_kes_period[n], - ) + LOGGER.info( + f"{datetime.datetime.now(tz=datetime.timezone.utc)}: " + "Waiting 90 secs to make sure the expected errors make it to log files." ) + time.sleep(90) kes.finish_on_errors(errors=kes_period_info_errors_list) From 90c979b6cfee1ee8baf21d46382b3bde2fcda176 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 11 Nov 2024 18:54:18 +0100 Subject: [PATCH 092/168] fix(tests): fix docstring for expired KES test Part of the docstring was accidentally stripped in one of the previous changes. --- cardano_node_tests/tests/test_kes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index bb205aa9a..67cf1041b 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -144,6 +144,7 @@ def test_expired_kes( ): """Test expired KES. + * start local cluster instance configured with short KES period and low number of key evolutions, so KES expires soon on all pools * refresh opcert on 2 of the 3 pools, so KES doesn't expire on those 2 pools and the pools keep minting blocks From a57e5c3ea12a5de7b6783868fad54838de4d4e85 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 09:06:28 +0100 Subject: [PATCH 093/168] feat(ci): make all CI output part of group - Improve log grouping by making all CI output part of some group. - Rename `_cleanup_testnet` to `_cleanup_testnet_on_interrupt` for clarity. 
--- .github/node_upgrade.sh | 3 ++- .github/regression.sh | 22 ++++++++++++---------- .github/workflows/regression_reusable.yaml | 4 ++-- .github/workflows/upgrade_reusable.yaml | 2 +- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 379e88b1a..eac04f0bd 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -53,7 +53,7 @@ rm -rf "${COVERAGE_DIR:?}" mkdir -p "$COVERAGE_DIR" export SCHEDULING_LOG=scheduling.log -true > "$SCHEDULING_LOG" +: > "$SCHEDULING_LOG" export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" unset ENABLE_LEGACY MIXED_P2P @@ -107,6 +107,7 @@ fi [ "$retval" -le 1 ] || exit "$retval" echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" + echo "::group::Nix env setup steps 2 & 3" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/regression.sh b/.github/regression.sh index f2aae0590..b288e739d 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -37,7 +37,7 @@ export ARTIFACTS_DIR="${ARTIFACTS_DIR:-".artifacts"}" rm -rf "${ARTIFACTS_DIR:?}" export SCHEDULING_LOG=scheduling.log -true > "$SCHEDULING_LOG" +: > "$SCHEDULING_LOG" MARKEXPR="${MARKEXPR:-""}" if [ "$MARKEXPR" = "all" ]; then @@ -87,6 +87,8 @@ fi echo "::endgroup::" # end group for "Script setup" +echo "::group::Dependencies setup" + # setup dbsync (disabled by default) case "${DBSYNC_REV:-""}" in "" ) @@ -95,11 +97,9 @@ case "${DBSYNC_REV:-""}" in unset DBSYNC_REV ;; * ) - echo "::group::db-sync setup" # shellcheck disable=SC1090,SC1091 . .github/source_dbsync.sh df -h . - echo "::endgroup::" ;; esac @@ -113,10 +113,8 @@ case "${PLUTUS_APPS_REV:="none"}" in unset PLUTUS_APPS_REV ;; * ) - echo "::group::plutus-apps setup" # shellcheck disable=SC1090,SC1091 . 
.github/source_plutus_apps.sh - echo "::endgroup::" ;; esac @@ -128,22 +126,24 @@ case "${CARDANO_CLI_REV:-""}" in unset CARDANO_CLI_REV ;; * ) - echo "::group::cardano-cli setup" # shellcheck disable=SC1090,SC1091 . .github/source_cardano_cli.sh - echo "::endgroup::" ;; esac +echo "::endgroup::" # end group for "Dependencies setup" + +echo "::group::Cleanup setup" + _cleanup() { # stop all running cluster instances stop_instances "$WORKDIR" # stop postgres if running - stop_postgres || true + stop_postgres || : } -_cleanup_testnet() { +_cleanup_testnet_on_interrupt() { [ -z "${BOOTSTRAP_DIR:-""}" ] && return _PYTEST_CURRENT="$(find "$WORKDIR" -type l -name pytest-current)" @@ -172,11 +172,13 @@ _cleanup_testnet() { _interrupted() { # Do testnet cleanup only on interrupted testrun. When not interrupted, # cleanup is done as part of a testrun. - _cleanup_testnet + _cleanup_testnet_on_interrupt _cleanup } trap 'set +e; _interrupted; exit 130' SIGINT +echo "::endgroup::" # end group for "Cleanup setup" + echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index f5bc4cb12..98f339072 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -106,10 +106,10 @@ jobs: curl -s -u ${{ secrets.TCACHE_BASIC_AUTH }} "${{ secrets.TCACHE_URL }}/${testrun_name_strip}/pypassed" > deselected_tests.txt echo "DESELECT_FROM_FILE=deselected_tests.txt" >> $GITHUB_ENV fi - - name: Run CLI regression tests + - name: Run regression tests id: testing-step run: | - # Regression Tests + # env echo "::group::Script setup" .github/regression.sh - name: Report test results diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index d9c60cfc2..6954ecded 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -46,7 +46,7 @@ jobs: - name: Run upgrade 
tests id: testing-step run: | - # Upgrade Tests + # env echo "::group::Script setup" ./.github/node_upgrade.sh - name: Upload testing artifacts on failure From 60843e0a50baeacae0d3f6a9b83bf8612f5f1bbc Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 11:46:07 +0100 Subject: [PATCH 094/168] chore: update flake.lock dependencies - Updated CHaP to rev 25591f43ab943d5a070db5e8a2b9ff3a499d4d92 - Updated cardano-node to 10.1.2 --- flake.lock | 141 ++++++++++++++--------------------------------------- 1 file changed, 37 insertions(+), 104 deletions(-) diff --git a/flake.lock b/flake.lock index 44ce49674..83112c7b2 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "CHaP": { "flake": false, "locked": { - "lastModified": 1725170790, - "narHash": "sha256-dByd5I847MxV5i9kps89yL1OAvi7iDyC95BU7EM2wtw=", + "lastModified": 1730295876, + "narHash": "sha256-ijnHTQ6eKIQ9FpEqDKt6c7vuFYN8aOBDhonp67utx2s=", "owner": "intersectmbo", "repo": "cardano-haskell-packages", - "rev": "3bed5fccc06ecc11d4a8427112f107876263e0f3", + "rev": "25591f43ab943d5a070db5e8a2b9ff3a499d4d92", "type": "github" }, "original": { @@ -208,7 +208,6 @@ "nixpkgs" ], "iohkNix": "iohkNix", - "nix2container": "nix2container_2", "nixpkgs": [ "cardano-node", "haskellNix", @@ -219,11 +218,11 @@ "utils": "utils_2" }, "locked": { - "lastModified": 1725255033, - "narHash": "sha256-VIwEjpaGk09+dAcKELjLSR2OP3qBCWTGHpd0SBjgbVc=", + "lastModified": 1730468447, + "narHash": "sha256-yNEv7MQEcOPY9I9k9RCzeMfJY6gzuGc7K53GKNHs6v8=", "owner": "input-output-hk", "repo": "cardano-node", - "rev": "efd560070aaf042d1eb4680ae37fc607c7742319", + "rev": "01bda2e2cb0a70cd95067d696dbb44665f1d680a", "type": "github" }, "original": { @@ -501,14 +500,14 @@ }, "flake-utils_5": { "inputs": { - "systems": "systems" + "systems": "systems_2" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1726560853, + "narHash": 
"sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", "type": "github" }, "original": { @@ -522,11 +521,11 @@ "systems": "systems_3" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1726560853, + "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", "type": "github" }, "original": { @@ -537,25 +536,7 @@ }, "flake-utils_7": { "inputs": { - "systems": "systems_4" - }, - "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "flake-utils_8": { - "inputs": { - "systems": "systems_6" + "systems": "systems_5" }, "locked": { "lastModified": 1689068808, @@ -647,11 +628,11 @@ "hackageNix": { "flake": false, "locked": { - "lastModified": 1719794527, - "narHash": "sha256-qHo/KumtwAzPkfLWODu/6EFY/LeK+C7iPJyAUdT8tGA=", + "lastModified": 1729039425, + "narHash": "sha256-sIglYcw8Dacj4n0bRlUWo+NLkDMcVi6vtmKvUyG+ZrQ=", "owner": "input-output-hk", "repo": "hackage.nix", - "rev": "da2a3bc9bd1b3dd41bb147279529c471c615fd3e", + "rev": "6dc43e5e01f113ce151056a8f94bce7bb2f13eb9", "type": "github" }, "original": { @@ -713,6 +694,7 @@ "original": { "owner": "input-output-hk", "repo": "haskell.nix", + "rev": "cb139fa956158397aa398186bb32dd26f7318784", "type": "github" } }, @@ -965,11 +947,11 @@ "sodium": "sodium" }, "locked": { - "lastModified": 1721825987, - "narHash": "sha256-PPcma4tjozwXJAWf+YtHUQUulmxwulVlwSQzKItx/n8=", + 
"lastModified": 1728687575, + "narHash": "sha256-38uD8SqT557eh5yyRYuthKm1yTtiWzAN0FH7L/01QKM=", "owner": "input-output-hk", "repo": "iohk-nix", - "rev": "eb61f2c14e1f610ec59117ad40f8690cddbf80cb", + "rev": "86c2bd46e8a08f62ea38ffe77cb4e9c337b42217", "type": "github" }, "original": { @@ -1096,11 +1078,11 @@ ] }, "locked": { - "lastModified": 1703863825, - "narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=", + "lastModified": 1729742964, + "narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=", "owner": "nix-community", "repo": "nix-github-actions", - "rev": "5163432afc817cf8bd1f031418d1869e4c9d5547", + "rev": "e04df33f62cdcf93d73e9a04142464753a16db67", "type": "github" }, "original": { @@ -1187,25 +1169,6 @@ "type": "github" } }, - "nix2container_2": { - "inputs": { - "flake-utils": "flake-utils_5", - "nixpkgs": "nixpkgs_6" - }, - "locked": { - "lastModified": 1712990762, - "narHash": "sha256-hO9W3w7NcnYeX8u8cleHiSpK2YJo7ecarFTUlbybl7k=", - "owner": "nlewo", - "repo": "nix2container", - "rev": "20aad300c925639d5d6cbe30013c8357ce9f2a2e", - "type": "github" - }, - "original": { - "owner": "nlewo", - "repo": "nix2container", - "type": "github" - } - }, "nixago": { "inputs": { "flake-utils": [ @@ -1466,21 +1429,6 @@ } }, "nixpkgs_6": { - "locked": { - "lastModified": 1712920918, - "narHash": "sha256-1yxFvUcJfUphK9V91KufIQom7gCsztza0H4Rz2VCWUU=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "92323443a56f4e9fc4e4b712e3119f66d0969297", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_7": { "locked": { "lastModified": 1708343346, "narHash": "sha256-qlzHvterVRzS8fS0ophQpkh0rqw0abijHEOAKm0HmV0=", @@ -1593,20 +1541,20 @@ }, "poetry2nix": { "inputs": { - "flake-utils": "flake-utils_7", + "flake-utils": "flake-utils_6", "nix-github-actions": "nix-github-actions", "nixpkgs": [ "nixpkgs" ], - "systems": "systems_5", + "systems": "systems_4", "treefmt-nix": "treefmt-nix" }, "locked": 
{ - "lastModified": 1725532428, - "narHash": "sha256-dCfawQDwpukcwQw++Cn/3LIh/RZMmH+k3fm91Oc5Pf0=", + "lastModified": 1731205797, + "narHash": "sha256-F7N1mxH1VrkVNHR3JGNMRvp9+98KYO4b832KS8Gl2xI=", "owner": "nix-community", "repo": "poetry2nix", - "rev": "a313fd7169ae43ecd1a2ea2f1e4899fe3edba4d2", + "rev": "f554d27c1544d9c56e5f1f8e2b8aff399803674e", "type": "github" }, "original": { @@ -1616,7 +1564,7 @@ }, "poetry2nix-old": { "inputs": { - "flake-utils": "flake-utils_8", + "flake-utils": "flake-utils_7", "nix-github-actions": "nix-github-actions_2", "nixpkgs": [ "nixpkgs" @@ -1640,7 +1588,7 @@ "root": { "inputs": { "cardano-node": "cardano-node", - "flake-utils": "flake-utils_6", + "flake-utils": "flake-utils_5", "nixpkgs": [ "cardano-node", "nixpkgs" @@ -1776,7 +1724,7 @@ "std", "blank" ], - "nixpkgs": "nixpkgs_7", + "nixpkgs": "nixpkgs_6", "paisano": "paisano", "paisano-tui": "paisano-tui", "terranix": [ @@ -1846,21 +1794,6 @@ } }, "systems_4": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "systems_5": { "locked": { "lastModified": 1681028828, "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", @@ -1874,7 +1807,7 @@ "type": "indirect" } }, - "systems_6": { + "systems_5": { "locked": { "lastModified": 1681028828, "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", @@ -1897,11 +1830,11 @@ ] }, "locked": { - "lastModified": 1719749022, - "narHash": "sha256-ddPKHcqaKCIFSFc/cvxS14goUhCOAwsM1PbMr0ZtHMg=", + "lastModified": 1730120726, + "narHash": "sha256-LqHYIxMrl/1p3/kvm2ir925tZ8DkI0KA10djk8wecSk=", "owner": "numtide", "repo": "treefmt-nix", - "rev": "8df5ff62195d4e67e2264df0b7f5e8c9995fd0bd", + "rev": "9ef337e492a5555d8e17a51c911ff1f02635be15", "type": 
"github" }, "original": { @@ -1952,7 +1885,7 @@ }, "utils_2": { "inputs": { - "systems": "systems_2" + "systems": "systems" }, "locked": { "lastModified": 1710146030, From e05c91d31de8162ceaf74e2a750a220ec5a9f749 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 12:08:42 +0100 Subject: [PATCH 095/168] chore(python): update python dependencies --- poetry.lock | 641 +++++++++++++++++++--------------------- pyproject.toml | 34 +-- requirements_freeze.txt | 86 +++--- src_docs/source/conf.py | 1 - 4 files changed, 362 insertions(+), 400 deletions(-) diff --git a/poetry.lock b/poetry.lock index 01ec6e4bc..0e76112bf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -119,48 +119,55 @@ packaging = "*" [[package]] name = "cbor2" -version = "5.6.4" +version = "5.6.5" description = "CBOR (de)serializer with extensive tag support" optional = false python-versions = ">=3.8" files = [ - {file = "cbor2-5.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c40c68779a363f47a11ded7b189ba16767391d5eae27fac289e7f62b730ae1fc"}, - {file = "cbor2-5.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0625c8d3c487e509458459de99bf052f62eb5d773cc9fc141c6a6ea9367726d"}, - {file = "cbor2-5.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de7137622204168c3a57882f15dd09b5135bda2bcb1cf8b56b58d26b5150dfca"}, - {file = "cbor2-5.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3545e1e62ec48944b81da2c0e0a736ca98b9e4653c2365cae2f10ae871e9113"}, - {file = "cbor2-5.6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6749913cd00a24eba17406a0bfc872044036c30a37eb2fcde7acfd975317e8a"}, - {file = "cbor2-5.6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:57db966ab08443ee54b6f154f72021a41bfecd4ba897fe108728183ad8784a2a"}, - {file = "cbor2-5.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:380e0c7f4db574dcd86e6eee1b0041863b0aae7efd449d49b0b784cf9a481b9b"}, - {file = 
"cbor2-5.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5c763d50a1714e0356b90ad39194fc8ef319356b89fb001667a2e836bfde88e3"}, - {file = "cbor2-5.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58a7ac8861857a9f9b0de320a4808a2a5f68a2599b4c14863e2748d5a4686c99"}, - {file = "cbor2-5.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d715b2f101730335e84a25fe0893e2b6adf049d6d44da123bf243b8c875ffd8"}, - {file = "cbor2-5.6.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f53a67600038cb9668720b309fdfafa8c16d1a02570b96d2144d58d66774318"}, - {file = "cbor2-5.6.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f898bab20c4f42dca3688c673ff97c2f719b1811090430173c94452603fbcf13"}, - {file = "cbor2-5.6.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e5d50fb9f47d295c1b7f55592111350424283aff4cc88766c656aad0300f11f"}, - {file = "cbor2-5.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:7f9d867dcd814ab8383ad132eb4063e2b69f6a9f688797b7a8ca34a4eadb3944"}, - {file = "cbor2-5.6.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e0860ca88edf8aaec5461ce0e498eb5318f1bcc70d93f90091b7a1f1d351a167"}, - {file = "cbor2-5.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c38a0ed495a63a8bef6400158746a9cb03c36f89aeed699be7ffebf82720bf86"}, - {file = "cbor2-5.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c8d8c2f208c223a61bed48dfd0661694b891e423094ed30bac2ed75032142aa"}, - {file = "cbor2-5.6.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24cd2ce6136e1985da989e5ba572521023a320dcefad5d1fff57fba261de80ca"}, - {file = "cbor2-5.6.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7facce04aed2bf69ef43bdffb725446fe243594c2451921e89cc305bede16f02"}, - {file = "cbor2-5.6.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f9c8ee0d89411e5e039a4f3419befe8b43c0dd8746eedc979e73f4c06fe0ef97"}, - {file = "cbor2-5.6.4-cp312-cp312-win_amd64.whl", 
hash = "sha256:9b45d554daa540e2f29f1747df9f08f8d98ade65a67b1911791bc193d33a5923"}, - {file = "cbor2-5.6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a5cb2c16687ccd76b38cfbfdb34468ab7d5635fb92c9dc5e07831c1816bd0a9"}, - {file = "cbor2-5.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f985f531f7495527153c4f66c8c143e4cf8a658ec9e87b14bc5438e0a8d0911"}, - {file = "cbor2-5.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d9c7b4bd7c3ea7e5587d4f1bbe073b81719530ddadb999b184074f064896e2"}, - {file = "cbor2-5.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d06184dcdc275c389fee3cd0ea80b5e1769763df15f93ecd0bf4c281817365"}, - {file = "cbor2-5.6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e9ba7116f201860fb4c3e80ef36be63851ec7e4a18af70fea22d09cab0b000bf"}, - {file = "cbor2-5.6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:341468ae58bdedaa05c907ab16e90dd0d5c54d7d1e66698dfacdbc16a31e815b"}, - {file = "cbor2-5.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:bcb4994be1afcc81f9167c220645d878b608cae92e19f6706e770f9bc7bbff6c"}, - {file = "cbor2-5.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41c43abffe217dce70ae51c7086530687670a0995dfc90cc35f32f2cf4d86392"}, - {file = "cbor2-5.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:227a7e68ba378fe53741ed892b5b03fe472b5bd23ef26230a71964accebf50a2"}, - {file = "cbor2-5.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13521b7c9a0551fcc812d36afd03fc554fa4e1b193659bb5d4d521889aa81154"}, - {file = "cbor2-5.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4816d290535d20c7b7e2663b76da5b0deb4237b90275c202c26343d8852b8a"}, - {file = "cbor2-5.6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1e98d370106821335efcc8fbe4136ea26b4747bf29ca0e66512b6c4f6f5cc59f"}, - {file = "cbor2-5.6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:68743a18e16167ff37654a29321f64f0441801dba68359c82dc48173cc6c87e1"}, - {file = "cbor2-5.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:7ba5e9c6ed17526d266a1116c045c0941f710860c5f2495758df2e0d848c1b6d"}, - {file = "cbor2-5.6.4-py3-none-any.whl", hash = "sha256:fe411c4bf464f5976605103ebcd0f60b893ac3e4c7c8d8bc8f4a0cb456e33c60"}, - {file = "cbor2-5.6.4.tar.gz", hash = "sha256:1c533c50dde86bef1c6950602054a0ffa3c376e8b0e20c7b8f5b108793f6983e"}, + {file = "cbor2-5.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2"}, + {file = "cbor2-5.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33"}, + {file = "cbor2-5.6.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88f029522aec5425fc2f941b3df90da7688b6756bd3f0472ab886d21208acbd"}, + {file = "cbor2-5.6.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d15b638539b68aa5d5eacc56099b4543a38b2d2c896055dccf7e83d24b7955"}, + {file = "cbor2-5.6.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:47261f54a024839ec649b950013c4de5b5f521afe592a2688eebbe22430df1dc"}, + {file = "cbor2-5.6.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:559dcf0d897260a9e95e7b43556a62253e84550b77147a1ad4d2c389a2a30192"}, + {file = "cbor2-5.6.5-cp310-cp310-win_amd64.whl", hash = "sha256:5b856fda4c50c5bc73ed3664e64211fa4f015970ed7a15a4d6361bd48462feaf"}, + {file = "cbor2-5.6.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:863e0983989d56d5071270790e7ed8ddbda88c9e5288efdb759aba2efee670bc"}, + {file = "cbor2-5.6.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5cff06464b8f4ca6eb9abcba67bda8f8334a058abc01005c8e616728c387ad32"}, + {file = "cbor2-5.6.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4c7dbcdc59ea7f5a745d3e30ee5e6b6ff5ce7ac244aa3de6786391b10027bb3"}, + {file = 
"cbor2-5.6.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34cf5ab0dc310c3d0196caa6ae062dc09f6c242e2544bea01691fe60c0230596"}, + {file = "cbor2-5.6.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6797b824b26a30794f2b169c0575301ca9b74ae99064e71d16e6ba0c9057de51"}, + {file = "cbor2-5.6.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:73b9647eed1493097db6aad61e03d8f1252080ee041a1755de18000dd2c05f37"}, + {file = "cbor2-5.6.5-cp311-cp311-win_amd64.whl", hash = "sha256:6e14a1bf6269d25e02ef1d4008e0ce8880aa271d7c6b4c329dba48645764f60e"}, + {file = "cbor2-5.6.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e25c2aebc9db99af7190e2261168cdde8ed3d639ca06868e4f477cf3a228a8e9"}, + {file = "cbor2-5.6.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fde21ac1cf29336a31615a2c469a9cb03cf0add3ae480672d4d38cda467d07fc"}, + {file = "cbor2-5.6.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8947c102cac79d049eadbd5e2ffb8189952890df7cbc3ee262bbc2f95b011a9"}, + {file = "cbor2-5.6.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38886c41bebcd7dca57739439455bce759f1e4c551b511f618b8e9c1295b431b"}, + {file = "cbor2-5.6.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae2b49226224e92851c333b91d83292ec62eba53a19c68a79890ce35f1230d70"}, + {file = "cbor2-5.6.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2764804ffb6553283fc4afb10a280715905a4cea4d6dc7c90d3e89c4a93bc8d"}, + {file = "cbor2-5.6.5-cp312-cp312-win_amd64.whl", hash = "sha256:a3ac50485cf67dfaab170a3e7b527630e93cb0a6af8cdaa403054215dff93adf"}, + {file = "cbor2-5.6.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f0d0a9c5aabd48ecb17acf56004a7542a0b8d8212be52f3102b8218284bd881e"}, + {file = "cbor2-5.6.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61ceb77e6aa25c11c814d4fe8ec9e3bac0094a1f5bd8a2a8c95694596ea01e08"}, + {file = 
"cbor2-5.6.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97a7e409b864fecf68b2ace8978eb5df1738799a333ec3ea2b9597bfcdd6d7d2"}, + {file = "cbor2-5.6.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6d69f38f7d788b04c09ef2b06747536624b452b3c8b371ab78ad43b0296fab"}, + {file = "cbor2-5.6.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f91e6d74fa6917df31f8757fdd0e154203b0dd0609ec53eb957016a2b474896a"}, + {file = "cbor2-5.6.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5ce13a27ef8fddf643fc17a753fe34aa72b251d03c23da6a560c005dc171085b"}, + {file = "cbor2-5.6.5-cp313-cp313-win_amd64.whl", hash = "sha256:54c72a3207bb2d4480c2c39dad12d7971ce0853a99e3f9b8d559ce6eac84f66f"}, + {file = "cbor2-5.6.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4586a4f65546243096e56a3f18f29d60752ee9204722377021b3119a03ed99ff"}, + {file = "cbor2-5.6.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d1a18b3a58dcd9b40ab55c726160d4a6b74868f2a35b71f9e726268b46dc6a2"}, + {file = "cbor2-5.6.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a83b76367d1c3e69facbcb8cdf65ed6948678e72f433137b41d27458aa2a40cb"}, + {file = "cbor2-5.6.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90bfa36944caccec963e6ab7e01e64e31cc6664535dc06e6295ee3937c999cbb"}, + {file = "cbor2-5.6.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:37096663a5a1c46a776aea44906cbe5fa3952f29f50f349179c00525d321c862"}, + {file = "cbor2-5.6.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93676af02bd9a0b4a62c17c5b20f8e9c37b5019b1a24db70a2ee6cb770423568"}, + {file = "cbor2-5.6.5-cp38-cp38-win_amd64.whl", hash = "sha256:8f747b7a9aaa58881a0c5b4cd4a9b8fb27eca984ed261a769b61de1f6b5bd1e6"}, + {file = "cbor2-5.6.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:94885903105eec66d7efb55f4ce9884fdc5a4d51f3bd75b6fedc68c5c251511b"}, + {file = "cbor2-5.6.5-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:fe11c2eb518c882cfbeed456e7a552e544893c17db66fe5d3230dbeaca6b615c"}, + {file = "cbor2-5.6.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66dd25dd919cddb0b36f97f9ccfa51947882f064729e65e6bef17c28535dc459"}, + {file = "cbor2-5.6.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa61a02995f3a996c03884cf1a0b5733f88cbfd7fa0e34944bf678d4227ee712"}, + {file = "cbor2-5.6.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:824f202b556fc204e2e9a67d6d6d624e150fbd791278ccfee24e68caec578afd"}, + {file = "cbor2-5.6.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7488aec919f8408f9987a3a32760bd385d8628b23a35477917aa3923ff6ad45f"}, + {file = "cbor2-5.6.5-cp39-cp39-win_amd64.whl", hash = "sha256:a34ee99e86b17444ecbe96d54d909dd1a20e2da9f814ae91b8b71cf1ee2a95e4"}, + {file = "cbor2-5.6.5-py3-none-any.whl", hash = "sha256:3038523b8fc7de312bb9cdcbbbd599987e64307c4db357cd2030c472a6c7d468"}, + {file = "cbor2-5.6.5.tar.gz", hash = "sha256:b682820677ee1dbba45f7da11898d2720f92e06be36acec290867d5ebf3d7e09"}, ] [package.extras] @@ -522,29 +529,29 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "hypothesis" -version = "6.112.1" +version = "6.118.8" description = "A library for property-based testing" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "hypothesis-6.112.1-py3-none-any.whl", hash = "sha256:93631b1498b20d2c205ed304cbd41d50e9c069d78a9c773c1324ca094c5e30ce"}, - {file = "hypothesis-6.112.1.tar.gz", hash = "sha256:b070d7a1bb9bd84706c31885c9aeddc138e2b36a9c112a91984f49501c567856"}, + {file = "hypothesis-6.118.8-py3-none-any.whl", hash = "sha256:9363bde019f9d11d584d3c96af8273ab8a6a43009a7c9b1c2fd503c646062304"}, + {file = "hypothesis-6.118.8.tar.gz", hash = "sha256:2a564996f60b8961b5112672564516a522fbd6ab4b536f8c6f3e4adb125b7dd2"}, ] [package.dependencies] @@ -553,21 +560,21 @@ 
exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.70)", "django (>=3.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.13)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] +all = ["black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.77)", "django (>=4.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.18)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.19.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.2)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] -crosshair = ["crosshair-tool (>=0.0.70)", "hypothesis-crosshair (>=0.0.13)"] +crosshair = ["crosshair-tool (>=0.0.77)", "hypothesis-crosshair (>=0.0.18)"] dateutil = ["python-dateutil (>=1.4)"] -django = ["django (>=3.2)"] +django = ["django (>=4.2)"] dpcontracts = ["dpcontracts (>=0.4)"] ghostwriter = ["black (>=19.10b0)"] lark = ["lark (>=0.10.1)"] -numpy = ["numpy (>=1.17.3)"] +numpy = ["numpy (>=1.19.3)"] pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] -zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2024.1)"] +zoneinfo = ["tzdata (>=2024.2)"] [[package]] name = "identify" @@ -605,29 +612,6 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = 
"sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -641,13 +625,13 @@ files = [ [[package]] name = "ipython" -version = "8.18.1" +version = "8.29.0" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, + {file = "ipython-8.29.0-py3-none-any.whl", hash = "sha256:0188a1bd83267192123ccea7f4a8ed0a78910535dbaa3f37671dca76ebd429c8"}, + {file = "ipython-8.29.0.tar.gz", hash = "sha256:40b60e15b22591450eef73e40a027cf77bd652e757523eebc5bd7c7c498290eb"}, ] [package.dependencies] @@ -656,25 +640,26 @@ decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = 
"python_version < \"3.10\""} +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] kernel = ["ipykernel"] +matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "jedi" @@ -823,38 +808,43 @@ files = [ [[package]] name = "mypy" -version = "1.11.2" +version = "1.13.0" description = "Optional static typing for Python" optional = false 
python-versions = ">=3.8" files = [ - {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, - {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, - {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, - {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, - {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, - {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, - {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, - {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, - {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, - {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, - {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, - {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, - {file = "mypy-1.11.2.tar.gz", 
hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + 
{file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] @@ -864,6 +854,7 @@ typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -963,13 +954,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.8.0" +version = "4.0.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, + {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, + {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, ] [package.dependencies] @@ -995,83 +986,78 @@ wcwidth = "*" [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - 
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = 
"psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = 
"psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -1112,21 +1098,21 @@ files = [ [[package]] name = "pydantic" -version = "2.9.1" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, - {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = 
"sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.3" +pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] [package.extras] @@ -1135,100 +1121,100 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.3" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, - {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, - {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, - {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, - {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, - {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, - {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, - {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, - {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, - {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, - {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, - {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, - {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, - {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, - {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, - {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, - {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, - {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, - {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, - 
{file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, - {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, - {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, - {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, - {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", 
hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1236,13 +1222,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygithub" -version = "2.4.0" +version = "2.5.0" description = "Use the full Github API v3" optional = false python-versions = ">=3.8" files = [ - {file = "PyGithub-2.4.0-py3-none-any.whl", hash = "sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24"}, - {file = "pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051"}, + {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, + {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, ] [package.dependencies] @@ -1384,10 +1370,7 @@ files = [ ] [package.dependencies] -pytest = [ - {version = ">=5.0", markers = "python_version < \"3.10\""}, - {version = ">=6.2.4", markers = "python_version >= \"3.10\""}, -] +pytest = {version = ">=6.2.4", markers = "python_version >= \"3.10\""} [[package]] name = "pytest-select" @@ -1556,58 +1539,57 @@ files = [ [[package]] name = "sphinx" -version = "7.4.7" +version = "8.1.3" description = "Python documentation generator" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, - {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, + {file = "sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2"}, + {file = "sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927"}, ] [package.dependencies] -alabaster = ">=0.7.14,<0.8.0" +alabaster = ">=0.7.14" babel = ">=2.13" colorama = {version = 
">=0.4.6", markers = "sys_platform == \"win32\""} docutils = ">=0.20,<0.22" imagesize = ">=1.3" -importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} Jinja2 = ">=3.1" packaging = ">=23.0" Pygments = ">=2.17" requests = ">=2.30.0" snowballstemmer = ">=2.2" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" +sphinxcontrib-applehelp = ">=1.0.7" +sphinxcontrib-devhelp = ">=1.0.6" +sphinxcontrib-htmlhelp = ">=2.0.6" +sphinxcontrib-jsmath = ">=1.0.1" +sphinxcontrib-qthelp = ">=1.0.6" sphinxcontrib-serializinghtml = ">=1.1.9" tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +lint = ["flake8 (>=6.0)", "mypy (==1.11.1)", "pyright (==1.1.384)", "pytest (>=6.0)", "ruff (==0.6.9)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.18.0.20240506)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241005)", "types-requests (==2.32.0.20240914)", "types-urllib3 (==1.26.25.14)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-rtd-theme" -version = "2.0.0" +version = "3.0.1" description = "Read the Docs theme for Sphinx" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, - {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, + {file = 
"sphinx_rtd_theme-3.0.1-py2.py3-none-any.whl", hash = "sha256:921c0ece75e90633ee876bd7b148cfaad136b481907ad154ac3669b6fc957916"}, + {file = "sphinx_rtd_theme-3.0.1.tar.gz", hash = "sha256:a4c5745d1b06dfcb80b7704fe532eb765b44065a8fad9851e4258c8804140703"}, ] [package.dependencies] -docutils = "<0.21" -sphinx = ">=5,<8" +docutils = ">0.18,<0.22" +sphinx = ">=6,<9" sphinxcontrib-jquery = ">=4,<5" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] +dev = ["bump2version", "transifex-client", "twine", "wheel"] [[package]] name = "sphinxcontrib-applehelp" @@ -1778,24 +1760,24 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-pyyaml" -version = "6.0.12.20240808" +version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] name = "types-requests" -version = "2.32.0.20240907" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240907.tar.gz", hash = "sha256:ff33935f061b5e81ec87997e91050f7b4af4f82027a7a7a9d9aaea04a963fdf8"}, - {file = "types_requests-2.32.0.20240907-py3-none-any.whl", hash = "sha256:1d1e79faeaf9d42def77f3c304893dea17a97cae98168ac69f3cb465516ee8da"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = 
"sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -1939,26 +1921,7 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "zipp" -version = "3.20.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.0" -python-versions = ">=3.9,<4.0" -content-hash = "3e3e2d9ee601c18527c77344a19a7087cad4d1ba625805258bd49c17ebc45f2b" +python-versions = ">=3.10,<4.0" +content-hash = "e41d79bdced69afd1301c1577571974b7fcf751e5a0771cd9cd3b8bb12520c98" diff --git a/pyproject.toml b/pyproject.toml index 3c790c841..17e06fcc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,22 +24,22 @@ classifiers = [ packages = [{include = "cardano_node_tests"}] [tool.poetry.dependencies] -python = ">=3.9,<4.0" +python = ">=3.10,<4.0" allure-pytest = "^2.13.5" cardano-clusterlib = "^0.7.0a4" -cbor2 = "^5.6.4" -filelock = "^3.15.4" -hypothesis = "^6.108.5" -psycopg2-binary = "^2.9.9" -pydantic = "^2.8.2" -pygithub = 
"^2.3.0" -pytest = "^8.3.2" +cbor2 = "^5.6.5" +filelock = "^3.16.1" +hypothesis = "^6.118.8" +psycopg2-binary = "^2.9.10" +pydantic = "^2.9.2" +pygithub = "^2.5.0" +pytest = "^8.3.3" pytest-html = "^4.1.1" pytest-metadata = "^3.1.1" -pytest-order = "^1.2.1" +pytest-order = "^1.3.0" pytest-select = "^0.1.2" pytest-xdist = "^3.6.1" -PyYAML = "^6.0.1" +PyYAML = "^6.0.2" requests = "^2.32.3" pytest-subtests = "^0.13.1" @@ -47,19 +47,19 @@ pytest-subtests = "^0.13.1" optional = true [tool.poetry.group.dev.dependencies] -pre-commit = "^3.8.0" -types-PyYAML = "^6.0.12.20240724" -types-requests = "^2.32.0.20240712" -mypy = "^1.11.0" -ipython = "^8.14.0" +pre-commit = "^4.0.1" +types-PyYAML = "^6.0.12.20240917" +types-requests = "^2.32.0.20241016" +mypy = "^1.13.0" +ipython = "^8.29.0" [tool.poetry.group.docs] optional = true [tool.poetry.group.docs.dependencies] -Sphinx = "7.4.7" +Sphinx = "^8.1.3" m2r2 = "^0.3.3.post2" -sphinx-rtd-theme = "^2.0.0" +sphinx-rtd-theme = "^3.0.1" sphinxemoji = "^0.3.1" [tool.poetry.scripts] diff --git a/requirements_freeze.txt b/requirements_freeze.txt index e6aadb371..5827ae77c 100644 --- a/requirements_freeze.txt +++ b/requirements_freeze.txt @@ -1,45 +1,45 @@ # Don't edit. This file is generated by poetry_update_deps.sh -e . 
-allure-pytest==2.13.5 ; python_version >= "3.9" and python_version < "4.0" -allure-python-commons==2.13.5 ; python_version >= "3.9" and python_version < "4.0" -annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" -attrs==24.2.0 ; python_version >= "3.9" and python_version < "4.0" -cardano-clusterlib==0.7.0a4 ; python_version >= "3.9" and python_version < "4.0" -cbor2==5.6.4 ; python_version >= "3.9" and python_version < "4.0" -certifi==2024.8.30 ; python_version >= "3.9" and python_version < "4.0" -cffi==1.17.1 ; python_version >= "3.9" and python_version < "4.0" -charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" -colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and sys_platform == "win32" -cryptography==43.0.1 ; python_version >= "3.9" and python_version < "4.0" -deprecated==1.2.14 ; python_version >= "3.9" and python_version < "4.0" -exceptiongroup==1.2.2 ; python_version >= "3.9" and python_version < "3.11" -execnet==2.1.1 ; python_version >= "3.9" and python_version < "4.0" -filelock==3.16.0 ; python_version >= "3.9" and python_version < "4.0" -hypothesis==6.112.1 ; python_version >= "3.9" and python_version < "4.0" -idna==3.8 ; python_version >= "3.9" and python_version < "4.0" -iniconfig==2.0.0 ; python_version >= "3.9" and python_version < "4.0" -jinja2==3.1.4 ; python_version >= "3.9" and python_version < "4.0" -markupsafe==2.1.5 ; python_version >= "3.9" and python_version < "4.0" -packaging==24.1 ; python_version >= "3.9" and python_version < "4.0" -pluggy==1.5.0 ; python_version >= "3.9" and python_version < "4.0" -psycopg2-binary==2.9.9 ; python_version >= "3.9" and python_version < "4.0" -pycparser==2.22 ; python_version >= "3.9" and python_version < "4.0" -pydantic-core==2.23.3 ; python_version >= "3.9" and python_version < "4.0" -pydantic==2.9.1 ; python_version >= "3.9" and python_version < "4.0" -pygithub==2.4.0 ; python_version >= "3.9" and python_version < "4.0" 
-pyjwt[crypto]==2.9.0 ; python_version >= "3.9" and python_version < "4.0" -pynacl==1.5.0 ; python_version >= "3.9" and python_version < "4.0" -pytest-html==4.1.1 ; python_version >= "3.9" and python_version < "4.0" -pytest-metadata==3.1.1 ; python_version >= "3.9" and python_version < "4.0" -pytest-order==1.3.0 ; python_version >= "3.9" and python_version < "4.0" -pytest-select==0.1.2 ; python_version >= "3.9" and python_version < "4.0" -pytest-subtests==0.13.1 ; python_version >= "3.9" and python_version < "4.0" -pytest-xdist==3.6.1 ; python_version >= "3.9" and python_version < "4.0" -pytest==8.3.3 ; python_version >= "3.9" and python_version < "4.0" -pyyaml==6.0.2 ; python_version >= "3.9" and python_version < "4.0" -requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" -sortedcontainers==2.4.0 ; python_version >= "3.9" and python_version < "4.0" -tomli==2.0.1 ; python_version >= "3.9" and python_version < "3.11" -typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0" -urllib3==2.2.3 ; python_version >= "3.9" and python_version < "4.0" -wrapt==1.16.0 ; python_version >= "3.9" and python_version < "4.0" +allure-pytest==2.13.5 ; python_version >= "3.10" and python_version < "4.0" +allure-python-commons==2.13.5 ; python_version >= "3.10" and python_version < "4.0" +annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0" +attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" +cardano-clusterlib==0.7.0a4 ; python_version >= "3.10" and python_version < "4.0" +cbor2==5.6.5 ; python_version >= "3.10" and python_version < "4.0" +certifi==2024.8.30 ; python_version >= "3.10" and python_version < "4.0" +cffi==1.17.1 ; python_version >= "3.10" and python_version < "4.0" +charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" +colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" +cryptography==43.0.1 ; python_version >= "3.10" and 
python_version < "4.0" +deprecated==1.2.14 ; python_version >= "3.10" and python_version < "4.0" +exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" +execnet==2.1.1 ; python_version >= "3.10" and python_version < "4.0" +filelock==3.16.1 ; python_version >= "3.10" and python_version < "4.0" +hypothesis==6.118.8 ; python_version >= "3.10" and python_version < "4.0" +idna==3.8 ; python_version >= "3.10" and python_version < "4.0" +iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" +jinja2==3.1.4 ; python_version >= "3.10" and python_version < "4.0" +markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0" +packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" +pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" +psycopg2-binary==2.9.10 ; python_version >= "3.10" and python_version < "4.0" +pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" +pydantic-core==2.23.4 ; python_version >= "3.10" and python_version < "4.0" +pydantic==2.9.2 ; python_version >= "3.10" and python_version < "4.0" +pygithub==2.5.0 ; python_version >= "3.10" and python_version < "4.0" +pyjwt[crypto]==2.9.0 ; python_version >= "3.10" and python_version < "4.0" +pynacl==1.5.0 ; python_version >= "3.10" and python_version < "4.0" +pytest-html==4.1.1 ; python_version >= "3.10" and python_version < "4.0" +pytest-metadata==3.1.1 ; python_version >= "3.10" and python_version < "4.0" +pytest-order==1.3.0 ; python_version >= "3.10" and python_version < "4.0" +pytest-select==0.1.2 ; python_version >= "3.10" and python_version < "4.0" +pytest-subtests==0.13.1 ; python_version >= "3.10" and python_version < "4.0" +pytest-xdist==3.6.1 ; python_version >= "3.10" and python_version < "4.0" +pytest==8.3.3 ; python_version >= "3.10" and python_version < "4.0" +pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" 
+sortedcontainers==2.4.0 ; python_version >= "3.10" and python_version < "4.0" +tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" +typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" +urllib3==2.2.3 ; python_version >= "3.10" and python_version < "4.0" +wrapt==1.16.0 ; python_version >= "3.10" and python_version < "4.0" diff --git a/src_docs/source/conf.py b/src_docs/source/conf.py index 9dbc31cda..2795f3be2 100644 --- a/src_docs/source/conf.py +++ b/src_docs/source/conf.py @@ -78,7 +78,6 @@ html_theme_options = { "logo_only": False, - "display_version": False, "prev_next_buttons_location": "bottom", "style_external_links": False, # Toc options From 488aa0e8b94e41bd8bfa037f073ab71722884096 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 12:46:18 +0100 Subject: [PATCH 096/168] chore(flakes): update devShells and remove legacy compat - Added a TODO comment for removing sync tests once moved to a separate repo - Removed compatibility code for cardano-node 1.34.1 --- flake.nix | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/flake.nix b/flake.nix index b7c6346a3..7441b5d6e 100644 --- a/flake.nix +++ b/flake.nix @@ -98,6 +98,7 @@ base = pkgs.mkShell { nativeBuildInputs = with pkgs; [ bash coreutils curl git gnugrep gnumake gnutar jq python3Packages.supervisor xz ]; }; + # TODO: can be removed once sync tests are fully moved to separate repo python = pkgs.mkShell { nativeBuildInputs = with pkgs; with python39Packages; [ python39Full virtualenv pip matplotlib pandas requests xmltodict psutil GitPython pymysql ]; }; @@ -114,25 +115,12 @@ ]; }); default = ( - cardano-node.devShells.${system}.devops or ( - # Compat with 1.34.1: - (import (cardano-node + "/shell.nix") { - pkgs = cardano-node.legacyPackages.${system}.extend (self: prev: { - workbench-supervisord = - { useCabalRun, profileName, haskellPackages }: - self.callPackage (cardano-node + "/nix/supervisord-cluster") - 
{ - inherit profileName useCabalRun haskellPackages; - workbench = self.callPackage (cardano-node + "/nix/workbench") { inherit useCabalRun; }; - }; - }); - }).devops - ) + cardano-node.devShells.${system}.devops ).overrideAttrs (oldAttrs: rec { nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ oldAttrs.nativeBuildInputs ++ [ cardano-node.packages.${system}.cardano-submit-api cardano-nodes-tests-apps - #TODO: can be removed once tests scripts do not rely on cardano-nodes-tests-apps dependencies: + # TODO: can be removed once tests scripts do not rely on cardano-nodes-tests-apps dependencies: cardano-nodes-tests-apps.dependencyEnv ]; }); From f07c03badfe034f6e5cd90b2c044b81c810a3b1d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 13:09:58 +0100 Subject: [PATCH 097/168] refactor: remove poetry2nix and related configurations This commit removes the poetry2nix and poetry2nix-old configurations from the flake.nix file. The related code for handling poetry2nix dependencies and environments has also been removed. The default and dev shells now use venv directly. --- flake.nix | 86 +++---------------------------------------------------- 1 file changed, 4 insertions(+), 82 deletions(-) diff --git a/flake.nix b/flake.nix index 7441b5d6e..a040d877f 100644 --- a/flake.nix +++ b/flake.nix @@ -9,19 +9,6 @@ membench.follows = "/"; }; }; - poetry2nix = { - inputs.nixpkgs.follows = "nixpkgs"; - }; - poetry2nix-old = { - # pin poetry2nix to 2023.10.05.49422, sometime after - # there is a change in the boostrap packages that expects - # wheel to take a flint-core argument, but it doesn't. It - # doesn't with the nixpkgs reference from cardano-node. - # Hence we need to make sure we pin it to an old enough - # version to work with our nixpkgs ref from cardano-node. 
- url = "github:nix-community/poetry2nix?ref=2023.10.05.49422"; - inputs.nixpkgs.follows = "nixpkgs"; - }; nixpkgs.follows = "cardano-node/nixpkgs"; flake-utils = { url = "github:numtide/flake-utils"; @@ -29,72 +16,14 @@ }; }; - outputs = { self, nixpkgs, flake-utils, cardano-node, poetry2nix, poetry2nix-old }: + outputs = { self, nixpkgs, flake-utils, cardano-node }: flake-utils.lib.eachDefaultSystem (system: let pkgs = nixpkgs.legacyPackages.${system}; - - # As we are using [poetry](https://python-poetry.org) as the python dependency manager for cardano-node-tests, we will be using - # [poetry2nix](https://github.com/nix-community/poetry2nix) to convert the poetry project (pyproject.toml, - # and poetry.lock) into a nix-buildable expression. This is preferable over using `pkgs.python3.withPackages` - # as it adheres to the poetry setup instead of replicating it in nix again. - p2n-for-nixpkgs = - # if we are using an old nixpkgs (<23.11) then pin poetry2nix to - # 2023.10.05.49422, sometime after there is a change in the boostrap - # packages that expects wheel to take a flit-core argument, but it - # doesn't. It doesn't with the nixpkgs reference from cardano-node. - # Hence we need to make sure we pin it to an old enough version to - # work with our nixpkgs ref from cardano-node. - - # see https://github.com/NixOS/nixpkgs/commit/3cd71e0ae67cc48f1135e55bf78cb0d67b53ff86 - # for why we do this check. - if pkgs.lib.versionAtLeast pkgs.python3Packages.wheel.version "0.41.1" - then (__trace "using NEW poetry2nix" poetry2nix) - else (__trace "using OLD poetry2nix" poetry2nix-old); - p2n = (import p2n-for-nixpkgs { inherit pkgs; }); - - # base config of poetry2nix for our local project: - p2nConfig = { - projectDir = self; - # We use sdist by default for faster build. 
Also avoid having to manually inject dependencies on build-tools: - preferWheels = true; - # Because we transitively depend on `py`, (through `pytest-html`), we need to drop the module from `pytest`: - overrides = p2n.overrides.withDefaults (self: super: { - # we remove py.py shim fallback in pytest, which might accidentally take precedence over actual py lib - # due to the multiple site-packages in $PYTHONPATH generated by nix: - pytest = (super.pytest.override { - # Build from source so that we can patch: - preferWheel = false; - }).overridePythonAttrs ( - old: { - postPatch = old.postPatch or "" + '' - rm src/py.py - ''; - } - ); - }); - }; - - # Packaging of [tool.poetry.scripts] as applications: - cardano-nodes-tests-apps = p2n.mkPoetryApplication p2nConfig; - - # All python dependencies of our local project: - cardano-nodes-tests-env = p2n.mkPoetryEnv (p2nConfig // { - groups = [ "dev" "docs" ]; - }); - in { - packages = { - inherit cardano-nodes-tests-apps; - default = cardano-nodes-tests-apps; - }; devShells = rec { - dev = pkgs.mkShell { - # for local python dev: - nativeBuildInputs = with pkgs; [ poetry cardano-nodes-tests-env ]; - }; base = pkgs.mkShell { nativeBuildInputs = with pkgs; [ bash coreutils curl git gnugrep gnumake gnutar jq python3Packages.supervisor xz ]; }; @@ -114,16 +43,9 @@ pkgs.python3Packages.virtualenv ]; }); - default = ( - cardano-node.devShells.${system}.devops - ).overrideAttrs (oldAttrs: rec { - nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ oldAttrs.nativeBuildInputs ++ [ - cardano-node.packages.${system}.cardano-submit-api - cardano-nodes-tests-apps - # TODO: can be removed once tests scripts do not rely on cardano-nodes-tests-apps dependencies: - cardano-nodes-tests-apps.dependencyEnv - ]; - }); + # Use 'venv' directly as 'default' and 'dev' + default = venv; + dev = venv; }; }); From e73232644b13c3a69d5dc965cd40c5c399cfb6ae Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 
13:13:06 +0100 Subject: [PATCH 098/168] chore(nix): remove poetry2nix from flake.lock --- flake.lock | 193 +---------------------------------------------------- 1 file changed, 1 insertion(+), 192 deletions(-) diff --git a/flake.lock b/flake.lock index 83112c7b2..f166cb1d7 100644 --- a/flake.lock +++ b/flake.lock @@ -516,42 +516,6 @@ "type": "github" } }, - "flake-utils_6": { - "inputs": { - "systems": "systems_3" - }, - "locked": { - "lastModified": 1726560853, - "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "flake-utils_7": { - "inputs": { - "systems": "systems_5" - }, - "locked": { - "lastModified": 1689068808, - "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, "ghc-8.6.5-iohk": { "flake": false, "locked": { @@ -1070,48 +1034,6 @@ "type": "github" } }, - "nix-github-actions": { - "inputs": { - "nixpkgs": [ - "poetry2nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1729742964, - "narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=", - "owner": "nix-community", - "repo": "nix-github-actions", - "rev": "e04df33f62cdcf93d73e9a04142464753a16db67", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "nix-github-actions", - "type": "github" - } - }, - "nix-github-actions_2": { - "inputs": { - "nixpkgs": [ - "poetry2nix-old", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1688870561, - "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=", - "owner": "nix-community", - "repo": "nix-github-actions", - "rev": 
"165b1650b753316aa7f1787f3005a8d2da0f5301", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "nix-github-actions", - "type": "github" - } - }, "nix-nomad": { "inputs": { "flake-compat": "flake-compat", @@ -1539,52 +1461,6 @@ "type": "github" } }, - "poetry2nix": { - "inputs": { - "flake-utils": "flake-utils_6", - "nix-github-actions": "nix-github-actions", - "nixpkgs": [ - "nixpkgs" - ], - "systems": "systems_4", - "treefmt-nix": "treefmt-nix" - }, - "locked": { - "lastModified": 1731205797, - "narHash": "sha256-F7N1mxH1VrkVNHR3JGNMRvp9+98KYO4b832KS8Gl2xI=", - "owner": "nix-community", - "repo": "poetry2nix", - "rev": "f554d27c1544d9c56e5f1f8e2b8aff399803674e", - "type": "github" - }, - "original": { - "id": "poetry2nix", - "type": "indirect" - } - }, - "poetry2nix-old": { - "inputs": { - "flake-utils": "flake-utils_7", - "nix-github-actions": "nix-github-actions_2", - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1696512612, - "narHash": "sha256-p6niqag7b4XEHvzWgG0X/xjoW/ZXbAxW8ggd8yReT3Y=", - "owner": "nix-community", - "repo": "poetry2nix", - "rev": "e23218d1599e3369dfc878757e58974017e0ecc8", - "type": "github" - }, - "original": { - "owner": "nix-community", - "ref": "2023.10.05.49422", - "repo": "poetry2nix", - "type": "github" - } - }, "root": { "inputs": { "cardano-node": "cardano-node", @@ -1592,9 +1468,7 @@ "nixpkgs": [ "cardano-node", "nixpkgs" - ], - "poetry2nix": "poetry2nix", - "poetry2nix-old": "poetry2nix-old" + ] } }, "secp256k1": { @@ -1778,71 +1652,6 @@ "type": "github" } }, - "systems_3": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "systems_4": { - "locked": { - "lastModified": 1681028828, - "narHash": 
"sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "id": "systems", - "type": "indirect" - } - }, - "systems_5": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "treefmt-nix": { - "inputs": { - "nixpkgs": [ - "poetry2nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1730120726, - "narHash": "sha256-LqHYIxMrl/1p3/kvm2ir925tZ8DkI0KA10djk8wecSk=", - "owner": "numtide", - "repo": "treefmt-nix", - "rev": "9ef337e492a5555d8e17a51c911ff1f02635be15", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "treefmt-nix", - "type": "github" - } - }, "tullia": { "inputs": { "nix-nomad": "nix-nomad", From 6395b9c871c379631f64a65e988d685e972731be Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 13:16:07 +0100 Subject: [PATCH 099/168] chore(README): update nix develop command Removed the unnecessary .#venv argument from the nix develop command in the README.md file. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d5d1f5f3a..bc9af17fe 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,7 @@ Sometimes it is useful to run individual tests and keep the local cluster runnin ```sh nix flake update --accept-flake-config --override-input cardano-node "github:IntersectMBO/cardano-node/master" # change `master` to rev you want - nix develop --accept-flake-config .#venv + nix develop --accept-flake-config ``` 1. 
prepare testing environment From 1f6c76186831f9b8b5f881805b8b910c3af33c6e Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 14:57:10 +0100 Subject: [PATCH 100/168] feat: update base dev shell to use Python 3.11 Updated the base dev shell in flake.nix to use Python 3.11 instead of the default Python 3.10. This change ensures that all dependencies and tools in the base shell are compatible with Python 3.11. --- .github/workflows/repo_tests.yaml | 2 +- flake.nix | 8 +++++--- setup_dev_venv.sh | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/repo_tests.yaml b/.github/workflows/repo_tests.yaml index 53e977a63..a5bb3e314 100644 --- a/.github/workflows/repo_tests.yaml +++ b/.github/workflows/repo_tests.yaml @@ -18,7 +18,7 @@ jobs: id: setup-python uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.11' - name: Install Poetry uses: snok/install-poetry@v1 with: diff --git a/flake.nix b/flake.nix index a040d877f..aa45585bb 100644 --- a/flake.nix +++ b/flake.nix @@ -21,11 +21,13 @@ (system: let pkgs = nixpkgs.legacyPackages.${system}; + py3pkgs = pkgs.python311Packages; + py3Full = pkgs.python311Full; in { devShells = rec { base = pkgs.mkShell { - nativeBuildInputs = with pkgs; [ bash coreutils curl git gnugrep gnumake gnutar jq python3Packages.supervisor xz ]; + nativeBuildInputs = with pkgs; [ py3Full bash coreutils curl git gnugrep gnumake gnutar jq py3pkgs.supervisor xz ]; }; # TODO: can be removed once sync tests are fully moved to separate repo python = pkgs.mkShell { @@ -39,8 +41,8 @@ ).overrideAttrs (oldAttrs: rec { nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ oldAttrs.nativeBuildInputs ++ [ cardano-node.packages.${system}.cardano-submit-api - pkgs.python3Packages.pip - pkgs.python3Packages.virtualenv + py3pkgs.pip + py3pkgs.virtualenv ]; }); # Use 'venv' directly as 'default' and 'dev' diff --git a/setup_dev_venv.sh b/setup_dev_venv.sh index 
d9a91a322..9c2915713 100755 --- a/setup_dev_venv.sh +++ b/setup_dev_venv.sh @@ -2,7 +2,7 @@ # # Install cardano_node_tests and its dependencies into a virtual environment. -PYTHON_VERSION="3.10" +PYTHON_VERSION="3.11" abort_install=0 From ffd380dc182ed69e7ec7f2cd246cc08d8086be9f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 12 Nov 2024 16:38:21 +0100 Subject: [PATCH 101/168] chore: update pre-commit hooks to latest versions Updated the following pre-commit hooks to their latest versions: - pre-commit-hooks from v4.6.0 to v5.0.0 - ruff-pre-commit from v0.6.3 to v0.7.3 - markdownlint-cli from v0.41.0 to v0.42.0 This ensures we are using the latest features and fixes. --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cad4d708d..97d6cf7e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ exclude: ^sync_tests/ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace language_version: python3 @@ -14,7 +14,7 @@ repos: - id: debug-statements language_version: python3 - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.6.3 + rev: v0.7.3 hooks: - id: ruff args: [ --fix ] @@ -24,7 +24,7 @@ repos: hooks: - id: shellcheck - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.41.0 + rev: v0.42.0 hooks: - id: markdownlint - repo: https://github.com/rstcheck/rstcheck From 1d3104498a4c41b3cb8eaccc6f540830f8381a31 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 13 Nov 2024 11:09:54 +0100 Subject: [PATCH 102/168] feat(ci): another attempt for better output grouping GitHub Actions don't group output precisely. Another attempt to make the output look good. 
--- .github/regression.sh | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/regression.sh b/.github/regression.sh index b288e739d..5193ae638 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -85,9 +85,7 @@ if [ -n "${BOOTSTRAP_DIR:-""}" ]; then export MAKE_TARGET="${MAKE_TARGET:-"testnets"}" fi -echo "::endgroup::" # end group for "Script setup" - -echo "::group::Dependencies setup" +echo "### Dependencies setup ###" # setup dbsync (disabled by default) case "${DBSYNC_REV:-""}" in @@ -131,9 +129,9 @@ case "${CARDANO_CLI_REV:-""}" in ;; esac -echo "::endgroup::" # end group for "Dependencies setup" +df -h . -echo "::group::Cleanup setup" +echo "### Cleanup setup ###" _cleanup() { # stop all running cluster instances @@ -177,7 +175,7 @@ _interrupted() { } trap 'set +e; _interrupted; exit 130' SIGINT -echo "::endgroup::" # end group for "Cleanup setup" +echo "::endgroup::" # end group for "Script setup" echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 From 00931f22a333ea8315d794b0b17c7a44a708012a Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 13 Nov 2024 11:16:23 +0100 Subject: [PATCH 103/168] docs: update Python version requirement in README The README file has been updated to remove the specific Python version requirement (v3.8 or newer) for creating a virtual environment. The `setup_dev_venv.sh` makes sure correct version is used. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bc9af17fe..943d1864a 100644 --- a/README.md +++ b/README.md @@ -135,7 +135,7 @@ Install and configure poetry, follow [Poetry documentation](https://python-poetr ### Preparing Python virtual environment -Create a Python virtual environment (requires Python v3.8 or newer) and install this package together with development requirements: +Create a Python virtual environment and install this package together with development requirements: ```sh ./setup_dev_venv.sh From 62dcc08e555f008388ec9c361417db2001ff6e14 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 13 Nov 2024 18:51:26 +0100 Subject: [PATCH 104/168] fix(tests): adjust epoch interval buffer in test_hardfork Increased the stop buffer in wait_for_epoch_interval to ensure enough time for proposal and votes submission within one epoch. --- cardano_node_tests/tests/tests_conway/test_hardfork.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/tests_conway/test_hardfork.py b/cardano_node_tests/tests/tests_conway/test_hardfork.py index 9d0041433..69f7f9897 100644 --- a/cardano_node_tests/tests/tests_conway/test_hardfork.py +++ b/cardano_node_tests/tests/tests_conway/test_hardfork.py @@ -118,7 +118,7 @@ def test_hardfork( # Make sure we have enough time to submit the proposal and the votes in one epoch clusterlib_utils.wait_for_epoch_interval( - cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - 20 + cluster_obj=cluster, start=1, stop=common.EPOCH_STOP_SEC_BUFFER - 30 ) init_epoch = cluster.g_query.get_epoch() From 333575eedae756560ba1a83ac5ac62630044ca54 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 14 Nov 2024 16:51:35 +0100 Subject: [PATCH 105/168] feat(tests): add CIP027 and failed withdrawal test case - Added CIP027 to the list of requirements in reqs_conway.py. 
- Updated TestConstitution to include a test case for failing to withdraw the deposit from a stake address that is not delegated to a DRep. --- cardano_node_tests/tests/reqs_conway.py | 1 + .../tests/tests_conway/test_constitution.py | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/cardano_node_tests/tests/reqs_conway.py b/cardano_node_tests/tests/reqs_conway.py index 45dd0462a..3ba6a83e7 100644 --- a/cardano_node_tests/tests/reqs_conway.py +++ b/cardano_node_tests/tests/reqs_conway.py @@ -49,6 +49,7 @@ def __dr(id: str) -> requirements.Req: cip026_02 = __r("intCIP026-02") # Constitution disallowed cip026_03 = __r("intCIP026-03") # Withdrawal disallowed cip026_04 = __r("intCIP026-04") # DRep voting disallowed +cip027 = __r("CIP027") cip028 = __r("CIP028") cip029 = __r("CIP029") cip030en = __r("intCIP030en") # enacted diff --git a/cardano_node_tests/tests/tests_conway/test_constitution.py b/cardano_node_tests/tests/tests_conway/test_constitution.py index 2498207e3..09624f543 100644 --- a/cardano_node_tests/tests/tests_conway/test_constitution.py +++ b/cardano_node_tests/tests/tests_conway/test_constitution.py @@ -331,6 +331,7 @@ def test_change_constitution( * vote to approve the action * check that the action is ratified * try to disapprove the ratified action, this shouldn't have any effect + * try and fail to withdraw the deposit from stake address that is not delegated to a DRep * check that the action is enacted * check that it's not possible to vote on enacted action """ @@ -340,6 +341,10 @@ def test_change_constitution( governance_data = governance_w_scripts_lg temp_template = f"{common.get_test_id(cluster)}_{rand_str}" + init_return_account_balance = cluster.g_query.get_stake_addr_info( + pool_user_lg.stake.address + ).reward_account_balance + # Create an action anchor_url = f"http://www.const-action-{rand_str}.com" @@ -521,6 +526,29 @@ def _check_cli_query(): _check_cli_query() reqc.cli036.success() + # Check that deposit was returned 
immediately after enactment + enact_deposit_returned = cluster.g_query.get_stake_addr_info( + pool_user_lg.stake.address + ).reward_account_balance + + assert ( + enact_deposit_returned + == init_return_account_balance + cluster.conway_genesis["govActionDeposit"] + ), "Incorrect return account balance" + + reqc.cip027.start(url=helpers.get_vcs_link()) + # Try to withdraw the deposit from stake address that is not delegated to a DRep + with pytest.raises(clusterlib.CLIError) as excinfo: + clusterlib_utils.withdraw_reward_w_build( + cluster_obj=cluster, + stake_addr_record=pool_user_lg.stake, + dst_addr_record=pool_user_lg.payment, + tx_name=temp_template, + ) + err_str = str(excinfo.value) + assert "(ConwayWdrlNotDelegatedToDRep" in err_str, err_str + reqc.cip027.success() + # Try to vote on enacted action with pytest.raises(clusterlib.CLIError) as excinfo: conway_common.cast_vote( From cb74ddea47fb46f9ab903b5674a579b0ec1aed99 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 14 Nov 2024 17:19:58 +0100 Subject: [PATCH 106/168] fix(github-actions): hide special keywords in debug output Special keywords like "::endgroup::" in debug output seem to confuse GHA from time to time. Try to hide the keyword using hexadecimal representation. 
--- .github/node_upgrade.sh | 34 +++++++++++++++++----------------- .github/regression.sh | 24 ++++++++++++------------ 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index eac04f0bd..7f467fb07 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -58,9 +58,9 @@ export SCHEDULING_LOG=scheduling.log export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" unset ENABLE_LEGACY MIXED_P2P -echo "::endgroup::" # end group for "Script setup" +echo -e "\x3A:endgroup::" # end group for "Script setup" -echo "::group::Nix env setup step1" +echo -e "\x3A:group::Nix env setup step1" printf "start: %(%H:%M:%S)T\n" -1 # shellcheck disable=SC1090,SC1091 @@ -85,13 +85,13 @@ nix flake update --accept-flake-config $NODE_OVERRIDE nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step1" printf "finish: %(%H:%M:%S)T\n" -1 - echo "::endgroup::" # end group for "Nix env setup step1" + echo -e "\x3A:endgroup::" # end group for "Nix env setup step1" - echo "::group::Python venv setup step1" + echo -e "\x3A:group::Python venv setup step1" . .github/setup_venv.sh clean - echo "::endgroup::" # end group for "Python venv setup step1" + echo -e "\x3A:endgroup::" # end group for "Python venv setup step1" - echo "::group::-> PYTEST STEP1 <-" + echo -e "\x3A:group::-> PYTEST STEP1 <-" df -h . 
# prepare scripts for stating cluster instance, start cluster instance, run smoke tests ./.github/node_upgrade_pytest.sh step1 @@ -106,9 +106,9 @@ fi # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we don't want to continue [ "$retval" -le 1 ] || exit "$retval" -echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" +echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP1 <-" -echo "::group::Nix env setup steps 2 & 3" +echo -e "\x3A:group::Nix env setup steps 2 & 3" printf "start: %(%H:%M:%S)T\n" -1 # update cardano-node to specified branch and/or revision, or to the latest available revision @@ -124,29 +124,29 @@ nix flake update --accept-flake-config $NODE_OVERRIDE nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step2" printf "finish: %(%H:%M:%S)T\n" -1 - echo "::endgroup::" # end group for "Nix env setup steps 2 & 3" + echo -e "\x3A:endgroup::" # end group for "Nix env setup steps 2 & 3" - echo "::group::Python venv setup steps 2 & 3" + echo -e "\x3A:group::Python venv setup steps 2 & 3" . .github/setup_venv.sh clean - echo "::endgroup::" # end group for "Python venv setup steps 2 & 3" + echo -e "\x3A:endgroup::" # end group for "Python venv setup steps 2 & 3" - echo "::group::-> PYTEST STEP2 <-" + echo -e "\x3A:group::-> PYTEST STEP2 <-" df -h . # update cluster nodes, run smoke tests ./.github/node_upgrade_pytest.sh step2 retval="$?" # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we dont want to continue [ "$retval" -le 1 ] || exit "$retval" - echo "::endgroup::" # end group for "-> PYTEST STEP2 <-" + echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP2 <-" - echo "::group::-> PYTEST STEP3 <-" + echo -e "\x3A:group::-> PYTEST STEP3 <-" df -h . # update to Conway, run smoke tests ./.github/node_upgrade_pytest.sh step3 retval="$?" 
- echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" + echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP3 <-" - echo "::group::Cluster teardown & artifacts" + echo -e "\x3A:group::Cluster teardown & artifacts" # teardown cluster ./.github/node_upgrade_pytest.sh finish exit $retval @@ -180,6 +180,6 @@ if [ -n "${GITHUB_ACTIONS:-""}" ]; then ls -1a fi -echo "::endgroup::" # end group for "Cluster teardown & artifacts" +echo -e "\x3A:endgroup::" # end group for "Cluster teardown & artifacts" exit "$retval" diff --git a/.github/regression.sh b/.github/regression.sh index 5193ae638..7fcd12431 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -149,8 +149,8 @@ _cleanup_testnet_on_interrupt() { _PYTEST_CURRENT="$(readlink -m "$_PYTEST_CURRENT")" export _PYTEST_CURRENT - echo "::endgroup::" # end group for the group that was interrupted - echo "::group::Testnet cleanup" + echo -e "\x3A:endgroup::" # end group for the group that was interrupted + echo -e "\x3A:group::Testnet cleanup" # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' @@ -163,7 +163,7 @@ _cleanup_testnet_on_interrupt() { testnet-cleanup -a "$_PYTEST_CURRENT" ' - echo "::endgroup::" + echo -e "\x3A:endgroup::" } # cleanup on Ctrl+C @@ -175,9 +175,9 @@ _interrupted() { } trap 'set +e; _interrupted; exit 130' SIGINT -echo "::endgroup::" # end group for "Script setup" +echo -e "\x3A:endgroup::" # end group for "Script setup" -echo "::group::Nix env setup" +echo -e "\x3A:group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 # function to update cardano-node to specified branch and/or revision, or to the latest available @@ -192,20 +192,20 @@ nix flake update --accept-flake-config $(node_override) nix develop --accept-flake-config .#venv --command bash -c ' printf "finish: %(%H:%M:%S)T\n" -1 df -h . 
- echo "::endgroup::" # end group for "Nix env setup" + echo -e "\x3A:endgroup::" # end group for "Nix env setup" - echo "::group::Python venv setup" + echo -e "\x3A:group::Python venv setup" . .github/setup_venv.sh clean - echo "::endgroup::" # end group for "Python venv setup" + echo -e "\x3A:endgroup::" # end group for "Python venv setup" - echo "::group::-> PYTEST RUN <-" + echo -e "\x3A:group::-> PYTEST RUN <-" export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" export CARDANO_NODE_SOCKET_PATH="$CARDANO_NODE_SOCKET_PATH_CI" make "${MAKE_TARGET:-"tests"}" retval="$?" - echo "::endgroup::" + echo -e "\x3A:endgroup::" - echo "::group::Collect artifacts" + echo -e "\x3A:group::Collect artifacts" ./.github/cli_coverage.sh ./.github/reqs_coverage.sh exit "$retval" @@ -252,6 +252,6 @@ if [ -n "${GITHUB_ACTIONS:-""}" ]; then ls -1a fi -echo "::endgroup::" # end group for "Collect artifacts" +echo -e "\x3A:endgroup::" # end group for "Collect artifacts" exit "$retval" From f209473a5434d9ddbfdfa57427943a42f63d3293 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 15 Nov 2024 09:05:49 +0100 Subject: [PATCH 107/168] fix(github-actions): attempt no 6 to make GHA grouping behave --- .github/node_upgrade.sh | 2 -- .github/regression.sh | 2 -- .github/workflows/regression_reusable.yaml | 1 + .github/workflows/upgrade_reusable.yaml | 1 + 4 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 7f467fb07..fff18a8e7 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -180,6 +180,4 @@ if [ -n "${GITHUB_ACTIONS:-""}" ]; then ls -1a fi -echo -e "\x3A:endgroup::" # end group for "Cluster teardown & artifacts" - exit "$retval" diff --git a/.github/regression.sh b/.github/regression.sh index 7fcd12431..f3067c439 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -252,6 +252,4 @@ if [ -n "${GITHUB_ACTIONS:-""}" ]; then ls -1a fi -echo -e "\x3A:endgroup::" # end 
group for "Collect artifacts" - exit "$retval" diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index 98f339072..ad9e84655 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -112,6 +112,7 @@ jobs: # env echo "::group::Script setup" .github/regression.sh + echo "::endgroup::" - name: Report test results if: (success() || failure()) && inputs.testrun_name run: | diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index 6954ecded..af67f6304 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -49,6 +49,7 @@ jobs: # env echo "::group::Script setup" ./.github/node_upgrade.sh + echo "::endgroup::" - name: Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() From 9191f2f5dd3d44bd72053ffcd7fae3a68f0d0c32 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 18 Nov 2024 13:53:53 +0100 Subject: [PATCH 108/168] refactor: turn xtrace off when printing out grouping strings Attempt no. 7 to make GHA grouping work reliably for all workflows. Having debug output after group ends seem to confuse the parser. This commit disables xtrace at the group beginning and end. 
--- .github/node_upgrade.sh | 36 ++++++++++++++++++++---------------- .github/regression.sh | 28 +++++++++++++++++----------- 2 files changed, 37 insertions(+), 27 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index fff18a8e7..9331ae4bc 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -58,9 +58,11 @@ export SCHEDULING_LOG=scheduling.log export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" unset ENABLE_LEGACY MIXED_P2P -echo -e "\x3A:endgroup::" # end group for "Script setup" +sets="$-"; set +x +echo "::endgroup::" # end group for "Script setup" +echo "::group::Nix env setup step1" +set -"$sets" -echo -e "\x3A:group::Nix env setup step1" printf "start: %(%H:%M:%S)T\n" -1 # shellcheck disable=SC1090,SC1091 @@ -85,13 +87,13 @@ nix flake update --accept-flake-config $NODE_OVERRIDE nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step1" printf "finish: %(%H:%M:%S)T\n" -1 - echo -e "\x3A:endgroup::" # end group for "Nix env setup step1" + echo "::endgroup::" # end group for "Nix env setup step1" - echo -e "\x3A:group::Python venv setup step1" + echo "::group::Python venv setup step1" . .github/setup_venv.sh clean - echo -e "\x3A:endgroup::" # end group for "Python venv setup step1" + echo "::endgroup::" # end group for "Python venv setup step1" - echo -e "\x3A:group::-> PYTEST STEP1 <-" + echo "::group::-> PYTEST STEP1 <-" df -h . 
# prepare scripts for stating cluster instance, start cluster instance, run smoke tests ./.github/node_upgrade_pytest.sh step1 @@ -106,9 +108,11 @@ fi # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we don't want to continue [ "$retval" -le 1 ] || exit "$retval" -echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP1 <-" +sets="$-"; set +x +echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" +echo "::group::Nix env setup steps 2 & 3" +set -"$sets" -echo -e "\x3A:group::Nix env setup steps 2 & 3" printf "start: %(%H:%M:%S)T\n" -1 # update cardano-node to specified branch and/or revision, or to the latest available revision @@ -124,29 +128,29 @@ nix flake update --accept-flake-config $NODE_OVERRIDE nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step2" printf "finish: %(%H:%M:%S)T\n" -1 - echo -e "\x3A:endgroup::" # end group for "Nix env setup steps 2 & 3" + echo "::endgroup::" # end group for "Nix env setup steps 2 & 3" - echo -e "\x3A:group::Python venv setup steps 2 & 3" + echo "::group::Python venv setup steps 2 & 3" . .github/setup_venv.sh clean - echo -e "\x3A:endgroup::" # end group for "Python venv setup steps 2 & 3" + echo "::endgroup::" # end group for "Python venv setup steps 2 & 3" - echo -e "\x3A:group::-> PYTEST STEP2 <-" + echo "::group::-> PYTEST STEP2 <-" df -h . # update cluster nodes, run smoke tests ./.github/node_upgrade_pytest.sh step2 retval="$?" # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we dont want to continue [ "$retval" -le 1 ] || exit "$retval" - echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP2 <-" + echo "::endgroup::" # end group for "-> PYTEST STEP2 <-" - echo -e "\x3A:group::-> PYTEST STEP3 <-" + echo "::group::-> PYTEST STEP3 <-" df -h . # update to Conway, run smoke tests ./.github/node_upgrade_pytest.sh step3 retval="$?" 
- echo -e "\x3A:endgroup::" # end group for "-> PYTEST STEP3 <-" + echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" - echo -e "\x3A:group::Cluster teardown & artifacts" + echo "::group::Cluster teardown & artifacts" # teardown cluster ./.github/node_upgrade_pytest.sh finish exit $retval diff --git a/.github/regression.sh b/.github/regression.sh index f3067c439..5df9c8cbe 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -149,8 +149,10 @@ _cleanup_testnet_on_interrupt() { _PYTEST_CURRENT="$(readlink -m "$_PYTEST_CURRENT")" export _PYTEST_CURRENT - echo -e "\x3A:endgroup::" # end group for the group that was interrupted - echo -e "\x3A:group::Testnet cleanup" + sets="$-"; set +x + echo "::endgroup::" # end group for the group that was interrupted + echo "::group::Testnet cleanup" + set -"$sets" # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' @@ -163,7 +165,9 @@ _cleanup_testnet_on_interrupt() { testnet-cleanup -a "$_PYTEST_CURRENT" ' - echo -e "\x3A:endgroup::" + sets="$-"; set +x + echo "::endgroup::" + set -"$sets" } # cleanup on Ctrl+C @@ -175,9 +179,11 @@ _interrupted() { } trap 'set +e; _interrupted; exit 130' SIGINT -echo -e "\x3A:endgroup::" # end group for "Script setup" +sets="$-"; set +x +echo "::endgroup::" # end group for "Script setup" +echo "::group::Nix env setup" +set -"$sets" -echo -e "\x3A:group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 # function to update cardano-node to specified branch and/or revision, or to the latest available @@ -192,20 +198,20 @@ nix flake update --accept-flake-config $(node_override) nix develop --accept-flake-config .#venv --command bash -c ' printf "finish: %(%H:%M:%S)T\n" -1 df -h . - echo -e "\x3A:endgroup::" # end group for "Nix env setup" + echo "::endgroup::" # end group for "Nix env setup" - echo -e "\x3A:group::Python venv setup" + echo "::group::Python venv setup" . 
.github/setup_venv.sh clean - echo -e "\x3A:endgroup::" # end group for "Python venv setup" + echo "::endgroup::" # end group for "Python venv setup" - echo -e "\x3A:group::-> PYTEST RUN <-" + echo "::group::-> PYTEST RUN <-" export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" export CARDANO_NODE_SOCKET_PATH="$CARDANO_NODE_SOCKET_PATH_CI" make "${MAKE_TARGET:-"tests"}" retval="$?" - echo -e "\x3A:endgroup::" + echo "::endgroup::" - echo -e "\x3A:group::Collect artifacts" + echo "::group::Collect artifacts" ./.github/cli_coverage.sh ./.github/reqs_coverage.sh exit "$retval" From 3a186b9ecdb4619bca9f7d181bc73ef5ff8eb2ac Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 18 Nov 2024 18:36:12 +0100 Subject: [PATCH 109/168] feat: add deposit amount to stake addr deregistration - Updated `deregister_stake_addr` to include `deposit_amt` parameter. - Modified `cleanup` function to retrieve and pass the stake deposit amount. - Adjusted transaction creation to handle deposit amount. 
--- cardano_node_tests/utils/testnet_cleanup.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index 18cccdfd0..e725e6ffe 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -45,12 +45,17 @@ def withdraw_reward( def deregister_stake_addr( - cluster_obj: clusterlib.ClusterLib, pool_user: clusterlib.PoolUser, name_template: str + cluster_obj: clusterlib.ClusterLib, + pool_user: clusterlib.PoolUser, + name_template: str, + deposit_amt: int, ) -> None: """Deregister stake address.""" # files for deregistering stake address stake_addr_dereg_cert = cluster_obj.g_stake_address.gen_stake_addr_deregistration_cert( - addr_name=f"rf_{name_template}_addr0_dereg", stake_vkey_file=pool_user.stake.vkey_file + addr_name=f"rf_{name_template}_addr0_dereg", + deposit_amt=deposit_amt, + stake_vkey_file=pool_user.stake.vkey_file, ) tx_files_deregister = clusterlib.TxFiles( certificate_files=[stake_addr_dereg_cert], @@ -63,6 +68,7 @@ def deregister_stake_addr( src_address=pool_user.payment.address, tx_name=f"{name_template}_dereg_stake_addr", tx_files=tx_files_deregister, + deposit=-deposit_amt, ) @@ -166,6 +172,7 @@ def cleanup( faucet_addr_file = cluster_env.state_dir / "shelley" / "faucet.addr" faucet_payment = create_addr_record(faucet_addr_file) files_found = group_files(find_files(location)) + stake_deposit_amt = cluster_obj.g_query.get_address_deposit() def _run(files: tp.List[pl.Path]) -> None: for fpath in files: @@ -200,7 +207,10 @@ def _run(files: tp.List[pl.Path]) -> None: ) deregister_stake_addr( - cluster_obj=cluster_obj, pool_user=pool_user, name_template=f_name + cluster_obj=cluster_obj, + pool_user=pool_user, + name_template=f_name, + deposit_amt=stake_deposit_amt, ) else: try: From 140ef1c45d30f1f866c9371a12aea9c3d19a122d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 18 Nov 
2024 18:53:33 +0100 Subject: [PATCH 110/168] refactor: replace contextlib.suppress with try-except Replaced contextlib.suppress with try-except blocks to handle clusterlib.CLIError exceptions. This change improves error logging by providing specific error messages when exceptions occur. --- cardano_node_tests/utils/testnet_cleanup.py | 22 ++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index e725e6ffe..14451edb9 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -6,7 +6,6 @@ """ import concurrent.futures -import contextlib import functools import logging import pathlib as pl @@ -34,14 +33,17 @@ def withdraw_reward( signing_key_files=[dst_addr_record.skey_file, stake_addr_record.skey_file], ) - LOGGER.info(f"Withdrawing rewards for '{stake_addr_record.address}'") - with contextlib.suppress(clusterlib.CLIError): + try: cluster_obj.g_transaction.send_tx( src_address=dst_address, tx_name=f"rf_{name_template}_reward_withdrawal", tx_files=tx_files_withdrawal, withdrawals=[clusterlib.TxOut(address=stake_addr_record.address, amount=-1)], ) + except clusterlib.CLIError: + LOGGER.error(f"Failed to withdraw rewards for '{stake_addr_record.address}'") # noqa: TRY400 + else: + LOGGER.info(f"Withdrawn rewards for '{stake_addr_record.address}'") def deregister_stake_addr( @@ -62,14 +64,17 @@ def deregister_stake_addr( signing_key_files=[pool_user.payment.skey_file, pool_user.stake.skey_file], ) - LOGGER.info(f"Deregistering stake address '{pool_user.stake.address}'") - with contextlib.suppress(clusterlib.CLIError): + try: cluster_obj.g_transaction.send_tx( src_address=pool_user.payment.address, tx_name=f"{name_template}_dereg_stake_addr", tx_files=tx_files_deregister, deposit=-deposit_amt, ) + except clusterlib.CLIError: + LOGGER.error(f"Failed to deregister stake address 
'{pool_user.stake.address}'") # noqa: TRY400 + else: + LOGGER.info(f"Deregistered stake address '{pool_user.stake.address}'") def return_funds_to_faucet( @@ -100,9 +105,8 @@ def return_funds_to_faucet( futxo = random.choice(faucet_utxos) txins.append(futxo) - LOGGER.info(f"Returning funds from '{src_addr.address}'") # try to return funds; don't mind if there's not enough funds for fees etc. - with contextlib.suppress(clusterlib.CLIError): + try: cluster_obj.g_transaction.send_tx( src_address=src_addr.address, tx_name=tx_name, @@ -111,6 +115,10 @@ def return_funds_to_faucet( tx_files=fund_tx_files, verify_tx=False, ) + except clusterlib.CLIError: + LOGGER.error(f"Failed to return funds from '{src_addr.address}'") # noqa: TRY400 + else: + LOGGER.info(f"Returned funds from '{src_addr.address}'") def create_addr_record(addr_file: pl.Path) -> clusterlib.AddressRecord: From d1c22a38d93c8fb40841fcb9be333dc0c6b8de00 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 18 Nov 2024 18:59:23 +0100 Subject: [PATCH 111/168] feat: update teardown & artifacts group names in GHA Updated the group names in node_upgrade.sh and regression.sh to better reflect the teardown process and artifact collection. This improves clarity in the CI logs. --- .github/node_upgrade.sh | 2 +- .github/regression.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 9331ae4bc..b798b9e5f 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -150,7 +150,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' retval="$?" 
echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" - echo "::group::Cluster teardown & artifacts" + echo "::group::Teardown cluster & collect artifacts" # teardown cluster ./.github/node_upgrade_pytest.sh finish exit $retval diff --git a/.github/regression.sh b/.github/regression.sh index 5df9c8cbe..7ced6b10c 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -211,7 +211,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' retval="$?" echo "::endgroup::" - echo "::group::Collect artifacts" + echo "::group::Collect artifacts & teardown cluster" ./.github/cli_coverage.sh ./.github/reqs_coverage.sh exit "$retval" From 095fcba1f1557ffb6b90f952b2fd465504b878d3 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 18 Nov 2024 19:37:50 +0100 Subject: [PATCH 112/168] feat(nix): remove dependency on devops nix shell --- flake.lock | 6 +++--- flake.nix | 14 ++++++++------ poetry.lock | 39 ++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + requirements_freeze.txt | 2 ++ 5 files changed, 52 insertions(+), 10 deletions(-) diff --git a/flake.lock b/flake.lock index f166cb1d7..edf4bbf20 100644 --- a/flake.lock +++ b/flake.lock @@ -503,11 +503,11 @@ "systems": "systems_2" }, "locked": { - "lastModified": 1726560853, - "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index aa45585bb..a794caa63 100644 --- a/flake.nix +++ b/flake.nix @@ -27,7 +27,7 @@ { devShells = rec { base = pkgs.mkShell { - nativeBuildInputs = with pkgs; [ py3Full bash coreutils curl git gnugrep gnumake gnutar jq py3pkgs.supervisor xz ]; + nativeBuildInputs = with pkgs; [ bash coreutils curl git gnugrep gnumake gnutar jq xz ]; }; # TODO: can 
be removed once sync tests are fully moved to separate repo python = pkgs.mkShell { @@ -36,15 +36,17 @@ postgres = pkgs.mkShell { nativeBuildInputs = with pkgs; [ glibcLocales postgresql lsof procps ]; }; - venv = ( - cardano-node.devShells.${system}.devops - ).overrideAttrs (oldAttrs: rec { - nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ oldAttrs.nativeBuildInputs ++ [ + venv = pkgs.mkShell { + nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ [ + cardano-node.packages.${system}.cardano-cli + cardano-node.packages.${system}.cardano-node cardano-node.packages.${system}.cardano-submit-api + cardano-node.packages.${system}.bech32 + py3Full py3pkgs.pip py3pkgs.virtualenv ]; - }); + }; # Use 'venv' directly as 'default' and 'dev' default = venv; dev = venv; diff --git a/poetry.lock b/poetry.lock index 0e76112bf..d3c00632c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1504,6 +1504,26 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "setuptools" +version = "75.5.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"}, + {file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", 
"pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] + [[package]] name = "six" version = "1.16.0" @@ -1732,6 +1752,23 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "supervisor" +version = "4.2.5" +description = "A system for controlling process state under UNIX" +optional = false +python-versions = "*" +files = [ + {file = "supervisor-4.2.5-py2.py3-none-any.whl", hash = "sha256:2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08"}, + {file = "supervisor-4.2.5.tar.gz", hash = "sha256:34761bae1a23c58192281a5115fb07fbf22c9b0133c08166beffc70fed3ebc12"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +testing = ["pytest", "pytest-cov"] + [[package]] name = "tomli" version = "2.0.1" @@ -1924,4 +1961,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<4.0" -content-hash = "e41d79bdced69afd1301c1577571974b7fcf751e5a0771cd9cd3b8bb12520c98" +content-hash = "63884c7d85200ddc793250f699e35677e28a1d6dceb34bb712926d31a924849b" diff --git a/pyproject.toml b/pyproject.toml index 17e06fcc0..24c5507a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ pytest-xdist = "^3.6.1" PyYAML = "^6.0.2" 
requests = "^2.32.3" pytest-subtests = "^0.13.1" +supervisor = "^4.2.5" [tool.poetry.group.dev] optional = true diff --git a/requirements_freeze.txt b/requirements_freeze.txt index 5827ae77c..1f3b70c46 100644 --- a/requirements_freeze.txt +++ b/requirements_freeze.txt @@ -38,7 +38,9 @@ pytest-xdist==3.6.1 ; python_version >= "3.10" and python_version < "4.0" pytest==8.3.3 ; python_version >= "3.10" and python_version < "4.0" pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" +setuptools==75.5.0 ; python_version >= "3.10" and python_version < "4.0" sortedcontainers==2.4.0 ; python_version >= "3.10" and python_version < "4.0" +supervisor==4.2.5 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" urllib3==2.2.3 ; python_version >= "3.10" and python_version < "4.0" From b893f3af4f183dace085c34afb3595a168b0ce2d Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 10:34:23 +0100 Subject: [PATCH 113/168] refactor: turn xtrace off for CI scripts Attempt no. 8 to make GHA grouping work reliably for all workflows. Having debug output seems to confuse the GHA groups parser, and some of the debug output leaks out of the groups. I've tried several different attempts to mitigate this behavior, unsuccessfully. In this commit, I'm turning debug output off alltogether. The script is stable, we don't have much use for the debug output anyway. 
--- .github/node_upgrade.sh | 6 +----- .github/regression.sh | 11 +---------- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index b798b9e5f..0e0a3d521 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -6,7 +6,7 @@ # BASE_REVISION - revision of cardano-node to upgrade from (alternative to BASE_TAR_URL) # UPGRADE_REVISION - revision of cardano-node to upgrade to -set -xeuo pipefail +set -euo pipefail if [[ -z "${BASE_TAR_URL:-""}" && -z "${BASE_REVISION:-""}" ]]; then echo "BASE_TAR_URL or BASE_REVISION must be set" @@ -58,10 +58,8 @@ export SCHEDULING_LOG=scheduling.log export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 NUM_POOLS="${NUM_POOLS:-4}" unset ENABLE_LEGACY MIXED_P2P -sets="$-"; set +x echo "::endgroup::" # end group for "Script setup" echo "::group::Nix env setup step1" -set -"$sets" printf "start: %(%H:%M:%S)T\n" -1 @@ -108,10 +106,8 @@ fi # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we don't want to continue [ "$retval" -le 1 ] || exit "$retval" -sets="$-"; set +x echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" echo "::group::Nix env setup steps 2 & 3" -set -"$sets" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/regression.sh b/.github/regression.sh index 7ced6b10c..751289409 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -1,7 +1,7 @@ #! /usr/bin/env -S nix develop --accept-flake-config .#base -c bash # shellcheck shell=bash disable=SC2317 -set -xeuo pipefail +set -euo pipefail nix --version df -h . 
@@ -149,10 +149,8 @@ _cleanup_testnet_on_interrupt() { _PYTEST_CURRENT="$(readlink -m "$_PYTEST_CURRENT")" export _PYTEST_CURRENT - sets="$-"; set +x echo "::endgroup::" # end group for the group that was interrupted echo "::group::Testnet cleanup" - set -"$sets" # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' @@ -165,9 +163,7 @@ _cleanup_testnet_on_interrupt() { testnet-cleanup -a "$_PYTEST_CURRENT" ' - sets="$-"; set +x echo "::endgroup::" - set -"$sets" } # cleanup on Ctrl+C @@ -179,10 +175,8 @@ _interrupted() { } trap 'set +e; _interrupted; exit 130' SIGINT -sets="$-"; set +x echo "::endgroup::" # end group for "Script setup" echo "::group::Nix env setup" -set -"$sets" printf "start: %(%H:%M:%S)T\n" -1 @@ -230,13 +224,10 @@ mv .reports/testrun-report.* ./ # Don't stop cluster instances just yet if KEEP_CLUSTERS_RUNNING is set to 1. # After any key is pressed, resume this script and stop all running cluster instances. if [ "${KEEP_CLUSTERS_RUNNING:-""}" = 1 ]; then - sets="$-" - set +x echo echo "KEEP_CLUSTERS_RUNNING is set, leaving clusters running until any key is pressed." echo "Press any key to continue..." read -r - set -"$sets" fi _cleanup From 51dec9747cd905e79a722bde41f0789a5dac983e Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 11:00:03 +0100 Subject: [PATCH 114/168] refactor(ci): cleanup CI scripts output Removed multiple redundant `df -h .` commands from various CI scripts. Added a check for the existence of the `stop_postgres` command before attempting to stop PostgreSQL. 
--- .github/grep_errors.sh | 4 ++-- .github/node_upgrade.sh | 15 +++++++++------ .github/regression.sh | 20 +++++++++++--------- .github/source_dbsync.sh | 2 ++ 4 files changed, 24 insertions(+), 17 deletions(-) diff --git a/.github/grep_errors.sh b/.github/grep_errors.sh index 9c63f55b5..50508b24b 100644 --- a/.github/grep_errors.sh +++ b/.github/grep_errors.sh @@ -4,6 +4,6 @@ ARTIFACTS_DIR="${ARTIFACTS_DIR:-".artifacts"}" ERR_LOGFILE="$PWD/errors_all.log" # shellcheck disable=SC2012 -pushd "$ARTIFACTS_DIR" || { echo "Cannot switch to $ARTIFACTS_DIR"; ls -1a "$ARTIFACTS_DIR"; exit 1; } > "$ERR_LOGFILE" +pushd "$ARTIFACTS_DIR" > /dev/null || { echo "Cannot switch to $ARTIFACTS_DIR"; ls -1a "$ARTIFACTS_DIR"; exit 1; } > "$ERR_LOGFILE" grep -r --include "*.stdout" --include "*.stderr" -Ei ":error:|failed|failure" . > "$ERR_LOGFILE" -popd || exit 1 +popd > /dev/null || exit 1 diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 0e0a3d521..1aa9025ed 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -59,8 +59,8 @@ export DEV_CLUSTER_RUNNING=1 CLUSTERS_COUNT=1 FORBID_RESTART=1 TEST_THREADS=10 N unset ENABLE_LEGACY MIXED_P2P echo "::endgroup::" # end group for "Script setup" -echo "::group::Nix env setup step1" +echo "::group::Nix env setup step1" printf "start: %(%H:%M:%S)T\n" -1 # shellcheck disable=SC1090,SC1091 @@ -84,14 +84,15 @@ nix flake update --accept-flake-config $NODE_OVERRIDE # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step1" - printf "finish: %(%H:%M:%S)T\n" -1 echo "::endgroup::" # end group for "Nix env setup step1" echo "::group::Python venv setup step1" + printf "start: %(%H:%M:%S)T\n" -1 . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup step1" echo "::group::-> PYTEST STEP1 <-" + printf "start: %(%H:%M:%S)T\n" -1 df -h . 
# prepare scripts for stating cluster instance, start cluster instance, run smoke tests ./.github/node_upgrade_pytest.sh step1 @@ -107,8 +108,8 @@ fi [ "$retval" -le 1 ] || exit "$retval" echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" -echo "::group::Nix env setup steps 2 & 3" +echo "::group::Nix env setup steps 2 & 3" printf "start: %(%H:%M:%S)T\n" -1 # update cardano-node to specified branch and/or revision, or to the latest available revision @@ -123,14 +124,15 @@ nix flake update --accept-flake-config $NODE_OVERRIDE # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' : > "$WORKDIR/.nix_step2" - printf "finish: %(%H:%M:%S)T\n" -1 echo "::endgroup::" # end group for "Nix env setup steps 2 & 3" echo "::group::Python venv setup steps 2 & 3" + printf "start: %(%H:%M:%S)T\n" -1 . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup steps 2 & 3" echo "::group::-> PYTEST STEP2 <-" + printf "start: %(%H:%M:%S)T\n" -1 df -h . # update cluster nodes, run smoke tests ./.github/node_upgrade_pytest.sh step2 @@ -140,21 +142,22 @@ nix develop --accept-flake-config .#venv --command bash -c ' echo "::endgroup::" # end group for "-> PYTEST STEP2 <-" echo "::group::-> PYTEST STEP3 <-" + printf "start: %(%H:%M:%S)T\n" -1 df -h . # update to Conway, run smoke tests ./.github/node_upgrade_pytest.sh step3 retval="$?" + df -h . echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" echo "::group::Teardown cluster & collect artifacts" + printf "start: %(%H:%M:%S)T\n" -1 # teardown cluster ./.github/node_upgrade_pytest.sh finish exit $retval ' retval="$?" -df -h . - if [ ! -e "$WORKDIR/.nix_step2" ]; then echo "Nix env setup failed, exiting" exit 1 diff --git a/.github/regression.sh b/.github/regression.sh index 751289409..badf7f9b7 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -97,7 +97,6 @@ case "${DBSYNC_REV:-""}" in * ) # shellcheck disable=SC1090,SC1091 . 
.github/source_dbsync.sh - df -h . ;; esac @@ -129,8 +128,6 @@ case "${CARDANO_CLI_REV:-""}" in ;; esac -df -h . - echo "### Cleanup setup ###" _cleanup() { @@ -138,7 +135,9 @@ _cleanup() { stop_instances "$WORKDIR" # stop postgres if running - stop_postgres || : + if command -v stop_postgres >/dev/null 2>&1; then + stop_postgres || : + fi } _cleanup_testnet_on_interrupt() { @@ -150,7 +149,9 @@ _cleanup_testnet_on_interrupt() { export _PYTEST_CURRENT echo "::endgroup::" # end group for the group that was interrupted + echo "::group::Testnet cleanup" + printf "start: %(%H:%M:%S)T\n" -1 # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' @@ -176,8 +177,8 @@ _interrupted() { trap 'set +e; _interrupted; exit 130' SIGINT echo "::endgroup::" # end group for "Script setup" -echo "::group::Nix env setup" +echo "::group::Nix env setup" printf "start: %(%H:%M:%S)T\n" -1 # function to update cardano-node to specified branch and/or revision, or to the latest available @@ -190,30 +191,31 @@ set +e nix flake update --accept-flake-config $(node_override) # shellcheck disable=SC2016 nix develop --accept-flake-config .#venv --command bash -c ' - printf "finish: %(%H:%M:%S)T\n" -1 - df -h . echo "::endgroup::" # end group for "Nix env setup" echo "::group::Python venv setup" + printf "start: %(%H:%M:%S)T\n" -1 . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup" echo "::group::-> PYTEST RUN <-" + printf "start: %(%H:%M:%S)T\n" -1 + df -h . export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" export CARDANO_NODE_SOCKET_PATH="$CARDANO_NODE_SOCKET_PATH_CI" make "${MAKE_TARGET:-"tests"}" retval="$?" + df -h . echo "::endgroup::" echo "::group::Collect artifacts & teardown cluster" + printf "start: %(%H:%M:%S)T\n" -1 ./.github/cli_coverage.sh ./.github/reqs_coverage.sh exit "$retval" ' retval="$?" -df -h . 
- # move reports to root dir mv .reports/testrun-report.* ./ diff --git a/.github/source_dbsync.sh b/.github/source_dbsync.sh index 6f2b28ba0..0018aa1ca 100644 --- a/.github/source_dbsync.sh +++ b/.github/source_dbsync.sh @@ -7,6 +7,8 @@ export TEST_THREADS CLUSTERS_COUNT pushd "$WORKDIR" || exit 1 stop_postgres() { + echo "Stopping postgres" + local psql_pid_file="$WORKDIR/postgres/postgres.pid" if [ ! -f "$psql_pid_file" ]; then return 0 From df9ccfe152b9c098c3adf03da2f5c8df86840007 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 11:37:28 +0100 Subject: [PATCH 115/168] chore: remove missing nix inputs Removed old missing inputs from flake.nix to clean up the configuration. --- flake.nix | 5 ----- 1 file changed, 5 deletions(-) diff --git a/flake.nix b/flake.nix index a794caa63..9c84ec22b 100644 --- a/flake.nix +++ b/flake.nix @@ -4,15 +4,10 @@ inputs = { cardano-node = { url = "github:IntersectMBO/cardano-node"; - inputs = { - node-measured.follows = "cardano-node"; - membench.follows = "/"; - }; }; nixpkgs.follows = "cardano-node/nixpkgs"; flake-utils = { url = "github:numtide/flake-utils"; - inputs.nixpkgs.follows = "nixpkgs"; }; }; From 8a3d7836bbc71209dce4e47603f5bae855738220 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 11:59:20 +0100 Subject: [PATCH 116/168] feat(ci): switch to poetry for dependency management in CI Replaced pip with poetry in setup_venv.sh and removed requirements_freeze.txt. Updated flake.nix and flake.lock to include poetry and use nixos-24.05. 
--- .github/setup_venv.sh | 2 +- .github/workflows/nix_smoke.yaml | 1 - flake.lock | 21 +++++++++++--- flake.nix | 4 +-- poetry_update_deps.sh | 3 -- requirements_freeze.txt | 47 -------------------------------- 6 files changed, 20 insertions(+), 58 deletions(-) delete mode 100644 requirements_freeze.txt diff --git a/.github/setup_venv.sh b/.github/setup_venv.sh index c2706a2a9..258d3f482 100644 --- a/.github/setup_venv.sh +++ b/.github/setup_venv.sh @@ -19,7 +19,7 @@ PYTHONPATH="$(echo "$VIRTUAL_ENV"/lib/python3*/site-packages):$PYTHONPATH" export PYTHONPATH if [ -z "$_REQS_INSTALLED" ]; then - pip install -r requirements_freeze.txt + poetry install -n fi unset _VENV_DIR _REQS_INSTALLED diff --git a/.github/workflows/nix_smoke.yaml b/.github/workflows/nix_smoke.yaml index 6f7c4e95e..7d2870f8e 100644 --- a/.github/workflows/nix_smoke.yaml +++ b/.github/workflows/nix_smoke.yaml @@ -5,7 +5,6 @@ on: branches: [ "master" ] paths: - 'flake.lock' - - 'requirements_freeze.txt' - '**.nix' - '.github/workflows/nix_smoke.yaml' workflow_dispatch: diff --git a/flake.lock b/flake.lock index edf4bbf20..77fd7f380 100644 --- a/flake.lock +++ b/flake.lock @@ -1366,6 +1366,22 @@ "type": "github" } }, + "nixpkgs_7": { + "locked": { + "lastModified": 1731797254, + "narHash": "sha256-df3dJApLPhd11AlueuoN0Q4fHo/hagP75LlM5K1sz9g=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e8c38b73aeb218e27163376a2d617e61a2ad9b59", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-24.05", + "repo": "nixpkgs", + "type": "github" + } + }, "nosys": { "locked": { "lastModified": 1668010795, @@ -1465,10 +1481,7 @@ "inputs": { "cardano-node": "cardano-node", "flake-utils": "flake-utils_5", - "nixpkgs": [ - "cardano-node", - "nixpkgs" - ] + "nixpkgs": "nixpkgs_7" } }, "secp256k1": { diff --git a/flake.nix b/flake.nix index 9c84ec22b..cd6a7b8b8 100644 --- a/flake.nix +++ b/flake.nix @@ -2,10 +2,10 @@ description = "Functional tests for cardano-node"; inputs = { + nixpkgs.url = 
"github:NixOS/nixpkgs/nixos-24.05"; cardano-node = { url = "github:IntersectMBO/cardano-node"; }; - nixpkgs.follows = "cardano-node/nixpkgs"; flake-utils = { url = "github:numtide/flake-utils"; }; @@ -37,8 +37,8 @@ cardano-node.packages.${system}.cardano-node cardano-node.packages.${system}.cardano-submit-api cardano-node.packages.${system}.bech32 + pkgs.poetry py3Full - py3pkgs.pip py3pkgs.virtualenv ]; }; diff --git a/poetry_update_deps.sh b/poetry_update_deps.sh index 94ef8fb9b..c36c5a6a6 100755 --- a/poetry_update_deps.sh +++ b/poetry_update_deps.sh @@ -21,6 +21,3 @@ if [ "$abort_install" -eq 1 ]; then fi poetry lock --no-update "$@" -echo "# Don't edit. This file is generated by poetry_update_deps.sh" > requirements_freeze.txt -echo "-e ." >> requirements_freeze.txt -poetry export --without-hashes --format=requirements.txt --no-interaction >> requirements_freeze.txt diff --git a/requirements_freeze.txt b/requirements_freeze.txt deleted file mode 100644 index 1f3b70c46..000000000 --- a/requirements_freeze.txt +++ /dev/null @@ -1,47 +0,0 @@ -# Don't edit. This file is generated by poetry_update_deps.sh --e . 
-allure-pytest==2.13.5 ; python_version >= "3.10" and python_version < "4.0" -allure-python-commons==2.13.5 ; python_version >= "3.10" and python_version < "4.0" -annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0" -attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" -cardano-clusterlib==0.7.0a4 ; python_version >= "3.10" and python_version < "4.0" -cbor2==5.6.5 ; python_version >= "3.10" and python_version < "4.0" -certifi==2024.8.30 ; python_version >= "3.10" and python_version < "4.0" -cffi==1.17.1 ; python_version >= "3.10" and python_version < "4.0" -charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" -colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" -cryptography==43.0.1 ; python_version >= "3.10" and python_version < "4.0" -deprecated==1.2.14 ; python_version >= "3.10" and python_version < "4.0" -exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" -execnet==2.1.1 ; python_version >= "3.10" and python_version < "4.0" -filelock==3.16.1 ; python_version >= "3.10" and python_version < "4.0" -hypothesis==6.118.8 ; python_version >= "3.10" and python_version < "4.0" -idna==3.8 ; python_version >= "3.10" and python_version < "4.0" -iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" -jinja2==3.1.4 ; python_version >= "3.10" and python_version < "4.0" -markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0" -packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" -pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" -psycopg2-binary==2.9.10 ; python_version >= "3.10" and python_version < "4.0" -pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" -pydantic-core==2.23.4 ; python_version >= "3.10" and python_version < "4.0" -pydantic==2.9.2 ; python_version >= "3.10" and python_version < "4.0" -pygithub==2.5.0 ; python_version >= "3.10" and 
python_version < "4.0" -pyjwt[crypto]==2.9.0 ; python_version >= "3.10" and python_version < "4.0" -pynacl==1.5.0 ; python_version >= "3.10" and python_version < "4.0" -pytest-html==4.1.1 ; python_version >= "3.10" and python_version < "4.0" -pytest-metadata==3.1.1 ; python_version >= "3.10" and python_version < "4.0" -pytest-order==1.3.0 ; python_version >= "3.10" and python_version < "4.0" -pytest-select==0.1.2 ; python_version >= "3.10" and python_version < "4.0" -pytest-subtests==0.13.1 ; python_version >= "3.10" and python_version < "4.0" -pytest-xdist==3.6.1 ; python_version >= "3.10" and python_version < "4.0" -pytest==8.3.3 ; python_version >= "3.10" and python_version < "4.0" -pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" -requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" -setuptools==75.5.0 ; python_version >= "3.10" and python_version < "4.0" -sortedcontainers==2.4.0 ; python_version >= "3.10" and python_version < "4.0" -supervisor==4.2.5 ; python_version >= "3.10" and python_version < "4.0" -tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.2.3 ; python_version >= "3.10" and python_version < "4.0" -wrapt==1.16.0 ; python_version >= "3.10" and python_version < "4.0" From f91a0aba192ba87f083fa988ee16a2778ac77705 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 12:46:50 +0100 Subject: [PATCH 117/168] feat: improve stop_cluster_instances.sh script - Add echo statement to indicate stopping of cluster instances - Suppress error messages from supervisord_stop script --- .github/stop_cluster_instances.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/stop_cluster_instances.sh b/.github/stop_cluster_instances.sh index 3bc47ee45..5cb1146f8 100644 --- a/.github/stop_cluster_instances.sh +++ b/.github/stop_cluster_instances.sh @@ -1,10 +1,11 @@ #!/usr/bin/env bash 
-# stop all running cluster instances stop_instances() { + echo "Stopping all running cluster instances" + local workdir="${1:?}" for sc in "$workdir"/state-cluster*; do [ -d "$sc" ] || continue - "$sc/supervisord_stop" || true + "$sc/supervisord_stop" 2>/dev/null || : done } From cb777c51484cbe9e2cd49d5b605f6593c90ed72f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 13:06:46 +0100 Subject: [PATCH 118/168] feat: refactor flake.nix to use nodePkgs variable Refactored flake.nix to introduce nodePkgs variable for cardano-node packages. This change improves readability and maintainability by replacing repeated cardano-node package references with nodePkgs. --- flake.nix | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/flake.nix b/flake.nix index cd6a7b8b8..e2d0b459b 100644 --- a/flake.nix +++ b/flake.nix @@ -16,7 +16,8 @@ (system: let pkgs = nixpkgs.legacyPackages.${system}; - py3pkgs = pkgs.python311Packages; + nodePkgs = cardano-node.packages.${system}; + py3Pkgs = pkgs.python311Packages; py3Full = pkgs.python311Full; in { @@ -33,13 +34,13 @@ }; venv = pkgs.mkShell { nativeBuildInputs = base.nativeBuildInputs ++ postgres.nativeBuildInputs ++ [ - cardano-node.packages.${system}.cardano-cli - cardano-node.packages.${system}.cardano-node - cardano-node.packages.${system}.cardano-submit-api - cardano-node.packages.${system}.bech32 + nodePkgs.cardano-cli + nodePkgs.cardano-node + nodePkgs.cardano-submit-api + nodePkgs.bech32 pkgs.poetry py3Full - py3pkgs.virtualenv + py3Pkgs.virtualenv ]; }; # Use 'venv' directly as 'default' and 'dev' From a62424adbe4bd5f08874eddebd2420046ebd0cbd Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 14:16:51 +0100 Subject: [PATCH 119/168] chore(nix): remove python devshell from flake.nix The python devshell has been removed from flake.nix as the sync tests are being fully moved to a separate repository. 
This change cleans up the flake configuration by removing the unnecessary python shell. --- flake.nix | 4 ---- 1 file changed, 4 deletions(-) diff --git a/flake.nix b/flake.nix index e2d0b459b..56861589b 100644 --- a/flake.nix +++ b/flake.nix @@ -25,10 +25,6 @@ base = pkgs.mkShell { nativeBuildInputs = with pkgs; [ bash coreutils curl git gnugrep gnumake gnutar jq xz ]; }; - # TODO: can be removed once sync tests are fully moved to separate repo - python = pkgs.mkShell { - nativeBuildInputs = with pkgs; with python39Packages; [ python39Full virtualenv pip matplotlib pandas requests xmltodict psutil GitPython pymysql ]; - }; postgres = pkgs.mkShell { nativeBuildInputs = with pkgs; [ glibcLocales postgresql lsof procps ]; }; From cccefecfa5ab3a9133187f2286cf31e621bb2123 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 14:21:40 +0100 Subject: [PATCH 120/168] chore: update flake.nix nix cache comments Updated comments in flake.nix to clarify the use of the IOG nix cache. Simplified the comment to make it more concise and clear. --- flake.nix | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index 56861589b..774423ad9 100644 --- a/flake.nix +++ b/flake.nix @@ -47,9 +47,7 @@ # --- Flake Local Nix Configuration ---------------------------- nixConfig = { - # This sets the flake to use the IOG nix cache. - # Nix should ask for permission before using it, - # but remove it here if you do not want it to. + # Sets the flake to use the IOG nix cache. 
extra-substituters = [ "https://cache.iog.io" ]; extra-trusted-public-keys = [ "hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=" ]; allow-import-from-derivation = "true"; From 6489bc8c8ddfafde6f64d798148664752c55d1f9 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 19 Nov 2024 17:22:50 +0100 Subject: [PATCH 121/168] chore: update venv directory path Updated the virtual environment directory path from .env to .venv in the setup_venv.sh script to follow standard naming conventions. --- .github/setup_venv.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/setup_venv.sh b/.github/setup_venv.sh index 258d3f482..60d96f692 100644 --- a/.github/setup_venv.sh +++ b/.github/setup_venv.sh @@ -1,6 +1,6 @@ #!/bin/bash -_VENV_DIR="${_VENV_DIR:-"$WORKDIR/.env"}" +_VENV_DIR="${_VENV_DIR:-"$WORKDIR/.venv"}" if [ "${1:-""}" = "clean" ]; then rm -rf "$_VENV_DIR" From e7bd258f101629854868191567e0ab67acafb676 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:15:27 +0000 Subject: [PATCH 122/168] chore(deps): bump dawidd6/action-send-mail from 3 to 4 Bumps [dawidd6/action-send-mail](https://github.com/dawidd6/action-send-mail) from 3 to 4. - [Release notes](https://github.com/dawidd6/action-send-mail/releases) - [Commits](https://github.com/dawidd6/action-send-mail/compare/v3...v4) --- updated-dependencies: - dependency-name: dawidd6/action-send-mail dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/regression_reusable.yaml | 2 +- .github/workflows/upgrade_reusable.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index ad9e84655..b8c85cc57 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -158,7 +158,7 @@ jobs: name: cli-coverage path: cli_coverage.json - name: Mail failure report - uses: dawidd6/action-send-mail@v3 + uses: dawidd6/action-send-mail@v4 if: (success() || failure()) && steps.testing-step.outcome != 'success' && env.CI_FAIL_MAILS with: server_address: smtp.gmail.com diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index af67f6304..a9742c771 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -100,7 +100,7 @@ jobs: name: cli-coverage path: cli_coverage.json - name: Mail failure report - uses: dawidd6/action-send-mail@v3 + uses: dawidd6/action-send-mail@v4 if: (success() || failure()) && steps.testing-step.outcome != 'success' && env.CI_FAIL_MAILS with: server_address: smtp.gmail.com From 9562926e33dfc0624ecb8486ec3683dd380a6957 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 10:03:08 +0100 Subject: [PATCH 123/168] chore(ci): remove xtrace from node upgrade script Removed the 'set -x' option from the node_upgrade_pytest.sh script to disable xtrace and reduce verbosity in CI output. 
--- .github/node_upgrade_pytest.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/node_upgrade_pytest.sh b/.github/node_upgrade_pytest.sh index c934801d5..1a9979b62 100755 --- a/.github/node_upgrade_pytest.sh +++ b/.github/node_upgrade_pytest.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -xuo pipefail +set -uo pipefail retval=1 From 78b2e476beb993159ace04397c3bd95cb94985ac Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 10:48:41 +0100 Subject: [PATCH 124/168] feat(protocol): update protocol param keys for Conway - Removed unused imports and constants related to protocol param keys - Updated `PROTOCOL_PARAM_KEYS` to include new parameters for Conway - Removed conditional logic for protocol param keys based on versions --- cardano_node_tests/tests/test_protocol.py | 50 +++++------------------ 1 file changed, 11 insertions(+), 39 deletions(-) diff --git a/cardano_node_tests/tests/test_protocol.py b/cardano_node_tests/tests/test_protocol.py index 2ad74eb5f..52ddfc0e3 100644 --- a/cardano_node_tests/tests/test_protocol.py +++ b/cardano_node_tests/tests/test_protocol.py @@ -7,13 +7,10 @@ import allure import pytest from cardano_clusterlib import clusterlib -from packaging import version from cardano_node_tests.tests import common from cardano_node_tests.tests import issues -from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import helpers -from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) @@ -32,10 +29,15 @@ PROTOCOL_PARAM_KEYS = frozenset( ( "collateralPercentage", + "committeeMaxTermLength", + "committeeMinSize", "costModels", - "decentralization", + "dRepActivity", + "dRepDeposit", + "dRepVotingThresholds", "executionUnitPrices", - "extraPraosEntropy", + "govActionDeposit", + "govActionLifetime", "maxBlockBodySize", "maxBlockExecutionUnits", "maxBlockHeaderSize", @@ -43,11 +45,12 @@ "maxTxExecutionUnits", "maxTxSize", "maxValueSize", + 
"minFeeRefScriptCostPerByte", "minPoolCost", - "minUTxOValue", "monetaryExpansion", "poolPledgeInfluence", "poolRetireMaxEpoch", + "poolVotingThresholds", "protocolVersion", "stakeAddressDeposit", "stakePoolDeposit", @@ -55,31 +58,10 @@ "treasuryCut", "txFeeFixed", "txFeePerByte", - "utxoCostPerWord", - ) -) -PROTOCOL_PARAM_KEYS_1_35_2 = frozenset(("utxoCostPerByte",)) - -PROTOCOL_PARAM_KEYS_CONWAY = frozenset( - ( - "govActionLifetime", - "govActionDeposit", - "committeeMaxTermLength", - "dRepDeposit", - "poolVotingThresholds", - "dRepVotingThresholds", - "committeeMinSize", - "minFeeRefScriptCostPerByte", - "dRepActivity", + "utxoCostPerByte", ) ) -PROTOCOL_PARAM_KEYS_MISSING_8_6_0 = frozenset(("utxoCostPerWord",)) - -PROTOCOL_PARAM_KEYS_MISSING_8_12_0 = frozenset( - ("minUTxOValue", "decentralization", "extraPraosEntropy") -) - @common.SKIPIF_WRONG_ERA class TestProtocol: @@ -130,18 +112,8 @@ def test_protocol_params(self, cluster: clusterlib.ClusterLib): common.get_test_id(cluster) protocol_params = cluster.g_query.get_protocol_params() + # The sets were updated for Conway, so there's nothing to add or remove at the moment. 
union_with: tp.FrozenSet[str] = frozenset() - if clusterlib_utils.cli_has( - "legacy governance create-update-proposal --utxo-cost-per-byte" - ): - union_with = union_with.union(PROTOCOL_PARAM_KEYS_1_35_2) - if VERSIONS.cluster_era >= VERSIONS.CONWAY: - union_with = union_with.union(PROTOCOL_PARAM_KEYS_CONWAY) - rem: tp.FrozenSet[str] = frozenset() - if clusterlib_utils.cli_has("conway"): - rem = rem.union(PROTOCOL_PARAM_KEYS_MISSING_8_6_0) - if VERSIONS.node >= version.parse("8.12.0"): - rem = rem.union(PROTOCOL_PARAM_KEYS_MISSING_8_12_0) assert set(protocol_params) == PROTOCOL_PARAM_KEYS.union(union_with).difference(rem) From f4437b405ca0eac830b0656c3ecb05a2a78ee71e Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 10:56:51 +0100 Subject: [PATCH 125/168] refactor(tests): remove unneeded fixtures in test files Removed unneeded fixtures `skip_leadership_schedule` and `skip_hf_command` from `test_blocks.py` and `test_hardfork.py` respectively. These fixtures were checking for the availability of certain CLI commands, which are available in all recent node versions. This change simplifies the test code and removes unnecessary checks. 
--- cardano_node_tests/tests/test_blocks.py | 7 ------- cardano_node_tests/tests/tests_conway/test_hardfork.py | 9 --------- 2 files changed, 16 deletions(-) diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index d4938824a..f98456699 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -34,17 +34,11 @@ class TestLeadershipSchedule: """Tests for cardano-cli leadership-schedule.""" - @pytest.fixture(scope="class") - def skip_leadership_schedule(self): - if not clusterlib_utils.cli_has("query leadership-schedule"): - pytest.skip("The `cardano-cli query leadership-schedule` command is not available.") - @allure.link(helpers.get_vcs_link()) @pytest.mark.needs_dbsync @pytest.mark.parametrize("for_epoch", ("current", "next")) def test_pool_blocks( self, - skip_leadership_schedule: None, # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], for_epoch: str, @@ -143,7 +137,6 @@ def test_pool_blocks( @allure.link(helpers.get_vcs_link()) def test_unstable_stake_distribution( self, - skip_leadership_schedule: None, # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, ): diff --git a/cardano_node_tests/tests/tests_conway/test_hardfork.py b/cardano_node_tests/tests/tests_conway/test_hardfork.py index 69f7f9897..37d6e6b98 100644 --- a/cardano_node_tests/tests/tests_conway/test_hardfork.py +++ b/cardano_node_tests/tests/tests_conway/test_hardfork.py @@ -43,19 +43,10 @@ def pool_user_lg( class TestHardfork: """Tests for hard-fork.""" - @pytest.fixture(scope="class") - def skip_hf_command(self): - if not clusterlib_utils.cli_has("conway governance action create-hardfork"): - pytest.skip( - "The `cardano-cli conway governance action create-hardfork` command " - "is not available." 
- ) - @allure.link(helpers.get_vcs_link()) @pytest.mark.long def test_hardfork( self, - skip_hf_command: None, # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_lock_governance: governance_utils.GovClusterT, pool_user_lg: clusterlib.PoolUser, From 32ee6c35786c172958cf4cf998e80115fb076eff Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 11:59:00 +0100 Subject: [PATCH 126/168] feat(logfiles): add function for checking expected messages Refactored the `expect_errors` function by extracting the log message checking logic into a separate `_check_msgs_presence_in_logs` function. Added a new `expect_messages` context manager for checking expected messages in logs. This improves code readability and reusability. --- cardano_node_tests/utils/logfiles.py | 99 ++++++++++++++++++++-------- 1 file changed, 71 insertions(+), 28 deletions(-) diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index 2e299e9fd..320fd5c36 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -270,33 +270,13 @@ def add_ignore_rule( infile.write(f"{files_glob};;{skip_after};;{regex}\n") -@contextlib.contextmanager -def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> tp.Iterator[None]: - """Make sure the expected errors are present in logs. - - Args: - regex_pairs: [(glob, regex)] - A list of regexes that need to be present in files - described by the glob. - worker_id: The id of the pytest-xdist worker (the `worker_id` fixture) that the test - is running on. 
- """ - state_dir = cluster_nodes.get_cluster_env().state_dir - - glob_list = [] - for files_glob, regex in regex_pairs: - add_ignore_rule(files_glob=files_glob, regex=regex, ignore_file_id=worker_id) - glob_list.append(files_glob) - # Resolve the globs - _expanded_paths = [list(state_dir.glob(glob_item)) for glob_item in glob_list] - # Flatten the list - expanded_paths = list(itertools.chain.from_iterable(_expanded_paths)) - # Record each end-of-file as a starting offset for searching the log file - seek_offsets = {str(p): helpers.get_eof_offset(p) for p in expanded_paths} - - timestamp = time.time() - - yield - +def _check_msgs_presence_in_logs( + regex_pairs: tp.List[tp.Tuple[str, str]], + seek_offsets: tp.Dict[str, int], + state_dir: pl.Path, + timestamp: float, +) -> None: + """Make sure the expected messages are present in logs.""" errors = [] for files_glob, regex in regex_pairs: regex_comp = re.compile(regex) @@ -307,7 +287,7 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> t if ROTATED_RE.match(logfile): continue - # Search for the expected error + # Search for the expected string seek = seek_offsets.get(logfile) or 0 line_found = False for logfile_rec in _get_rotated_logs( @@ -329,6 +309,69 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> t raise AssertionError(errors_joined) from None +@contextlib.contextmanager +def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> tp.Iterator[None]: + """Make sure the expected errors are present in logs. + + Context manager. + + Args: + regex_pairs: [(glob, regex)] - A list of regexes matching strings that need to be present + in files described by the glob. + worker_id: The id of the pytest-xdist worker (the `worker_id` fixture) that the test + is running on. 
+ """ + state_dir = cluster_nodes.get_cluster_env().state_dir + + glob_list = [] + for files_glob, regex in regex_pairs: + add_ignore_rule(files_glob=files_glob, regex=regex, ignore_file_id=worker_id) + glob_list.append(files_glob) + # Resolve the globs + _expanded_paths = [list(state_dir.glob(glob_item)) for glob_item in glob_list] + # Flatten the list + expanded_paths = list(itertools.chain.from_iterable(_expanded_paths)) + # Record each end-of-file as a starting offset for searching the log file + seek_offsets = {str(p): helpers.get_eof_offset(p) for p in expanded_paths} + + timestamp = time.time() + + yield + + _check_msgs_presence_in_logs( + regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp + ) + + +@contextlib.contextmanager +def expect_messages(regex_pairs: tp.List[tp.Tuple[str, str]]) -> tp.Iterator[None]: + """Make sure the expected messages are present in logs. + + Context manager. + + Args: + regex_pairs: [(glob, regex)] - A list of regexes matching strings that need to be present + in files described by the glob. 
+ """ + state_dir = cluster_nodes.get_cluster_env().state_dir + + glob_list = [r[0] for r in regex_pairs] + # Resolve the globs + _expanded_paths = [list(state_dir.glob(glob_item)) for glob_item in glob_list] + # Flatten the list + expanded_paths = list(itertools.chain.from_iterable(_expanded_paths)) + # Record each end-of-file as a starting offset for searching the log file + seek_offsets = {str(p): helpers.get_eof_offset(p) for p in expanded_paths} + + timestamp = time.time() + + yield + + _check_msgs_presence_in_logs( + regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp + ) + + def search_cluster_logs() -> tp.List[tp.Tuple[pl.Path, str]]: """Search cluster logs for errors.""" cluster_env = cluster_nodes.get_cluster_env() From c355ecfca29380b557a66ea6c29521e23d667eb8 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 12:00:19 +0100 Subject: [PATCH 127/168] feat: add expected log message check in hardfork test Add expected log message check for protocol version in `TestHardfork`. This ensures that the correct protocol version is logged during the hardfork enactment process. 
--- cardano_node_tests/tests/tests_conway/test_hardfork.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/tests_conway/test_hardfork.py b/cardano_node_tests/tests/tests_conway/test_hardfork.py index 37d6e6b98..ee657e44c 100644 --- a/cardano_node_tests/tests/tests_conway/test_hardfork.py +++ b/cardano_node_tests/tests/tests_conway/test_hardfork.py @@ -13,6 +13,7 @@ from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import governance_utils from cardano_node_tests.utils import helpers +from cardano_node_tests.utils import logfiles from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) @@ -231,7 +232,10 @@ def test_hardfork( ), "Incorrect major version" # Check enactment - enact_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 2, padding_seconds=5) + expected_msgs = [("pool1.stdout", r"ProtVer \{pvMajor = Version 10")] + with logfiles.expect_messages(expected_msgs): + enact_epoch = cluster.wait_for_epoch(epoch_no=init_epoch + 2, padding_seconds=15) + enact_gov_state = cluster.g_conway_governance.query.gov_state() conway_common.save_gov_state( gov_state=enact_gov_state, name_template=f"{temp_template}_enact_{enact_epoch}" From e583970c179730f860ead5444303b814169e9843 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 13:27:42 +0100 Subject: [PATCH 128/168] refactor(tests): update old pre-Conway poll tests Make the poll tests work in Conway. 
- Update docstrings to reflect pre-Conway governance functionality - Remove version checks for Babbage transaction era - Replace direct transaction build and submit calls with utility function - Adjust error assertions to include new possible error messages --- cardano_node_tests/tests/test_governance.py | 92 ++++++++------------- 1 file changed, 35 insertions(+), 57 deletions(-) diff --git a/cardano_node_tests/tests/test_governance.py b/cardano_node_tests/tests/test_governance.py index 2c8d53450..a53e9e6b7 100644 --- a/cardano_node_tests/tests/test_governance.py +++ b/cardano_node_tests/tests/test_governance.py @@ -1,4 +1,4 @@ -"""Tests for governance functionality. +"""Tests for old pre-Conway governance functionality. Tests for update proposals are in separate file `test_update_proposals.py`. @@ -25,24 +25,20 @@ from cardano_node_tests.utils import helpers from cardano_node_tests.utils import poll_utils from cardano_node_tests.utils import tx_view -from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) DATA_DIR = pl.Path(__file__).parent / "data" -pytestmark = pytest.mark.skipif( - VERSIONS.transaction_era != VERSIONS.BABBAGE, - reason="legacy SPO polls work only with Babbage Tx era", -) - class TestPoll: - """Tests for SPO poll.""" + """Tests for old pre-Conway SPO poll.""" @pytest.fixture(scope="class") def governance_poll_available(self) -> None: if not clusterlib_utils.cli_has("babbage governance create-poll"): - pytest.skip("The `cardano-cli governance` poll commands are not available.") + pytest.fail( + "The `cardano-cli babbage governance` poll commands are no longer available." 
+ ) @pytest.fixture def payment_addr( @@ -58,7 +54,7 @@ def payment_addr( cluster_obj=cluster, )[0] - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, @@ -128,30 +124,16 @@ def test_create_and_answer_poll( metadata_json_detailed_schema=True, ) - if use_build_cmd: - tx_output_poll = cluster.g_transaction.build_tx( - src_address=payment_addr.address, - tx_name=f"{temp_template}_poll", - tx_files=tx_files_poll, - witness_override=len(tx_files_poll.signing_key_files), - required_signers=required_signers_arg, - required_signer_hashes=required_signers_vkey_hash_arg, - ) - - tx_signed_poll = cluster.g_transaction.sign_tx( - tx_body_file=tx_output_poll.out_file, - signing_key_files=tx_files_poll.signing_key_files, - tx_name=f"{temp_template}_poll", - ) - cluster.g_transaction.submit_tx(tx_file=tx_signed_poll, txins=tx_output_poll.txins) - else: - tx_output_poll = cluster.g_transaction.send_tx( - src_address=payment_addr.address, - tx_name=f"{temp_template}_poll", - tx_files=tx_files_poll, - required_signers=required_signers_arg, - required_signer_hashes=required_signers_vkey_hash_arg, - ) + tx_output_poll = clusterlib_utils.build_and_submit_tx( + cluster_obj=cluster, + name_template=f"{temp_template}_poll", + src_address=payment_addr.address, + use_build_cmd=use_build_cmd, + tx_files=tx_files_poll, + required_signers=required_signers_arg, + required_signer_hashes=required_signers_vkey_hash_arg, + witness_override=len(tx_files_poll.signing_key_files), + ) expected_metadata = {"94": [[0, [poll_question]], [1, [["Yes"], ["No"]]]]} @@ -189,27 +171,15 @@ def test_create_and_answer_poll( metadata_json_detailed_schema=True, ) - if use_build_cmd: - tx_output_answer = cluster.g_transaction.build_tx( - src_address=payment_addr.address, - tx_name=f"{temp_template}_answer", - tx_files=tx_files_answer, - required_signers=[node_cold.skey_file], - witness_override=len(tx_files_answer.signing_key_files), - ) - tx_signed_answer = 
cluster.g_transaction.sign_tx( - tx_body_file=tx_output_answer.out_file, - signing_key_files=tx_files_answer.signing_key_files, - tx_name=f"{temp_template}_answer", - ) - cluster.g_transaction.submit_tx(tx_file=tx_signed_answer, txins=tx_output_answer.txins) - else: - tx_output_answer = cluster.g_transaction.send_tx( - src_address=payment_addr.address, - tx_name=f"{temp_template}_answer", - tx_files=tx_files_answer, - required_signers=[node_cold.skey_file], - ) + tx_output_answer = clusterlib_utils.build_and_submit_tx( + cluster_obj=cluster, + name_template=f"{temp_template}_answer", + src_address=payment_addr.address, + use_build_cmd=use_build_cmd, + tx_files=tx_files_answer, + required_signers=[node_cold.skey_file], + witness_override=len(tx_files_answer.signing_key_files), + ) out_utxos_answer = cluster.g_query.get_utxo(tx_raw_output=tx_output_answer) assert ( @@ -327,7 +297,11 @@ def test_create_invalid_answer( ) err_str = str(excinfo.value) - assert "Poll answer out of bounds" in err_str or "negative index" in err_str, err_str + assert ( + "Poll answer out of bounds" in err_str + or "negative index" in err_str + or 'unexpected "-"' in err_str + ), err_str @allure.link(helpers.get_vcs_link()) @pytest.mark.smoke @@ -353,7 +327,11 @@ def test_create_answer_negative_index( ) err_str = str(excinfo.value) - assert "Poll answer out of bounds" in err_str or "negative index" in err_str, err_str + assert ( + "Poll answer out of bounds" in err_str + or "negative index" in err_str + or 'unexpected "-"' in err_str + ), err_str if "Prelude.!!" in err_str: issues.dbsync_1363.finish_test() From b08f1693f7cc5216362c8c4c5061bd1f6019cc96 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 13:46:23 +0100 Subject: [PATCH 129/168] feat(tests): remove testnets mark from guardrails test The @pytest.mark.testnets decorator was removed from the test_guardrails method in the TestGovernanceGuardrails class. The guardrails tests cannot currently run on testnets. 
We need to modify the tests to not try to update the guardrails script on testnets. --- cardano_node_tests/tests/tests_conway/test_guardrails.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index d00917b5d..d5476af9b 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -1567,7 +1567,6 @@ def cost_models(cluster_with_constitution: ClusterWithConstitutionRecord): class TestGovernanceGuardrails: @allure.link(helpers.get_vcs_link()) @pytest.mark.long - @pytest.mark.testnets def test_guardrails( self, cluster_with_constitution: ClusterWithConstitutionRecord, From 4bcbd074c4f342d30f0b6f29e562991614fb9116 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 13:49:09 +0100 Subject: [PATCH 130/168] refactor(tests): update test to run on testnets Making sure the test can run without setting up governance first. --- cardano_node_tests/tests/tests_conway/test_committee.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_committee.py b/cardano_node_tests/tests/tests_conway/test_committee.py index f292bd684..0a8a623c1 100644 --- a/cardano_node_tests/tests/tests_conway/test_committee.py +++ b/cardano_node_tests/tests/tests_conway/test_committee.py @@ -105,8 +105,8 @@ class TestCommittee: @pytest.mark.smoke def test_register_hot_key_no_cc_member( self, - cluster_use_committee: governance_utils.GovClusterT, - payment_addr_comm: clusterlib.AddressRecord, + cluster: clusterlib.ClusterLib, + pool_user: clusterlib.PoolUser, use_build_cmd: bool, submit_method: str, ): @@ -114,7 +114,6 @@ def test_register_hot_key_no_cc_member( Expect failure. 
""" - cluster, __ = cluster_use_committee temp_template = common.get_test_id(cluster) cc_auth_record = governance_utils.get_cc_member_auth_record( @@ -124,7 +123,7 @@ def test_register_hot_key_no_cc_member( tx_files_auth = clusterlib.TxFiles( certificate_files=[cc_auth_record.auth_cert], - signing_key_files=[payment_addr_comm.skey_file, cc_auth_record.cold_key_pair.skey_file], + signing_key_files=[pool_user.payment.skey_file, cc_auth_record.cold_key_pair.skey_file], ) # Try to submit a Hot Credential Authorization certificate without being a CC member @@ -132,7 +131,7 @@ def test_register_hot_key_no_cc_member( clusterlib_utils.build_and_submit_tx( cluster_obj=cluster, name_template=f"{temp_template}_auth", - src_address=payment_addr_comm.address, + src_address=pool_user.payment.address, submit_method=submit_method, use_build_cmd=use_build_cmd, tx_files=tx_files_auth, From 0f93c647dd7350c6b6d4ba8befca5d2b61fe7956 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 20 Nov 2024 13:55:05 +0100 Subject: [PATCH 131/168] feat(governance): add error for testnet default governance Raise a ValueError when attempting to get default governance on testnets. This ensures that the function does not proceed with unsupported operations on testnet environments. 
--- cardano_node_tests/utils/governance_setup.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 49d44ff8f..87e5fe6ac 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -250,6 +250,11 @@ def get_default_governance( cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, ) -> governance_utils.GovernanceRecords: + """Get default governance data for CC members, DReps and SPOs.""" + if cluster_nodes.get_cluster_type().type == cluster_nodes.ClusterType.TESTNET: + err = "Default governance is not available on testnets" + raise ValueError(err) + cluster_env = cluster_nodes.get_cluster_env() gov_data_dir = cluster_env.state_dir / GOV_DATA_DIR gov_data_store = gov_data_dir / GOV_DATA_STORE From 4e5cc2c28ffd5eb2e4435fb54e765d16d5898a42 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 11:06:57 +0100 Subject: [PATCH 132/168] feat: add defragment UTxOs script - Add `defragment_utxos.py` script to defragment address UTxOs. - Implement `defragment` function in `utils/defragment_utxos.py`. - Update `pyproject.toml` to include the new script. 
--- cardano_node_tests/defragment_utxos.py | 61 ++++++++++++++++++++ cardano_node_tests/utils/defragment_utxos.py | 58 +++++++++++++++++++ pyproject.toml | 1 + 3 files changed, 120 insertions(+) create mode 100755 cardano_node_tests/defragment_utxos.py create mode 100644 cardano_node_tests/utils/defragment_utxos.py diff --git a/cardano_node_tests/defragment_utxos.py b/cardano_node_tests/defragment_utxos.py new file mode 100755 index 000000000..cb6e38d92 --- /dev/null +++ b/cardano_node_tests/defragment_utxos.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +"""Defragment address UTxOs.""" + +import argparse +import logging +import os +import pathlib as pl +import sys + +from cardano_clusterlib import clusterlib + +from cardano_node_tests.utils import defragment_utxos +from cardano_node_tests.utils import helpers + +LOGGER = logging.getLogger(__name__) + + +def get_args() -> argparse.Namespace: + """Get command line arguments.""" + parser = argparse.ArgumentParser(description=__doc__.split("\n", maxsplit=1)[0]) + parser.add_argument( + "-a", + "--address", + required=True, + help="Address", + ) + parser.add_argument( + "-s", + "--skey-file", + type=helpers.check_file_arg, + help="Path to skey file", + ) + return parser.parse_args() + + +def main() -> int: + logging.basicConfig( + format="%(name)s:%(levelname)s:%(message)s", + level=logging.INFO, + ) + args = get_args() + + socket_env = os.environ.get("CARDANO_NODE_SOCKET_PATH") + if not socket_env: + LOGGER.error("The `CARDANO_NODE_SOCKET_PATH` environment variable is not set.") + return 1 + + state_dir = pl.Path(socket_env).parent + cluster_obj = clusterlib.ClusterLib( + state_dir=state_dir, + command_era=clusterlib.CommandEras.LATEST, + ) + defragment_utxos.defragment( + cluster_obj=cluster_obj, address=args.address, skey_file=args.skey_file + ) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/cardano_node_tests/utils/defragment_utxos.py b/cardano_node_tests/utils/defragment_utxos.py new 
file mode 100644 index 000000000..6971c8d79 --- /dev/null +++ b/cardano_node_tests/utils/defragment_utxos.py @@ -0,0 +1,58 @@ +"""Defragment address UTxOs.""" + +import logging +import pathlib as pl + +from cardano_clusterlib import clusterlib + +LOGGER = logging.getLogger(__name__) + + +def defragment(cluster_obj: clusterlib.ClusterLib, address: str, skey_file: pl.Path) -> None: + """Defragment address UTxOs.""" + new_blocks = 3 + + loop = 1 + utxos_len = -1 + while True: + # Select UTxOs that are not locked and that contain only Lovelace + utxos_all = cluster_obj.g_query.get_utxo(address=address) + utxos_ids_excluded = { + f"{u.utxo_hash}#{u.utxo_ix}" + for u in utxos_all + if u.coin != clusterlib.DEFAULT_COIN or u.datum_hash + } + utxos = [u for u in utxos_all if f"{u.utxo_hash}#{u.utxo_ix}" not in utxos_ids_excluded] + + prev_utxos_len, utxos_len = utxos_len, len(utxos) + if prev_utxos_len <= utxos_len and loop >= 2: + LOGGER.info("No more UTxOs to defragment.") + break + if utxos_len <= 10: + break + + batch_size = min(100, utxos_len) + for b in range(1, utxos_len + 1, batch_size): + LOGGER.info(f"Defragmenting UTxOs: Running loop {loop}, batch {b}") + batch = utxos[b : b + batch_size] + tx_name = f"defrag_loop{loop}_batch{b}" + + tx_output = cluster_obj.g_transaction.build_tx( + src_address=address, + tx_name=tx_name, + txins=batch, + change_address=address, + ) + tx_signed_file = cluster_obj.g_transaction.sign_tx( + tx_body_file=tx_output.out_file, + tx_name=tx_name, + signing_key_files=[skey_file], + ) + cluster_obj.g_transaction.submit_tx_bare(tx_file=tx_signed_file) + + loop += 1 + + LOGGER.info( + f"Defragmenting UTxOs: Waiting for {new_blocks} new blocks before starting loop {loop}" + ) + cluster_obj.wait_for_new_block(new_blocks=new_blocks) diff --git a/pyproject.toml b/pyproject.toml index 24c5507a5..131c846c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ sphinxemoji = "^0.3.1" [tool.poetry.scripts] testnet-cleanup = 
"cardano_node_tests.testnet_cleanup:main" +defragment-utxos = "cardano_node_tests.defragment_utxos:main" prepare-cluster-scripts = "cardano_node_tests.prepare_cluster_scripts:main" split-topology = "cardano_node_tests.split_topology:main" cardano-cli-coverage = "cardano_node_tests.cardano_cli_coverage:main" From 8116fde79aadb969c7f80c1adf7de021928c2467 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 11:07:35 +0100 Subject: [PATCH 133/168] feat: add UTxO defragmentation to testnet cleanup Add defragmentation step to cleanup function to defragment faucet address UTxOs. --- cardano_node_tests/utils/testnet_cleanup.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index 14451edb9..d0e72bbe5 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -16,6 +16,7 @@ from cardano_clusterlib import clusterlib from cardano_node_tests.utils import cluster_nodes +from cardano_node_tests.utils import defragment_utxos LOGGER = logging.getLogger(__name__) @@ -237,3 +238,8 @@ def _run(files: tp.List[pl.Path]) -> None: with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: futures = [executor.submit(_run, f) for f in files_found] concurrent.futures.wait(futures) + + # Defragment faucet address UTxOs + defragment_utxos.defragment( + cluster_obj=cluster_obj, address=faucet_payment.address, skey_file=faucet_payment.skey_file + ) From f12b35df2a439cb2c3f9f049aca945617ef48dc3 Mon Sep 17 00:00:00 2001 From: Artur Wieczorek Date: Thu, 21 Nov 2024 12:29:15 +0100 Subject: [PATCH 134/168] Update db-sync utils for 13.6.0.1 release - Enacted gov_action_proposal are no longer marked as dropped --- .github/env_nightly_dbsync | 4 ++-- .github/env_nightly_dbsync_pv10 | 4 ++-- cardano_node_tests/utils/dbsync_utils.py | 5 ++--- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/env_nightly_dbsync 
b/.github/env_nightly_dbsync index d848272ec..fc19a128a 100644 --- a/.github/env_nightly_dbsync +++ b/.github/env_nightly_dbsync @@ -3,6 +3,6 @@ COMMAND_ERA=conway MARKEXPR=dbsync CLUSTERS_COUNT=4 ENABLE_LEGACY=true -DBSYNC_REV=13.5.0.0 -DBSYNC_TAR_URL=https://github.com/IntersectMBO/cardano-db-sync/releases/download/13.5.0.0/cardano-db-sync-13.5.0.0-linux.tar.gz +DBSYNC_REV=13.6.0.1 +DBSYNC_TAR_URL=https://github.com/IntersectMBO/cardano-db-sync/releases/download/13.6.0.1/cardano-db-sync-13.6.0.1-linux.tar.gz DBSYNC_SKIP_INDEXES=true diff --git a/.github/env_nightly_dbsync_pv10 b/.github/env_nightly_dbsync_pv10 index 99f382cad..a4f56d27b 100644 --- a/.github/env_nightly_dbsync_pv10 +++ b/.github/env_nightly_dbsync_pv10 @@ -3,6 +3,6 @@ COMMAND_ERA=conway PV10=true MARKEXPR=dbsync CLUSTERS_COUNT=4 -DBSYNC_REV=13.5.0.0 -DBSYNC_TAR_URL=https://github.com/IntersectMBO/cardano-db-sync/releases/download/13.5.0.0/cardano-db-sync-13.5.0.0-linux.tar.gz +DBSYNC_REV=13.6.0.1 +DBSYNC_TAR_URL=https://github.com/IntersectMBO/cardano-db-sync/releases/download/13.6.0.1/cardano-db-sync-13.6.0.1-linux.tar.gz DBSYNC_SKIP_INDEXES=true diff --git a/cardano_node_tests/utils/dbsync_utils.py b/cardano_node_tests/utils/dbsync_utils.py index baf3bf041..b34e1970b 100644 --- a/cardano_node_tests/utils/dbsync_utils.py +++ b/cardano_node_tests/utils/dbsync_utils.py @@ -1270,12 +1270,11 @@ def check_treasury_withdrawal(stake_address: str, transfer_amts: tp.List[int], t rem_amts.remove(r_amount) assert row.ratified_epoch, "Action not marked as ratified in db-sync" assert row.enacted_epoch, "Action not marked as enacted in db-sync" + assert not row.dropped_epoch, "Action marked as dropped in db-sync" + assert not row.expired_epoch, "Action marked as expired in db-sync" assert ( row.enacted_epoch == row.ratified_epoch + 1 ), "Wrong relation between enacted and ratified epochs in db-sync" - assert ( - row.enacted_epoch == row.dropped_epoch - ), "Wrong relation between enacted and dropped epochs in 
db-sync" def check_reward_rest(stake_address: str, transfer_amts: tp.List[int], type: str = "") -> None: From 6595239b37e803cd8871f45d971dee1201cd11e4 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 12:44:32 +0100 Subject: [PATCH 135/168] feat: make testnet cleanup work also outside of framework Make the testnet cleanup script work also when the testnet was not started through framework testnet scripts, e.g. directly in bootstrap dir. --- cardano_node_tests/testnet_cleanup.py | 30 ++++++++++++++++++--- cardano_node_tests/utils/testnet_cleanup.py | 29 +++++++++++++++++--- 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/cardano_node_tests/testnet_cleanup.py b/cardano_node_tests/testnet_cleanup.py index 32084be18..2d5db30fb 100755 --- a/cardano_node_tests/testnet_cleanup.py +++ b/cardano_node_tests/testnet_cleanup.py @@ -9,9 +9,11 @@ import argparse import logging import os +import pathlib as pl import sys -from cardano_node_tests.utils import cluster_nodes +from cardano_clusterlib import clusterlib + from cardano_node_tests.utils import helpers from cardano_node_tests.utils import testnet_cleanup @@ -28,6 +30,19 @@ def get_args() -> argparse.Namespace: type=helpers.check_dir_arg, help="Path to a directory with testing artifacts", ) + parser.add_argument( + "-f", + "--address", + required=True, + help="Faucet address", + ) + parser.add_argument( + "-s", + "--skey-file", + required=True, + type=helpers.check_file_arg, + help="Path to faucet skey file", + ) return parser.parse_args() @@ -38,15 +53,22 @@ def main() -> int: ) args = get_args() - if not os.environ.get("CARDANO_NODE_SOCKET_PATH"): + socket_env = os.environ.get("CARDANO_NODE_SOCKET_PATH") + if not socket_env: LOGGER.error("The `CARDANO_NODE_SOCKET_PATH` environment variable is not set.") return 1 if not os.environ.get("BOOTSTRAP_DIR"): LOGGER.error("The `BOOTSTRAP_DIR` environment variable is not set.") return 1 - cluster_obj = 
cluster_nodes.get_cluster_type().get_cluster_obj() - testnet_cleanup.cleanup(cluster_obj=cluster_obj, location=args.artifacts_base_dir) + state_dir = pl.Path(socket_env).parent + cluster_obj = clusterlib.ClusterLib(state_dir=state_dir) + testnet_cleanup.cleanup( + cluster_obj=cluster_obj, + location=args.artifacts_base_dir, + faucet_address=args.address, + faucet_skey_file=args.skey_file, + ) return 0 diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index d0e72bbe5..b0c584f00 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -172,14 +172,37 @@ def group_files(file_paths: tp.Generator[pl.Path, None, None]) -> tp.List[tp.Lis return path_groups +def _get_faucet_payment_rec( + address: str = "", + skey_file: clusterlib.FileType = "", +) -> clusterlib.AddressRecord: + if address or skey_file: + if not (address and skey_file): + err = "Both 'address' and 'skey_file' need to be set." 
+ raise ValueError(err) + + faucet_payment = clusterlib.AddressRecord( + address=address, + vkey_file=pl.Path("/nonexistent"), # We don't need this for faucet + skey_file=pl.Path(skey_file), + ) + else: + # Try to infer the faucet address and keys from cluster env + cluster_env = cluster_nodes.get_cluster_env() + faucet_addr_file = cluster_env.state_dir / "shelley" / "faucet.addr" + faucet_payment = create_addr_record(faucet_addr_file) + + return faucet_payment + + def cleanup( cluster_obj: clusterlib.ClusterLib, location: clusterlib.FileType, + faucet_address: str = "", + faucet_skey_file: clusterlib.FileType = "", ) -> None: """Cleanup a testnet with the help of testing artifacts.""" - cluster_env = cluster_nodes.get_cluster_env() - faucet_addr_file = cluster_env.state_dir / "shelley" / "faucet.addr" - faucet_payment = create_addr_record(faucet_addr_file) + faucet_payment = _get_faucet_payment_rec(address=faucet_address, skey_file=faucet_skey_file) files_found = group_files(find_files(location)) stake_deposit_amt = cluster_obj.g_query.get_address_deposit() From c8141f400af2d5f43952826f801cc4874c536d2f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 13:49:22 +0100 Subject: [PATCH 136/168] feat: make skey-file argument required The skey-file argument is now required in the defragment_utxos.py script. This change ensures that the script always receives a valid skey file path, improving its reliability and preventing potential errors due to missing arguments. 
--- cardano_node_tests/defragment_utxos.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cardano_node_tests/defragment_utxos.py b/cardano_node_tests/defragment_utxos.py index cb6e38d92..3969f8640 100755 --- a/cardano_node_tests/defragment_utxos.py +++ b/cardano_node_tests/defragment_utxos.py @@ -27,6 +27,7 @@ def get_args() -> argparse.Namespace: parser.add_argument( "-s", "--skey-file", + required=True, type=helpers.check_file_arg, help="Path to skey file", ) From f74c5235960842b176d97a46c107cd1ab0bb2d24 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 13:49:58 +0100 Subject: [PATCH 137/168] refactor: init ClusterLib with default command era Simplified the initialization of the ClusterLib object by removing unnecessary command era. This ensures the default (latest) command era will be used. --- cardano_node_tests/defragment_utxos.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/cardano_node_tests/defragment_utxos.py b/cardano_node_tests/defragment_utxos.py index 3969f8640..7d1c8e703 100755 --- a/cardano_node_tests/defragment_utxos.py +++ b/cardano_node_tests/defragment_utxos.py @@ -47,10 +47,7 @@ def main() -> int: return 1 state_dir = pl.Path(socket_env).parent - cluster_obj = clusterlib.ClusterLib( - state_dir=state_dir, - command_era=clusterlib.CommandEras.LATEST, - ) + cluster_obj = clusterlib.ClusterLib(state_dir=state_dir) defragment_utxos.defragment( cluster_obj=cluster_obj, address=args.address, skey_file=args.skey_file ) From db0e4a991ecd0f8da366248bad7286b3522f17da Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 14:13:05 +0100 Subject: [PATCH 138/168] feat: use get_drep_reg_record in test_drep.py Refactor the test_drep.py to use the get_drep_reg_record function from governance_utils. This change simplifies the code by removing manual steps for generating drep registration certificates and deposit amounts. 
The function handles these internally, making the test code cleaner and more maintainable. --- .../tests/tests_conway/test_drep.py | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index d12a782d2..10bbb9c8a 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -973,23 +973,17 @@ def test_drep_no_multiple_registration( drep_metadata_hash = cluster.g_conway_governance.drep.get_metadata_hash( drep_metadata_file=drep_metadata_file ) - deposit_amt = cluster.conway_genesis["dRepDeposit"] - drep_keys = cluster.g_conway_governance.drep.gen_key_pair( - key_name=temp_template, destination_dir="." - ) + reqc.cip090.start(url=helpers.get_vcs_link()) - # Obtain drep registration certificate - reg_cert = cluster.g_conway_governance.drep.gen_registration_cert( - cert_name=temp_template, - deposit_amt=deposit_amt, - drep_vkey_file=drep_keys.vkey_file, + reg_drep = governance_utils.get_drep_reg_record( + cluster_obj=cluster, + name_template=temp_template, drep_metadata_url=drep_metadata_url, drep_metadata_hash=drep_metadata_hash, - destination_dir=".", ) tx_files_reg = clusterlib.TxFiles( - certificate_files=[reg_cert], - signing_key_files=[payment_addr.skey_file, drep_keys.skey_file], + certificate_files=[reg_drep.registration_cert], + signing_key_files=[payment_addr.skey_file, reg_drep.key_pair.skey_file], ) # Submit drep registration certificate @@ -1000,7 +994,7 @@ def test_drep_no_multiple_registration( submit_method=submit_method, use_build_cmd=use_build_cmd, tx_files=tx_files_reg, - deposit=deposit_amt, + deposit=reg_drep.deposit, ) # Wait for some blocks and again submit drep registration certificate @@ -1015,7 +1009,7 @@ def test_drep_no_multiple_registration( submit_method=submit_method, use_build_cmd=use_build_cmd, tx_files=tx_files_reg, - deposit=deposit_amt, + 
deposit=reg_drep.deposit, ) err_msg = str(excinfo.value) From 200adecbe058558bb7c31a7eec1cacf02d2436a4 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 17:56:53 +0100 Subject: [PATCH 139/168] feat: add DRep retirement and fund return to cleanup - Add functionality to retire DReps in testnet_cleanup.py and utils/testnet_cleanup.py. - Update return_funds_to_faucet to handle multiple source addresses. - Refactor cleanup functions to separate address and cert cleanup. --- cardano_node_tests/testnet_cleanup.py | 1 + cardano_node_tests/utils/testnet_cleanup.py | 199 +++++++++++++++----- 2 files changed, 155 insertions(+), 45 deletions(-) diff --git a/cardano_node_tests/testnet_cleanup.py b/cardano_node_tests/testnet_cleanup.py index 2d5db30fb..27e6ddee7 100755 --- a/cardano_node_tests/testnet_cleanup.py +++ b/cardano_node_tests/testnet_cleanup.py @@ -3,6 +3,7 @@ * withdraw rewards * deregister stake addresses +* retire DReps * return funds to faucet """ diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index b0c584f00..a281c5e1c 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -2,13 +2,16 @@ * withdraw rewards * deregister stake addresses +* retire DReps * return funds to faucet """ import concurrent.futures import functools +import itertools import logging import pathlib as pl +import queue import random import time import typing as tp @@ -78,19 +81,51 @@ def deregister_stake_addr( LOGGER.info(f"Deregistered stake address '{pool_user.stake.address}'") +def retire_drep( + cluster_obj: clusterlib.ClusterLib, + payment_addr: clusterlib.AddressRecord, + drep_keys: clusterlib.KeyPair, + name_template: str, + deposit_amt: int, +) -> None: + """Retire a DRep.""" + ret_cert = cluster_obj.g_conway_governance.drep.gen_retirement_cert( + cert_name=f"{name_template}_cleanup", + deposit_amt=deposit_amt, + drep_vkey_file=drep_keys.vkey_file, + ) + 
tx_files = clusterlib.TxFiles( + certificate_files=[ret_cert], + signing_key_files=[payment_addr.skey_file, drep_keys.skey_file], + ) + + try: + cluster_obj.g_transaction.send_tx( + src_address=payment_addr.address, + tx_name=f"{name_template}_retire_drep", + tx_files=tx_files, + deposit=-deposit_amt, + ) + except clusterlib.CLIError: + LOGGER.error(f"Failed to retire a DRep '{name_template}'") # noqa: TRY400 + else: + LOGGER.info(f"Retired a DRep '{name_template}'") + + def return_funds_to_faucet( cluster_obj: clusterlib.ClusterLib, - src_addr: clusterlib.AddressRecord, + src_addrs: tp.List[clusterlib.AddressRecord], faucet_address: str, tx_name: str, ) -> None: - """Send funds from `src_addr` to `faucet_address`.""" - tx_name = f"rf_{tx_name}_return_funds" + """Send funds from `src_addr`s to `faucet_address`.""" + tx_name = f"rf_{tx_name}" # the amount of "-1" means all available funds. fund_dst = [clusterlib.TxOut(address=faucet_address, amount=-1)] - fund_tx_files = clusterlib.TxFiles(signing_key_files=[src_addr.skey_file]) + fund_tx_files = clusterlib.TxFiles(signing_key_files=[f.skey_file for f in src_addrs]) - txins = cluster_obj.g_query.get_utxo(address=src_addr.address, coins=[clusterlib.DEFAULT_COIN]) + txins_nested = [cluster_obj.g_query.get_utxo(address=f.address) for f in src_addrs] + txins = list(itertools.chain.from_iterable(txins_nested)) utxos_balance = functools.reduce(lambda x, y: x + y.amount, txins, 0) # skip if there no (or too little) Lovelace @@ -109,7 +144,7 @@ def return_funds_to_faucet( # try to return funds; don't mind if there's not enough funds for fees etc. 
try: cluster_obj.g_transaction.send_tx( - src_address=src_addr.address, + src_address=src_addrs[0].address, tx_name=tx_name, txins=txins, txouts=fund_dst, @@ -117,9 +152,9 @@ def return_funds_to_faucet( verify_tx=False, ) except clusterlib.CLIError: - LOGGER.error(f"Failed to return funds from '{src_addr.address}'") # noqa: TRY400 + LOGGER.error(f"Failed to return funds from addresses for '{tx_name}'") # noqa: TRY400 else: - LOGGER.info(f"Returned funds from '{src_addr.address}'") + LOGGER.info(f"Returned funds from addresses '{tx_name}'") def create_addr_record(addr_file: pl.Path) -> clusterlib.AddressRecord: @@ -141,13 +176,17 @@ def create_addr_record(addr_file: pl.Path) -> clusterlib.AddressRecord: return addr_record -def find_files(location: clusterlib.FileType) -> tp.Generator[pl.Path, None, None]: +def find_addr_files(location: pl.Path) -> tp.Generator[pl.Path, None, None]: r"""Find all '\*.addr' files in given location and it's subdirectories.""" - location = pl.Path(location).expanduser().resolve() return location.glob("**/*.addr") -def group_files(file_paths: tp.Generator[pl.Path, None, None]) -> tp.List[tp.List[pl.Path]]: +def find_cert_files(location: pl.Path) -> tp.Generator[pl.Path, None, None]: + r"""Find all '\*_drep_reg.cert' files in given location and it's subdirectories.""" + return location.glob("**/*_drep_reg.cert") + + +def group_addr_files(file_paths: tp.Generator[pl.Path, None, None]) -> tp.List[tp.List[pl.Path]]: """Group payment address files with corresponding stake address files. These need to be processed together - funds are transferred from payment address after @@ -172,43 +211,16 @@ def group_files(file_paths: tp.Generator[pl.Path, None, None]) -> tp.List[tp.Lis return path_groups -def _get_faucet_payment_rec( - address: str = "", - skey_file: clusterlib.FileType = "", -) -> clusterlib.AddressRecord: - if address or skey_file: - if not (address and skey_file): - err = "Both 'address' and 'skey_file' need to be set." 
- raise ValueError(err) - - faucet_payment = clusterlib.AddressRecord( - address=address, - vkey_file=pl.Path("/nonexistent"), # We don't need this for faucet - skey_file=pl.Path(skey_file), - ) - else: - # Try to infer the faucet address and keys from cluster env - cluster_env = cluster_nodes.get_cluster_env() - faucet_addr_file = cluster_env.state_dir / "shelley" / "faucet.addr" - faucet_payment = create_addr_record(faucet_addr_file) - - return faucet_payment - - -def cleanup( - cluster_obj: clusterlib.ClusterLib, - location: clusterlib.FileType, - faucet_address: str = "", - faucet_skey_file: clusterlib.FileType = "", +def cleanup_addresses( + cluster_obj: clusterlib.ClusterLib, location: pl.Path, faucet_payment: clusterlib.AddressRecord ) -> None: - """Cleanup a testnet with the help of testing artifacts.""" - faucet_payment = _get_faucet_payment_rec(address=faucet_address, skey_file=faucet_skey_file) - files_found = group_files(find_files(location)) + """Cleanup addresses.""" + files_found = group_addr_files(find_addr_files(location)) stake_deposit_amt = cluster_obj.g_query.get_address_deposit() def _run(files: tp.List[pl.Path]) -> None: for fpath in files: - # add random sleep for < 1s to prevent + # Add random sleep for < 1s to prevent # "Network.Socket.connect: : resource exhausted" time.sleep(random.random()) @@ -252,16 +264,113 @@ def _run(files: tp.List[pl.Path]) -> None: continue return_funds_to_faucet( cluster_obj=cluster_obj, - src_addr=payment, + src_addrs=[payment], faucet_address=faucet_payment.address, tx_name=f_name, ) - # run cleanup in parallel + # Run cleanup in parallel with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: futures = [executor.submit(_run, f) for f in files_found] concurrent.futures.wait(futures) + +def cleanup_certs( + cluster_obj: clusterlib.ClusterLib, location: pl.Path, faucet_payment: clusterlib.AddressRecord +) -> None: + """Cleanup DRep certs.""" + files_found = find_cert_files(location) + 
drep_deposit_amt = cluster_obj.conway_genesis["dRepDeposit"] + + # Fund the addresses that will pay for fees + fund_addrs = [ + cluster_obj.g_address.gen_payment_addr_and_keys(name=f"certs_cleanup{i}") for i in range(11) + ] + fund_dst = [clusterlib.TxOut(address=f.address, amount=300_000_000) for f in fund_addrs] + fund_tx_files = clusterlib.TxFiles(signing_key_files=[faucet_payment.skey_file]) + cluster_obj.g_transaction.send_tx( + src_address=faucet_payment.address, + tx_name="fund_certs_cleanup", + txouts=fund_dst, + tx_files=fund_tx_files, + ) + + addrs_queue: queue.Queue[clusterlib.AddressRecord] = queue.Queue() + for a in fund_addrs: + addrs_queue.put(a) + + def _run(cert_file: pl.Path, addrs_queue: queue.Queue[clusterlib.AddressRecord]) -> None: + # Add random sleep for < 1s to prevent + # "Network.Socket.connect: : resource exhausted" + time.sleep(random.random()) + + fname = cert_file.name + fdir = cert_file.parent + vkey_file = fdir / cert_file.name.replace("_reg.cert", ".vkey") + skey_file = vkey_file.with_suffix(".skey") + drep_keys = clusterlib.KeyPair(vkey_file=vkey_file, skey_file=skey_file) + + addr = addrs_queue.get() + try: + retire_drep( + cluster_obj=cluster_obj, + payment_addr=addr, + drep_keys=drep_keys, + name_template=fname, + deposit_amt=drep_deposit_amt, + ) + finally: + addrs_queue.put(addr) + + # Run cleanup in parallel + with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: + futures = [executor.submit(_run, f, addrs_queue) for f in files_found] + concurrent.futures.wait(futures) + + # Return funds from the addresses that paid for fees + return_funds_to_faucet( + cluster_obj=cluster_obj, + src_addrs=fund_addrs, + faucet_address=faucet_payment.address, + tx_name="certs_cleanup_return", + ) + + +def _get_faucet_payment_rec( + address: str = "", + skey_file: clusterlib.FileType = "", +) -> clusterlib.AddressRecord: + if address or skey_file: + if not (address and skey_file): + err = "Both 'address' and 'skey_file' need 
to be set." + raise ValueError(err) + + faucet_payment = clusterlib.AddressRecord( + address=address, + vkey_file=pl.Path("/nonexistent"), # We don't need this for faucet + skey_file=pl.Path(skey_file), + ) + else: + # Try to infer the faucet address and keys from cluster env + cluster_env = cluster_nodes.get_cluster_env() + faucet_addr_file = cluster_env.state_dir / "shelley" / "faucet.addr" + faucet_payment = create_addr_record(faucet_addr_file) + + return faucet_payment + + +def cleanup( + cluster_obj: clusterlib.ClusterLib, + location: clusterlib.FileType, + faucet_address: str = "", + faucet_skey_file: clusterlib.FileType = "", +) -> None: + """Cleanup a testnet with the help of testing artifacts.""" + location = pl.Path(location).expanduser().resolve() + faucet_payment = _get_faucet_payment_rec(address=faucet_address, skey_file=faucet_skey_file) + cleanup_addresses(cluster_obj=cluster_obj, location=location, faucet_payment=faucet_payment) + cleanup_certs(cluster_obj=cluster_obj, location=location, faucet_payment=faucet_payment) + # Defragment faucet address UTxOs defragment_utxos.defragment( cluster_obj=cluster_obj, address=faucet_payment.address, skey_file=faucet_payment.skey_file From b4a10630b2f81a23d9d9bb2ee6eb94b9a9094682 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 17:58:04 +0100 Subject: [PATCH 140/168] fix(testnet_cleanup): update argument requirements - Removed required constraints for address and skey-file arguments to retain backwards compatibility - Added validation to ensure both or neither are provided - Require address and skey-file arguments when BOOTSTRAP_DIR env variable is not set --- cardano_node_tests/testnet_cleanup.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/cardano_node_tests/testnet_cleanup.py b/cardano_node_tests/testnet_cleanup.py index 27e6ddee7..c543d18fb 100755 --- a/cardano_node_tests/testnet_cleanup.py +++ b/cardano_node_tests/testnet_cleanup.py @@ -34,13 +34,11 @@ 
def get_args() -> argparse.Namespace: parser.add_argument( "-f", "--address", - required=True, help="Faucet address", ) parser.add_argument( "-s", "--skey-file", - required=True, type=helpers.check_file_arg, help="Path to faucet skey file", ) @@ -58,8 +56,15 @@ def main() -> int: if not socket_env: LOGGER.error("The `CARDANO_NODE_SOCKET_PATH` environment variable is not set.") return 1 - if not os.environ.get("BOOTSTRAP_DIR"): - LOGGER.error("The `BOOTSTRAP_DIR` environment variable is not set.") + if bool(args.address) ^ bool(args.skey_file): + LOGGER.error( + "Both address and skey file must be provided, or neither of them should be provided." + ) + return 1 + if not (args.address or os.environ.get("BOOTSTRAP_DIR")): + LOGGER.error( + "The address must be provided, or `BOOTSTRAP_DIR` environment variable must be set." + ) return 1 state_dir = pl.Path(socket_env).parent From 51203a7063298bd638359c62380e43051b6f6294 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 21 Nov 2024 18:24:04 +0100 Subject: [PATCH 141/168] feat: add DB-Sync table names to error ignore list Added OffChainPoolFetchError and OffChainVoteFetchError to the list of ignored errors in logfiles.py. The table names are matching the searched error strings, but are not errors. 
--- cardano_node_tests/utils/logfiles.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index 320fd5c36..0cc4bca43 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -41,6 +41,10 @@ "db-sync-node.*could not serialize access", # Can happen on p2p when node is shutting down "AsyncCancelled", + # DB-Sync table name + "OffChainPoolFetchError", + # DB-Sync table name + "OffChainVoteFetchError", # TODO: p2p failures on testnet "PeerStatusChangeFailure", # TODO: p2p failures on testnet - PeerMonitoringError From a69d0a29619deb8af8526eb5e7beadf820c368e7 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 10:59:40 +0100 Subject: [PATCH 142/168] feat(ci): enhance test run logs with icons Enhanced CI output with icons for better readability. --- .github/node_upgrade.sh | 12 ++++++------ .github/regression.sh | 4 ++-- .github/workflows/regression_reusable.yaml | 14 +++++++------- .github/workflows/upgrade_reusable.yaml | 18 +++++++++--------- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.github/node_upgrade.sh b/.github/node_upgrade.sh index 1aa9025ed..4953a0a97 100755 --- a/.github/node_upgrade.sh +++ b/.github/node_upgrade.sh @@ -91,7 +91,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup step1" - echo "::group::-> PYTEST STEP1 <-" + echo "::group::🧪 Testrun Step1" printf "start: %(%H:%M:%S)T\n" -1 df -h . 
# prepare scripts for stating cluster instance, start cluster instance, run smoke tests @@ -107,7 +107,7 @@ fi # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we don't want to continue [ "$retval" -le 1 ] || exit "$retval" -echo "::endgroup::" # end group for "-> PYTEST STEP1 <-" +echo "::endgroup::" # end group for "Testrun Step1" echo "::group::Nix env setup steps 2 & 3" printf "start: %(%H:%M:%S)T\n" -1 @@ -131,7 +131,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup steps 2 & 3" - echo "::group::-> PYTEST STEP2 <-" + echo "::group::🧪 Testrun Step2" printf "start: %(%H:%M:%S)T\n" -1 df -h . # update cluster nodes, run smoke tests @@ -139,16 +139,16 @@ nix develop --accept-flake-config .#venv --command bash -c ' retval="$?" # retval 0 == all tests passed; 1 == some tests failed; > 1 == some runtime error and we dont want to continue [ "$retval" -le 1 ] || exit "$retval" - echo "::endgroup::" # end group for "-> PYTEST STEP2 <-" + echo "::endgroup::" # end group for "Testrun Step2" - echo "::group::-> PYTEST STEP3 <-" + echo "::group::🧪 Testrun Step3" printf "start: %(%H:%M:%S)T\n" -1 df -h . # update to Conway, run smoke tests ./.github/node_upgrade_pytest.sh step3 retval="$?" df -h . - echo "::endgroup::" # end group for "-> PYTEST STEP3 <-" + echo "::endgroup::" # end group for "Testrun Step3" echo "::group::Teardown cluster & collect artifacts" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/regression.sh b/.github/regression.sh index badf7f9b7..98a2f8333 100755 --- a/.github/regression.sh +++ b/.github/regression.sh @@ -198,7 +198,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' . .github/setup_venv.sh clean echo "::endgroup::" # end group for "Python venv setup" - echo "::group::-> PYTEST RUN <-" + echo "::group::🧪 Testrun" printf "start: %(%H:%M:%S)T\n" -1 df -h . 
export PATH="${PWD}/.bin":"$WORKDIR/cardano-cli/cardano-cli-build/bin":"$PATH" @@ -206,7 +206,7 @@ nix develop --accept-flake-config .#venv --command bash -c ' make "${MAKE_TARGET:-"tests"}" retval="$?" df -h . - echo "::endgroup::" + echo "::endgroup::" # end group for "Testrun" echo "::group::Collect artifacts & teardown cluster" printf "start: %(%H:%M:%S)T\n" -1 diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index b8c85cc57..b22403e72 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -106,7 +106,7 @@ jobs: curl -s -u ${{ secrets.TCACHE_BASIC_AUTH }} "${{ secrets.TCACHE_URL }}/${testrun_name_strip}/pypassed" > deselected_tests.txt echo "DESELECT_FROM_FILE=deselected_tests.txt" >> $GITHUB_ENV fi - - name: Run regression tests + - name: ▶️ Run Regression Tests id: testing-step run: | # env @@ -120,13 +120,13 @@ jobs: testrun_name_strip="$(echo "${{ inputs.testrun_name }}" | sed 's/[^a-zA-Z0-9_-]//g')" curl -s -X PUT --fail-with-body -u ${{ secrets.TCACHE_BASIC_AUTH }} "${{ secrets.TCACHE_URL }}/${testrun_name_strip}/${{ github.run_number }}/import" -F "junitxml=@testrun-report.xml" fi - - name: Upload testing artifacts on failure + - name: 🠉 Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: Upload Allure results + - name: 🠉 Upload Allure results uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -134,13 +134,13 @@ jobs: with: name: allure-results path: allure-results.tar.xz - - name: Upload HTML report + - name: 🠉 Upload HTML report uses: actions/upload-artifact@v4 if: success() || failure() with: name: testrun-report path: testrun-report.html - - name: Upload testrun files + - name: 🠉 Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -151,13 +151,13 @@ jobs: testrun-report.xml deselected_tests.txt requirements_coverage.json - - name: Upload CLI coverage + - name: 🠉 Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: name: cli-coverage path: cli_coverage.json - - name: Mail failure report + - name: ✉ Mail failure report uses: dawidd6/action-send-mail@v4 if: (success() || failure()) && steps.testing-step.outcome != 'success' && env.CI_FAIL_MAILS with: diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index a9742c771..5ed65a9f7 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -43,20 +43,20 @@ jobs: cat .github_ci_env cat .github_ci_env >> $GITHUB_ENV echo "GITHUB_TOKEN=${{ secrets.GH_TOKEN }}" >> $GITHUB_ENV - - name: Run upgrade tests + - name: ▶️ Run Upgrade Tests id: testing-step run: | # env echo "::group::Script setup" ./.github/node_upgrade.sh echo "::endgroup::" - - name: Upload testing artifacts on failure + - name: 🠉 Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: Upload Allure results for step1 + - name: 🠉 Upload Allure results for step1 uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -64,19 +64,19 @@ jobs: with: name: allure-results-step1 path: allure-results-step1.tar.xz - - name: Upload Allure results for step2 + - name: 🠉 Upload Allure results for step2 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step2 path: allure-results-step2.tar.xz - - name: Upload Allure results for step3 + - name: 🠉 Upload Allure results for step3 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step3 path: allure-results-step3.tar.xz - - name: Upload HTML reports + - name: 🠉 Upload HTML reports uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -85,7 +85,7 @@ jobs: testrun-report-step1.html testrun-report-step2.html testrun-report-step3.html - - name: Upload testrun files + - name: 🠉 Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -93,13 +93,13 @@ jobs: path: | scheduling.log.xz errors_all.log - - name: Upload CLI coverage + - name: 🠉 Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: name: cli-coverage path: cli_coverage.json - - name: Mail failure report + - name: ✉ Mail failure report uses: dawidd6/action-send-mail@v4 if: (success() || failure()) && steps.testing-step.outcome != 'success' && env.CI_FAIL_MAILS with: From 200c656a27b47bfa06378a813d4fc7617fd1b5ee Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 13:21:10 +0100 Subject: [PATCH 143/168] chore: replace arrow symbols in workflow names MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replaced the arrow symbols (🠉) with a simpler arrow (↑) in the workflow names for better compatibility with fonts with limited set of unicode characters. 
--- .github/workflows/regression_reusable.yaml | 10 +++++----- .github/workflows/upgrade_reusable.yaml | 14 +++++++------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index b22403e72..20067826d 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -120,13 +120,13 @@ jobs: testrun_name_strip="$(echo "${{ inputs.testrun_name }}" | sed 's/[^a-zA-Z0-9_-]//g')" curl -s -X PUT --fail-with-body -u ${{ secrets.TCACHE_BASIC_AUTH }} "${{ secrets.TCACHE_URL }}/${testrun_name_strip}/${{ github.run_number }}/import" -F "junitxml=@testrun-report.xml" fi - - name: 🠉 Upload testing artifacts on failure + - name: ↑ Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: 🠉 Upload Allure results + - name: ↑ Upload Allure results uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -134,13 +134,13 @@ jobs: with: name: allure-results path: allure-results.tar.xz - - name: 🠉 Upload HTML report + - name: ↑ Upload HTML report uses: actions/upload-artifact@v4 if: success() || failure() with: name: testrun-report path: testrun-report.html - - name: 🠉 Upload testrun files + - name: ↑ Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -151,7 +151,7 @@ jobs: testrun-report.xml deselected_tests.txt requirements_coverage.json - - name: 🠉 Upload CLI coverage + - name: ↑ Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index 5ed65a9f7..641a80068 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -50,13 +50,13 @@ jobs: echo "::group::Script setup" ./.github/node_upgrade.sh echo "::endgroup::" - - name: 🠉 Upload testing artifacts on failure + - name: ↑ Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: 🠉 Upload Allure results for step1 + - name: ↑ Upload Allure results for step1 uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -64,19 +64,19 @@ jobs: with: name: allure-results-step1 path: allure-results-step1.tar.xz - - name: 🠉 Upload Allure results for step2 + - name: ↑ Upload Allure results for step2 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step2 path: allure-results-step2.tar.xz - - name: 🠉 Upload Allure results for step3 + - name: ↑ Upload Allure results for step3 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step3 path: allure-results-step3.tar.xz - - name: 🠉 Upload HTML reports + - name: ↑ Upload HTML reports uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -85,7 +85,7 @@ jobs: testrun-report-step1.html testrun-report-step2.html testrun-report-step3.html - - name: 🠉 Upload testrun files + - name: ↑ Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -93,7 +93,7 @@ jobs: path: | scheduling.log.xz errors_all.log - - name: 🠉 Upload CLI coverage + - name: ↑ Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: From 10759de44ab174b999f7a659b6f355d07ded78ef Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 13:52:00 +0100 Subject: [PATCH 144/168] feat(ci): update artifact upload arrow MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Changed the arrow symbol in the names of upload steps in the regression and upgrade workflows from "↑" to "↟" for better clarity. 
--- .github/workflows/regression_reusable.yaml | 10 +++++----- .github/workflows/upgrade_reusable.yaml | 14 +++++++------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/regression_reusable.yaml b/.github/workflows/regression_reusable.yaml index 20067826d..4a31194f4 100644 --- a/.github/workflows/regression_reusable.yaml +++ b/.github/workflows/regression_reusable.yaml @@ -120,13 +120,13 @@ jobs: testrun_name_strip="$(echo "${{ inputs.testrun_name }}" | sed 's/[^a-zA-Z0-9_-]//g')" curl -s -X PUT --fail-with-body -u ${{ secrets.TCACHE_BASIC_AUTH }} "${{ secrets.TCACHE_URL }}/${testrun_name_strip}/${{ github.run_number }}/import" -F "junitxml=@testrun-report.xml" fi - - name: ↑ Upload testing artifacts on failure + - name: ↟ Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: ↑ Upload Allure results + - name: ↟ Upload Allure results uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -134,13 +134,13 @@ jobs: with: name: allure-results path: allure-results.tar.xz - - name: ↑ Upload HTML report + - name: ↟ Upload HTML report uses: actions/upload-artifact@v4 if: success() || failure() with: name: testrun-report path: testrun-report.html - - name: ↑ Upload testrun files + - name: ↟ Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -151,7 +151,7 @@ jobs: testrun-report.xml deselected_tests.txt requirements_coverage.json - - name: ↑ Upload CLI coverage + - name: ↟ Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: diff --git a/.github/workflows/upgrade_reusable.yaml b/.github/workflows/upgrade_reusable.yaml index 641a80068..e3e60dea0 100644 --- a/.github/workflows/upgrade_reusable.yaml +++ b/.github/workflows/upgrade_reusable.yaml @@ -50,13 +50,13 @@ jobs: echo "::group::Script setup" ./.github/node_upgrade.sh echo "::endgroup::" - - name: ↑ Upload testing artifacts on failure + - name: ↟ Upload testing artifacts on failure uses: actions/upload-artifact@v4 if: failure() with: name: testing-artifacts path: testing_artifacts.tar.xz - - name: ↑ Upload Allure results for step1 + - name: ↟ Upload Allure results for step1 uses: actions/upload-artifact@v4 # When using `always()`, you lose ability to manually cancel the workflow. # Use `success() || failure()` instead. 
@@ -64,19 +64,19 @@ jobs: with: name: allure-results-step1 path: allure-results-step1.tar.xz - - name: ↑ Upload Allure results for step2 + - name: ↟ Upload Allure results for step2 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step2 path: allure-results-step2.tar.xz - - name: ↑ Upload Allure results for step3 + - name: ↟ Upload Allure results for step3 uses: actions/upload-artifact@v4 if: success() || failure() with: name: allure-results-step3 path: allure-results-step3.tar.xz - - name: ↑ Upload HTML reports + - name: ↟ Upload HTML reports uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -85,7 +85,7 @@ jobs: testrun-report-step1.html testrun-report-step2.html testrun-report-step3.html - - name: ↑ Upload testrun files + - name: ↟ Upload testrun files uses: actions/upload-artifact@v4 if: success() || failure() with: @@ -93,7 +93,7 @@ jobs: path: | scheduling.log.xz errors_all.log - - name: ↑ Upload CLI coverage + - name: ↟ Upload CLI coverage uses: actions/upload-artifact@v4 if: success() || failure() with: From 3f9b60df02c401e96a155ea6f5102b869fc9c420 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 18:44:35 +0100 Subject: [PATCH 145/168] feat: skip Plutus cost check on testnets Added a condition to skip the Plutus transaction cost check when running on testnets. This is because the costs are calibrated only for the local testnet environment. 
--- cardano_node_tests/tests/plutus_common.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 824abf89a..444f56e2d 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -7,6 +7,7 @@ from cardano_clusterlib import clusterlib from cardano_node_tests.tests import issues +from cardano_node_tests.utils import cluster_nodes from cardano_node_tests.utils import clusterlib_utils from cardano_node_tests.utils import dbsync_utils from cardano_node_tests.utils import helpers @@ -492,11 +493,15 @@ class ScriptCost: def check_plutus_costs( plutus_costs: tp.List[dict], expected_costs: tp.List[ExecutionCost], frac: float = 0.15 -): +) -> None: """Check plutus transaction cost. units: the time is in picoseconds and the space is in bytes. """ + if cluster_nodes.get_cluster_type().type == cluster_nodes.ClusterType.TESTNET: + # We have the costs calibrated only for local testnet + return + # sort records by total cost sorted_plutus = sorted( plutus_costs, From 1c3b269294bd50862c216ec5f662126f91e90288 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 18:48:39 +0100 Subject: [PATCH 146/168] fix(faucet): allow None as amount parameter Updated the `fund_from_faucet` function to accept `None` as a valid value for the `amount` parameter. If `amount` is `None`, it defaults to 1,000,000,000. This change ensures better flexibility in specifying the amount to be funded from the faucet. 
--- cardano_node_tests/utils/faucet.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index 550d0c7ce..4e5767022 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -18,12 +18,15 @@ def fund_from_faucet( cluster_obj: clusterlib.ClusterLib, faucet_data: tp.Optional[dict] = None, all_faucets: tp.Optional[tp.Dict[str, dict]] = None, - amount: tp.Union[int, tp.List[int]] = 1000_000_000, + amount: tp.Union[None, int, tp.List[int]] = None, tx_name: tp.Optional[str] = None, destination_dir: clusterlib.FileType = ".", force: bool = False, ) -> tp.Optional[clusterlib.TxRawOutput]: """Send `amount` from faucet addr to all `dst_addrs`.""" + if amount is None: + amount = 1000_000_000 + if not (faucet_data or all_faucets): msg = "Either `faucet_data` or `all_faucets` must be provided." raise AssertionError(msg) From fcee7f3e784acfbd2c685eacbfbd49825df131d9 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 18:50:59 +0100 Subject: [PATCH 147/168] fix(submit_api): handle timeout and resubmit transaction - Added retry logic to handle request timeouts when posting CBOR data to the submit API. - Introduced a delay with random sleep time between retries to avoid overwhelming the server. 
--- cardano_node_tests/utils/submit_api.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/utils/submit_api.py b/cardano_node_tests/utils/submit_api.py index 80b003305..794ae896e 100644 --- a/cardano_node_tests/utils/submit_api.py +++ b/cardano_node_tests/utils/submit_api.py @@ -5,7 +5,9 @@ import json import logging import pathlib as pl +import random import shutil +import time import typing as tp import requests @@ -57,7 +59,19 @@ def post_cbor(cbor_file: clusterlib.FileType, url: str) -> requests.Response: headers = {"Content-Type": "application/cbor"} with open(cbor_file, "rb") as in_fp: cbor_binary = in_fp.read() - response = requests.post(url, headers=headers, data=cbor_binary, timeout=10) + + for i in range(5): + delay = False + if i > 0: + LOGGER.warning("Resubmitting transaction to submit-api.") + try: + response = requests.post(url, headers=headers, data=cbor_binary, timeout=20) + except requests.exceptions.ReadTimeout: + delay = True + else: + break + if delay: + time.sleep(random.random()) return response From 6dc3d601aaeca2f1aa454537bf6dea2d37b3cf11 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 18:54:24 +0100 Subject: [PATCH 148/168] fix(tests): various fixes needed for Preview - Adjusted witness_count_add in test_scripts.py for better accuracy. - Increased funding amount in test_drep.py. - Updated expected_fee_redeem in test_spend_build.py to reflect fee on Preview. 
--- cardano_node_tests/tests/test_scripts.py | 3 ++- cardano_node_tests/tests/tests_conway/test_drep.py | 8 +++++++- .../tests/tests_plutus_v2/test_spend_build.py | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/tests/test_scripts.py b/cardano_node_tests/tests/test_scripts.py index 7c08fb4fe..05f06fe7e 100644 --- a/cardano_node_tests/tests/test_scripts.py +++ b/cardano_node_tests/tests/test_scripts.py @@ -2272,6 +2272,7 @@ def test_script_reference_utxo( tx_files=tx_files, invalid_hereafter=invalid_hereafter, invalid_before=invalid_before, + witness_count_add=2, ) # Check final balances @@ -2389,7 +2390,7 @@ def test_spend_reference_script( txouts=destinations, tx_files=tx_files, witness_override=2 if address_type == "byron" else None, - witness_count_add=2 if address_type == "byron" else 0, + witness_count_add=0 if use_build_cmd else 2, ) # check that the reference UTxO was spent diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 10bbb9c8a..cbe01fb53 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -60,6 +60,7 @@ def get_payment_addr( cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, caching_key: str, + amount: tp.Optional[int] = None, ) -> clusterlib.AddressRecord: """Create new payment address.""" with cluster_manager.cache_fixture(key=caching_key) as fixture_cache: @@ -77,6 +78,7 @@ def get_payment_addr( addr, cluster_obj=cluster_obj, all_faucets=cluster_manager.cache.addrs_data, + amount=amount, ) return addr @@ -187,7 +189,11 @@ def payment_addr( test_id = common.get_test_id(cluster) key = helpers.get_current_line_str() return get_payment_addr( - name_template=test_id, cluster_manager=cluster_manager, cluster_obj=cluster, caching_key=key + name_template=test_id, + cluster_manager=cluster_manager, + cluster_obj=cluster, + caching_key=key, 
+ amount=2000_000_000, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py index b3662257d..790ea851d 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py @@ -188,7 +188,7 @@ def test_txout_locking( # check expected fees if use_reference_script: expected_fee_fund = 258_913 - expected_fee_redeem = 213_889 + expected_fee_redeem = 233_889 else: expected_fee_fund = 167_965 expected_fee_redeem = 293_393 From a82e3f1c2db428e5caafd2f3a5a19b5e4f460168 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 22 Nov 2024 19:07:06 +0100 Subject: [PATCH 149/168] chore: update testnets thread count to 15 Reduced the number of test threads for the testnets target in the Makefile from 20 to 15. This change is intended to optimize the stability of the tests. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f579bbf49..acddad0d1 100644 --- a/Makefile +++ b/Makefile @@ -103,6 +103,6 @@ testpr: .dirs .run_tests .PHONY: testnets testnets: export CLUSTERS_COUNT=1 testnets: export FORBID_RESTART=1 -testnets: TEST_THREADS := $(or $(TEST_THREADS),20) +testnets: TEST_THREADS := $(or $(TEST_THREADS),15) testnets: MARKEXPR := $(or $(MARKEXPR),-m "testnets") testnets: .dirs .run_tests From e1e003ca01bf9c25992a14f38b1aad9cab38f359 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 25 Nov 2024 09:40:06 +0100 Subject: [PATCH 150/168] feat: increase SECP256K1 loop costs Updated the SECP256K1_ECDSA_LOOP_COST and SECP256K1_SCHNORR_LOOP_COST values in plutus_common.py to reflect the new execution costs. 
--- cardano_node_tests/tests/plutus_common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 444f56e2d..067ab5d10 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -106,10 +106,10 @@ class ExecutionCost: per_time=168_868_800, per_space=540_612, fixed_cost=43_369 ) SECP256K1_ECDSA_LOOP_COST = ExecutionCost( - per_time=397_863_996, per_space=128_584, fixed_cost=36_106 + per_time=470_000_000, per_space=128_584, fixed_cost=36_106 ) SECP256K1_SCHNORR_LOOP_COST = ExecutionCost( - per_time=430_445_916, per_space=128_584, fixed_cost=38_455 + per_time=470_000_000, per_space=128_584, fixed_cost=38_455 ) ALWAYS_FAILS_V3_COST = ExecutionCost(per_time=230_100, per_space=1_100, fixed_cost=81) From 243c4bdb84fff91dda37fe7c2e784f11bbfbc3ce Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 25 Nov 2024 13:15:54 +0100 Subject: [PATCH 151/168] feat: don't cache fixture when deposit too high - Modified get_payment_addr to use caching_key conditionally. - Adjusted payment_addr to handle different dRepDeposit values. 
--- .../tests/tests_conway/test_drep.py | 35 ++++++++++++++----- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index cbe01fb53..efe4f14ee 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -36,6 +36,8 @@ LOGGER = logging.getLogger(__name__) DATA_DIR = pl.Path(__file__).parent.parent / "data" +MAINNET_DREP_DEPOSIT = 500_000_000 + pytestmark = pytest.mark.skipif( VERSIONS.transaction_era < VERSIONS.CONWAY, reason="runs only with Tx era >= Conway", @@ -59,19 +61,27 @@ def get_payment_addr( name_template: str, cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, - caching_key: str, + caching_key: str = "", amount: tp.Optional[int] = None, ) -> clusterlib.AddressRecord: """Create new payment address.""" - with cluster_manager.cache_fixture(key=caching_key) as fixture_cache: - if fixture_cache.value: - return fixture_cache.value # type: ignore + def _create_addr() -> clusterlib.AddressRecord: addr = clusterlib_utils.create_payment_addr_records( - f"drep_addr_{name_template}", + f"{name_template}_fund_addr", cluster_obj=cluster_obj, )[0] - fixture_cache.value = addr + return addr + + if caching_key: + with cluster_manager.cache_fixture(key=caching_key) as fixture_cache: + if fixture_cache.value: + return fixture_cache.value # type: ignore + + addr = _create_addr() + fixture_cache.value = addr + else: + addr = _create_addr() # Fund source address clusterlib_utils.fund_from_faucet( @@ -186,14 +196,23 @@ def payment_addr( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, ) -> clusterlib.AddressRecord: + if cluster.conway_genesis["dRepDeposit"] < MAINNET_DREP_DEPOSIT: + amount = 1_000_000_000 + key = helpers.get_current_line_str() + else: + amount = MAINNET_DREP_DEPOSIT + 10_000_000 + # Don't cache the fixture when 
DRep deposit is high. We don't know on how many + # different workers the tests will run, and we might end up creating many addresses + # with lot of funds if the fixture is cached. + key = "" + test_id = common.get_test_id(cluster) - key = helpers.get_current_line_str() return get_payment_addr( name_template=test_id, cluster_manager=cluster_manager, cluster_obj=cluster, caching_key=key, - amount=2000_000_000, + amount=amount, ) From c31fb2278bd81e70cc6a4c58e4e3d6a817200db1 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 25 Nov 2024 18:23:49 +0100 Subject: [PATCH 152/168] refactor: return errors instead of raising in log check Refactored `_check_msgs_presence_in_logs` to `check_msgs_presence_in_logs` to return a list of errors instead of raising an AssertionError. Updated `expect_errors` and `expect_messages` to handle the returned errors and raise an AssertionError if any errors are found. --- cardano_node_tests/utils/logfiles.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index 0cc4bca43..d74799a3c 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -274,12 +274,12 @@ def add_ignore_rule( infile.write(f"{files_glob};;{skip_after};;{regex}\n") -def _check_msgs_presence_in_logs( +def check_msgs_presence_in_logs( regex_pairs: tp.List[tp.Tuple[str, str]], seek_offsets: tp.Dict[str, int], state_dir: pl.Path, timestamp: float, -) -> None: +) -> tp.List[str]: """Make sure the expected messages are present in logs.""" errors = [] for files_glob, regex in regex_pairs: @@ -308,9 +308,7 @@ def _check_msgs_presence_in_logs( else: errors.append(f"No line matching `{regex}` found in '{logfile}'.") - if errors: - errors_joined = "\n".join(errors) - raise AssertionError(errors_joined) from None + return errors @contextlib.contextmanager @@ -342,9 +340,12 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, 
str]], worker_id: str) -> t yield - _check_msgs_presence_in_logs( + errors = check_msgs_presence_in_logs( regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp ) + if errors: + errors_joined = "\n".join(errors) + raise AssertionError(errors_joined) from None @contextlib.contextmanager @@ -371,9 +372,12 @@ def expect_messages(regex_pairs: tp.List[tp.Tuple[str, str]]) -> tp.Iterator[Non yield - _check_msgs_presence_in_logs( + errors = check_msgs_presence_in_logs( regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp ) + if errors: + errors_joined = "\n".join(errors) + raise AssertionError(errors_joined) from None def search_cluster_logs() -> tp.List[tp.Tuple[pl.Path, str]]: From 342371607703cd4a56eeadbdcfdcc3e23eb2a79b Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Mon, 25 Nov 2024 18:24:23 +0100 Subject: [PATCH 153/168] feat(tests): add log file checks for block minting - Updated test_pool_blocks to check for log messages indicating block minting. - Made db-sync optional for the test. 
--- .reponotes | 1 + cardano_node_tests/tests/test_blocks.py | 109 +++++++++++++++--------- 2 files changed, 68 insertions(+), 42 deletions(-) create mode 120000 .reponotes diff --git a/.reponotes b/.reponotes new file mode 120000 index 000000000..6fd65d3ff --- /dev/null +++ b/.reponotes @@ -0,0 +1 @@ +../reponotes/cardano-node-tests \ No newline at end of file diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index f98456699..7471d0e11 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -26,6 +26,7 @@ from cardano_node_tests.utils import configuration from cardano_node_tests.utils import dbsync_queries from cardano_node_tests.utils import helpers +from cardano_node_tests.utils import logfiles from cardano_node_tests.utils.versions import VERSIONS LOGGER = logging.getLogger(__name__) @@ -35,7 +36,7 @@ class TestLeadershipSchedule: """Tests for cardano-cli leadership-schedule.""" @allure.link(helpers.get_vcs_link()) - @pytest.mark.needs_dbsync + @pytest.mark.dbsync @pytest.mark.parametrize("for_epoch", ("current", "next")) def test_pool_blocks( self, @@ -47,9 +48,12 @@ def test_pool_blocks( * query leadership schedule for selected pool for current epoch or next epoch * wait for epoch that comes after the queried epoch - * get info about minted blocks in queried epoch for the selected pool - * compare leadership schedule with blocks that were actually minted - * compare db-sync records with ledger state dump + * check in log files that the blocks were minted in expected slots + * if db-sync is available: + + - get info about minted blocks in queried epoch for the selected pool + - compare leadership schedule with blocks that were actually minted + - compare db-sync records with ledger state dump """ # pylint: disable=unused-argument cluster, pool_name = cluster_use_pool @@ -58,12 +62,18 @@ def test_pool_blocks( pool_rec = cluster_manager.cache.addrs_data[pool_name] pool_id 
= cluster.g_stake_pool.get_stake_pool_id(pool_rec["cold_key_pair"].vkey_file) + state_dir = cluster_nodes.get_cluster_env().state_dir + pool_log_fname = f"{pool_name.replace('node-', '')}.stdout" + pool_log = state_dir / pool_log_fname + seek_offsets = {str(pool_log): helpers.get_eof_offset(pool_log)} + timestamp = time.time() + if for_epoch == "current": - # wait for beginning of an epoch + # Wait for beginning of an epoch queried_epoch = cluster.wait_for_new_epoch(padding_seconds=5) else: - # wait for stable stake distribution for next epoch, that is last 300 slots of - # current epoch + # Wait for stable stake distribution for next epoch, that is last 300 slots of + # current epoch. clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=-int(300 * cluster.slot_length), @@ -72,55 +82,70 @@ def test_pool_blocks( ) queried_epoch = cluster.g_query.get_epoch() + 1 - # query leadership schedule for selected pool + # Query leadership schedule for selected pool leadership_schedule = cluster.g_query.get_leadership_schedule( vrf_skey_file=pool_rec["vrf_key_pair"].skey_file, cold_vkey_file=pool_rec["cold_key_pair"].vkey_file, for_next=for_epoch != "current", ) + slots_when_scheduled = {r.slot_no for r in leadership_schedule} - # wait for epoch that comes after the queried epoch - cluster.wait_for_epoch(epoch_no=queried_epoch + 1) + expected_msgs = [ + (pool_log_fname, rf'TraceForgedBlock"\),\("slot",Number {s}') + for s in slots_when_scheduled + ] - # get info about minted blocks in queried epoch for the selected pool - minted_blocks = list( - dbsync_queries.query_blocks( - pool_id_bech32=pool_id, epoch_from=queried_epoch, epoch_to=queried_epoch - ) - ) - slots_when_minted = {r.slot_no for r in minted_blocks} + # Wait for epoch that comes after the queried epoch + cluster.wait_for_epoch(epoch_no=queried_epoch + 1) errors: tp.List[str] = [] - # compare leadership schedule with blocks that were actually minted - slots_when_scheduled = {r.slot_no for r in 
leadership_schedule} - - difference_scheduled = slots_when_minted.difference(slots_when_scheduled) - if difference_scheduled: - errors.append( - f"Some blocks were minted in other slots than scheduled: {difference_scheduled}" + log_msgs_errors = logfiles.check_msgs_presence_in_logs( + regex_pairs=expected_msgs, + seek_offsets=seek_offsets, + state_dir=state_dir, + timestamp=timestamp, + ) + if len(log_msgs_errors) > len(leadership_schedule) // 2: + log_msgs_errors_joined = "\n ".join(log_msgs_errors) + errors.append(f"Lot of slots missed: \n {log_msgs_errors_joined}") + + if configuration.HAS_DBSYNC: + # Get info about minted blocks in queried epoch for the selected pool + minted_blocks = list( + dbsync_queries.query_blocks( + pool_id_bech32=pool_id, epoch_from=queried_epoch, epoch_to=queried_epoch + ) ) + slots_when_minted = {r.slot_no for r in minted_blocks} - difference_minted = slots_when_scheduled.difference(slots_when_minted) - if len(difference_minted) > len(leadership_schedule) // 2: - errors.append(f"Lot of slots missed: {difference_minted}") + # Compare leadership schedule with blocks that were actually minted + difference_scheduled = slots_when_minted.difference(slots_when_scheduled) + if difference_scheduled: + errors.append( + f"Some blocks were minted in other slots than scheduled: {difference_scheduled}" + ) - # compare db-sync records with ledger state dump - ledger_state = clusterlib_utils.get_ledger_state(cluster_obj=cluster) - clusterlib_utils.save_ledger_state( - cluster_obj=cluster, - state_name=temp_template, - ledger_state=ledger_state, - ) - blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] - pool_id_dec = helpers.decode_bech32(pool_id) - minted_blocks_ledger = blocks_before.get(pool_id_dec) or 0 - minted_blocks_db = len(slots_when_minted) - if minted_blocks_ledger != minted_blocks_db: - errors.append( - "Numbers of minted blocks reported by ledger state and db-sync don't match: " - f"{minted_blocks_ledger} vs 
{minted_blocks_db}" + difference_minted = slots_when_scheduled.difference(slots_when_minted) + if len(difference_minted) > len(leadership_schedule) // 2: + errors.append(f"Lot of slots missed: {difference_minted}") + + # Compare db-sync records with ledger state dump + ledger_state = clusterlib_utils.get_ledger_state(cluster_obj=cluster) + clusterlib_utils.save_ledger_state( + cluster_obj=cluster, + state_name=temp_template, + ledger_state=ledger_state, ) + blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] + pool_id_dec = helpers.decode_bech32(pool_id) + minted_blocks_ledger = blocks_before.get(pool_id_dec) or 0 + minted_blocks_db = len(slots_when_minted) + if minted_blocks_ledger != minted_blocks_db: + errors.append( + "Numbers of minted blocks reported by ledger state and db-sync don't match: " + f"{minted_blocks_ledger} vs {minted_blocks_db}" + ) if errors: # Xfail if cardano-api GH-269 is still open From 2b61f528401a009d3f5c5ee22a1b8211cef0fd50 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 26 Nov 2024 11:37:18 +0100 Subject: [PATCH 154/168] fix(tests): add enough funds for two deposits --- cardano_node_tests/tests/tests_conway/test_drep.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index efe4f14ee..7f49f7512 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -978,8 +978,8 @@ def test_drep_no_retirement_before_register( @pytest.mark.smoke def test_drep_no_multiple_registration( self, + cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - payment_addr: clusterlib.AddressRecord, use_build_cmd: bool, submit_method: str, ): @@ -999,6 +999,13 @@ def test_drep_no_multiple_registration( drep_metadata_file=drep_metadata_file ) + payment_addr = get_payment_addr( + name_template=temp_template, + 
cluster_manager=cluster_manager, + cluster_obj=cluster, + amount=cluster.conway_genesis["dRepDeposit"] * 2 + 10_000_000, + ) + reqc.cip090.start(url=helpers.get_vcs_link()) reg_drep = governance_utils.get_drep_reg_record( cluster_obj=cluster, From 35308b9384b358169e5305e1c718d1d3a82aca8c Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 26 Nov 2024 12:09:48 +0100 Subject: [PATCH 155/168] fix(tests): unify DRep docstring and capitalization - Improved readability and clarity of docstrings by rephrasing some sentences. - Updated docstrings in test_drep.py to consistently use "DRep" instead of "drep". - Ensured all references to DRep are capitalized for consistency. --- .../tests/tests_conway/test_drep.py | 104 +++++++++--------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 7f49f7512..8477c869d 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -346,11 +346,11 @@ def test_drep_id_is_blake2b_224_of_drep_vkey( self, cluster: clusterlib.ClusterLib, ): - """Test proper drep id is being generated. + """Test that proper DRep id is being generated. 
- * Register a drep - * Hash drep vkey using blake2b_224 - * Check drep ID generated from cli is same as blake2b_224 hash of drep vkey + * Register a DRep + * Hash DRep vkey using blake2b_224 + * Check DRep ID generated from cli is same as blake2b_224 hash of DRep vkey """ reqc.cip085.start(url=helpers.get_vcs_link()) temp_template = common.get_test_id(cluster) @@ -361,7 +361,7 @@ def test_drep_id_is_blake2b_224_of_drep_vkey( drep_metadata_hash = cluster.g_conway_governance.drep.get_metadata_hash( drep_metadata_file=drep_metadata_file ) - # Get a drep registration record + # Get a DRep registration record reg_drep = governance_utils.get_drep_reg_record( cluster_obj=cluster, name_template=temp_template, @@ -369,7 +369,7 @@ def test_drep_id_is_blake2b_224_of_drep_vkey( drep_metadata_hash=drep_metadata_hash, ) vkey_file_path = reg_drep.key_pair.vkey_file - # Get drep vkey from vkey file + # Get DRep vkey from vkey file with open(vkey_file_path) as vkey_file: vkey_file_json = json.loads(vkey_file.read()) cbor_hex = vkey_file_json["cborHex"] @@ -377,9 +377,9 @@ def test_drep_id_is_blake2b_224_of_drep_vkey( decoded_data = cbor2.loads(cbor_binary) blake2b_224 = hashlib.blake2b(digest_size=28) blake2b_224.update(decoded_data) - # Obtain blake2b_224 hash of drep vkey + # Obtain blake2b_224 hash of DRep vkey hash_digest = blake2b_224.hexdigest() - assert reg_drep.drep_id == hash_digest, "Drep ID hash is not blake2b_224." + assert reg_drep.drep_id == hash_digest, "DRep ID hash is not blake2b_224." reqc.cip085.success() @allure.link(helpers.get_vcs_link()) @@ -397,11 +397,11 @@ def test_register_and_retire_drep( ): """Test DRep registration and retirement. 
- * register DRep - * check that DRep was registered - * retire DRep - * check that DRep was retired - * check that deposit was returned to source address + * Register DRep + * Check that DRep was registered + * Retire DRep + * Check that DRep was retired + * Check that deposit was returned to source address """ # pylint: disable=too-many-locals temp_template = common.get_test_id(cluster) @@ -544,9 +544,9 @@ def test_register_wrong_metadata( ): """Register a DRep with wrong metadata url. - * register DRep with mismatch url metadata vs metadata file - * check that DRep was registered - * verify that dbsync is returning an error + * Register DRep with mismatch url metadata vs metadata file + * Check that DRep was registered + * Verify that dbsync is returning an error """ temp_template = common.get_test_id(cluster) @@ -650,12 +650,12 @@ def test_no_witness_register_and_retire( # noqa: C901 There was a ledger issue that allowed a DRep to be registered without needing the corresponding skey witness. - * try to register DRep without skey, expect failure - * register DRep - * check that DRep was registered - * try to retire DRep without skey, expect failure - * retire DRep - * check that DRep was retired + * Try to register DRep without skey, expect failure + * Register DRep + * Check that DRep was registered + * Try to retire DRep without skey, expect failure + * Retire DRep + * Check that DRep was retired """ temp_template = common.get_test_id(cluster) errors_final = [] @@ -780,12 +780,12 @@ def test_no_multiple_delegation( testfile_temp_dir: pl.Path, request: FixtureRequest, ): - """Test No multiple delegation to different dreps. + """Test that it is not possible to delegate to multiple DReps at the same time. 
- * Create 2 Dreps - * Create vote delegation certifcate to both dreps + * Create 2 DReps + * Create vote delegation certifcate to both DReps * Submit both certificates - * check that the Drep certificate placed at last of the certificates is delegated to + * Check that the DRep certificate placed at last of the certificates is delegated to """ cluster = cluster_rewards temp_template = common.get_test_id(cluster) @@ -816,7 +816,7 @@ def test_no_multiple_delegation( ) reqc.cip087.start(url=helpers.get_vcs_link()) - # Create vote delegation cert for drep 1 + # Create vote delegation cert for DRep 1 deleg_cert_1 = cluster.g_stake_address.gen_vote_delegation_cert( addr_name=f"{temp_template}_addr1", stake_vkey_file=pool_user_rewards.stake.vkey_file, @@ -825,7 +825,7 @@ def test_no_multiple_delegation( always_no_confidence=False, ) - # Create vote delegation cert for drep 2 + # Create vote delegation cert for DRep 2 deleg_cert_2 = cluster.g_stake_address.gen_vote_delegation_cert( addr_name=f"{temp_template}_addr2", stake_vkey_file=pool_user_rewards.stake.vkey_file, @@ -880,7 +880,7 @@ def test_no_delegation_without_stake_registration( custom_drep: governance_utils.DRepRegistration, drep: str, ): - """Test No voting delegation without registering stake address first. + """Test that it is not possible to delegate without registering stake address first. * Use a wallet without registered stake address * Create vote delegation certifcate using unregistered wallet stake key @@ -932,11 +932,11 @@ def test_drep_no_retirement_before_register( use_build_cmd: bool, submit_method: str, ): - """Test No Drep retirement before register. + """Test that it is not possible to retire DRep before registering it. 
* Create a retirement certificate without registering * Submit certificate - * check it is not possible to retire before register + * Check that it is not possible to retire before registering the DRep """ temp_template = common.get_test_id(cluster) drep_keys = cluster.g_conway_governance.drep.gen_key_pair( @@ -955,7 +955,7 @@ def test_drep_no_retirement_before_register( signing_key_files=[payment_addr.skey_file, drep_keys.skey_file], ) - # Expecting error for both cases as drep is not registered + # Expecting error for both cases as DRep is not registered with pytest.raises((clusterlib.CLIError, submit_api.SubmitApiError)) as excinfo: clusterlib_utils.build_and_submit_tx( cluster_obj=cluster, @@ -983,10 +983,10 @@ def test_drep_no_multiple_registration( use_build_cmd: bool, submit_method: str, ): - """Test Drep cannot be registered multiple time. + """Test that DRep cannot be registered multiple times. - * Generate drep keys - * Create a drep registration certificate + * Generate DRep keys + * Create a DRep registration certificate * Submit the registration certificate twice * Expect ConwayDRepAlreadyRegistered on the second time """ @@ -1018,7 +1018,7 @@ def test_drep_no_multiple_registration( signing_key_files=[payment_addr.skey_file, reg_drep.key_pair.skey_file], ) - # Submit drep registration certificate + # Submit DRep registration certificate clusterlib_utils.build_and_submit_tx( cluster_obj=cluster, name_template=f"{temp_template}_reg", @@ -1029,10 +1029,10 @@ def test_drep_no_multiple_registration( deposit=reg_drep.deposit, ) - # Wait for some blocks and again submit drep registration certificate + # Wait for some blocks and again submit DRep registration certificate cluster.wait_for_new_block(new_blocks=2) - # Expecting error as drep is already registered + # Expecting error as DRep is already registered with pytest.raises((clusterlib.CLIError, submit_api.SubmitApiError)) as excinfo: clusterlib_utils.build_and_submit_tx( cluster_obj=cluster, @@ -1073,14 
+1073,14 @@ def test_dreps_delegation( ): """Test delegating to DReps. - * register stake address - * delegate stake to following DReps: + * Register stake address + * Delegate stake to following DReps: - always-abstain - always-no-confidence - custom DRep - * check that the stake address is registered + * Check that the stake address is registered """ # pylint: disable=too-many-statements,too-many-locals cluster = cluster_rewards @@ -1255,7 +1255,7 @@ def _deregister(): assert ( db_drep_distr - ), f"No Drep distribution found for Drep {drep_id_bech32} and epoch {deleg_epoch}" + ), f"No DRep distribution found for DRep {drep_id_bech32} and epoch {deleg_epoch}" assert ( db_drep_distr[0].amount >= deleg_amount ), f"Unexpected delegated amount in dbsync: {db_drep_distr[0].amount} < {deleg_amount}" @@ -1282,14 +1282,14 @@ def test_dreps_and_spo_delegation( ): """Test delegating to DRep and SPO using single certificate. - * register stake address - * delegate stake to a stake pool and to following DReps: + * Register stake address + * Delegate stake to a stake pool and to following DReps: - always-abstain - always-no-confidence - custom DRep - * check that the stake address is registered and delegated + * Check that the stake address is registered and delegated """ cluster, pool_id = cluster_and_pool_and_rewards temp_template = common.get_test_id(cluster) @@ -1434,20 +1434,20 @@ def test_change_delegation( testfile_temp_dir: pl.Path, request: FixtureRequest, ): - """Test Change delegation to different dreps. + """Test changing delegation to a different DRep. 
- * Create 2 Dreps - * Create vote delegation certifcate for first drep + * Create 2 DReps + * Create vote delegation certifcate for the first DRep * Submit certificate - * check that the delegation is of correct drep id + * Check that the delegation is of correct DRep id * Change delegation to drep2 and submit certificate - * Check vote delegation is updated to second drep + * Check that vote delegation is updated to second DRep """ cluster = cluster_rewards temp_template = common.get_test_id(cluster) deposit_amt = cluster.g_query.get_address_deposit() key1 = helpers.get_current_line_str() - # Get first drep + # Get first DRep drep1 = get_custom_drep( name_template=f"custom_drep_1_{temp_template}", cluster_manager=cluster_manager, @@ -1457,7 +1457,7 @@ def test_change_delegation( ) key2 = helpers.get_current_line_str() - # Get second drep + # Get second DRep drep2 = get_custom_drep( name_template=f"custom_drep_2_{temp_template}", cluster_manager=cluster_manager, From 3212e9078fe8af6504c54b38e0b2d4bffd7b7a0f Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 26 Nov 2024 16:00:33 +0100 Subject: [PATCH 156/168] fix(tests): handle DRep retirement in delegation tests - Ensure votes are correctly delegated to the second DRep after the first DRep is retired. - Check ledger issue 4772. 
--- cardano_node_tests/tests/issues.py | 6 +++ .../tests/tests_conway/test_drep.py | 41 +++++++++++++++++-- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/cardano_node_tests/tests/issues.py b/cardano_node_tests/tests/issues.py index f76632ffa..19d3179c8 100644 --- a/cardano_node_tests/tests/issues.py +++ b/cardano_node_tests/tests/issues.py @@ -168,6 +168,12 @@ fixed_in="8.12.0", # Unknown yet, will be fixed/changed sometime in the future message="Inconsistent listing of DRep expiry.", ) +ledger_4772 = blockers.GH( + issue=4772, + repo="IntersectMBO/cardano-ledger", + fixed_in="10.1.3.0", # Unknown yet, will be fixed/changed sometime in the future + message="Delegation to DRep2 removed after retirement of DRep1.", +) node_3788 = blockers.GH( issue=3788, diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 8477c869d..2e8d7fa4d 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -1446,8 +1446,9 @@ def test_change_delegation( cluster = cluster_rewards temp_template = common.get_test_id(cluster) deposit_amt = cluster.g_query.get_address_deposit() - key1 = helpers.get_current_line_str() + # Get first DRep + key1 = helpers.get_current_line_str() drep1 = get_custom_drep( name_template=f"custom_drep_1_{temp_template}", cluster_manager=cluster_manager, @@ -1456,8 +1457,8 @@ def test_change_delegation( caching_key=key1, ) - key2 = helpers.get_current_line_str() # Get second DRep + key2 = helpers.get_current_line_str() drep2 = get_custom_drep( name_template=f"custom_drep_2_{temp_template}", cluster_manager=cluster_manager, @@ -1513,8 +1514,9 @@ def _deregister(): drep_id=drep1.drep_id ), "Votes are NOT delegated to the correct DRep 1" + # Change delegation to second DRep + reqc.cip086.start(url=helpers.get_vcs_link()) - # Change delegation to drep2 deleg_cert = cluster.g_stake_address.gen_vote_delegation_cert( 
addr_name=f"{temp_template}_addr2", stake_vkey_file=pool_user_rewards.stake.vkey_file, @@ -1542,6 +1544,39 @@ def _deregister(): ), "Votes are NOT changed to the correct DRep 2" reqc.cip086.success() + # Retire the first DRep + + ret_cert = cluster.g_conway_governance.drep.gen_retirement_cert( + cert_name=temp_template, + deposit_amt=drep1.deposit, + drep_vkey_file=drep1.key_pair.vkey_file, + ) + + tx_files_ret = clusterlib.TxFiles( + certificate_files=[ret_cert], + signing_key_files=[payment_addr_rewards.skey_file, drep1.key_pair.skey_file], + ) + + clusterlib_utils.build_and_submit_tx( + cluster_obj=cluster, + name_template=f"{temp_template}_ret", + src_address=payment_addr_rewards.address, + tx_files=tx_files_ret, + deposit=-drep1.deposit, + ) + + ret_drep_state = cluster.g_conway_governance.query.drep_state( + drep_vkey_file=drep1.key_pair.vkey_file + ) + assert not ret_drep_state, "DRep was not retired" + + stake_addr_info = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) + if not stake_addr_info.vote_delegation: + issues.ledger_4772.finish_test() + assert stake_addr_info.vote_delegation == governance_utils.get_drep_cred_name( + drep_id=drep2.drep_id + ), "Votes are no longer delegated to DRep 2!" + class TestDRepActivity: """Tests for DReps activity.""" From 998e731d064660121e57d7cb70fb0dfb7f188a40 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Tue, 26 Nov 2024 16:24:03 +0100 Subject: [PATCH 157/168] fix(issues): update fixed_in version for cli issue 904 Updated the fixed_in version for the cli issue 904 in the cardano-cli repository from 9.5.0.0 to 10.1.2.1. The issue is already fixed in cardano-cli that was not released with cardano-node yet. 
--- cardano_node_tests/tests/issues.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cardano_node_tests/tests/issues.py b/cardano_node_tests/tests/issues.py index 19d3179c8..612be8550 100644 --- a/cardano_node_tests/tests/issues.py +++ b/cardano_node_tests/tests/issues.py @@ -83,7 +83,7 @@ cli_904 = blockers.GH( issue=904, repo="IntersectMBO/cardano-cli", - fixed_in="9.5.0.0", # Fixed in some release after 9.4.1.0 + fixed_in="10.1.2.1", # Fixed in some release after 10.1.2.0 message="Negative pparam proposal values overflow to positive.", ) cli_942 = blockers.GH( From 99e032a0dd2592211c1592321927a887ae592688 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 27 Nov 2024 16:47:02 +0100 Subject: [PATCH 158/168] feat(tests): update delegation test for DRep retirement - Updated `get_custom_drep` to `create_drep` for DRep creation - Improved assertions for vote delegation checks --- .../tests/tests_conway/test_drep.py | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 2e8d7fa4d..8b73a9ec8 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -1428,7 +1428,6 @@ def _get_drep_rec( def test_change_delegation( self, cluster_rewards: clusterlib.ClusterLib, - cluster_manager: cluster_management.ClusterManager, payment_addr_rewards: clusterlib.AddressRecord, pool_user_rewards: clusterlib.PoolUser, testfile_temp_dir: pl.Path, @@ -1442,29 +1441,25 @@ def test_change_delegation( * Check that the delegation is of correct DRep id * Change delegation to drep2 and submit certificate * Check that vote delegation is updated to second DRep + * Retire the first DRep + * Check that votes are still delegated to the second DRep """ cluster = cluster_rewards temp_template = common.get_test_id(cluster) deposit_amt = cluster.g_query.get_address_deposit() # 
Get first DRep - key1 = helpers.get_current_line_str() - drep1 = get_custom_drep( + drep1 = create_drep( name_template=f"custom_drep_1_{temp_template}", - cluster_manager=cluster_manager, cluster_obj=cluster, payment_addr=payment_addr_rewards, - caching_key=key1, ) # Get second DRep - key2 = helpers.get_current_line_str() - drep2 = get_custom_drep( + drep2 = create_drep( name_template=f"custom_drep_2_{temp_template}", - cluster_manager=cluster_manager, cluster_obj=cluster, payment_addr=payment_addr_rewards, - caching_key=key2, ) # Create stake address registration cert @@ -1509,8 +1504,10 @@ def _deregister(): request.addfinalizer(_deregister) - stake_addr_info = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) - assert stake_addr_info.vote_delegation == governance_utils.get_drep_cred_name( + stake_addr_info_deleg1 = cluster.g_query.get_stake_addr_info( + pool_user_rewards.stake.address + ) + assert stake_addr_info_deleg1.vote_delegation == governance_utils.get_drep_cred_name( drep_id=drep1.drep_id ), "Votes are NOT delegated to the correct DRep 1" @@ -1538,8 +1535,10 @@ def _deregister(): tx_files=tx_files, deposit=deposit_amt, ) - stake_addr_info = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) - assert stake_addr_info.vote_delegation == governance_utils.get_drep_cred_name( + stake_addr_info_deleg2 = cluster.g_query.get_stake_addr_info( + pool_user_rewards.stake.address + ) + assert stake_addr_info_deleg2.vote_delegation == governance_utils.get_drep_cred_name( drep_id=drep2.drep_id ), "Votes are NOT changed to the correct DRep 2" reqc.cip086.success() @@ -1568,12 +1567,12 @@ def _deregister(): ret_drep_state = cluster.g_conway_governance.query.drep_state( drep_vkey_file=drep1.key_pair.vkey_file ) - assert not ret_drep_state, "DRep was not retired" + assert not ret_drep_state, "DRep 1 was not retired" - stake_addr_info = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) - if not 
stake_addr_info.vote_delegation: + stake_addr_info_ret = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) + if not stake_addr_info_ret.vote_delegation: issues.ledger_4772.finish_test() - assert stake_addr_info.vote_delegation == governance_utils.get_drep_cred_name( + assert stake_addr_info_ret.vote_delegation == governance_utils.get_drep_cred_name( drep_id=drep2.drep_id ), "Votes are no longer delegated to DRep 2!" From 43a9cf72e5c36a814dca284c9a09e5a99ba3c380 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Wed, 27 Nov 2024 17:34:58 +0100 Subject: [PATCH 159/168] fix(tests): handle vote redelegation in PV9 - Added condition to check if the cluster is in PV9 - Ensured vote delegation is empty with PV9 due to ledger issue 4772 --- .../tests/tests_conway/test_drep.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 8b73a9ec8..f99c51545 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -1570,11 +1570,17 @@ def _deregister(): assert not ret_drep_state, "DRep 1 was not retired" stake_addr_info_ret = cluster.g_query.get_stake_addr_info(pool_user_rewards.stake.address) - if not stake_addr_info_ret.vote_delegation: - issues.ledger_4772.finish_test() - assert stake_addr_info_ret.vote_delegation == governance_utils.get_drep_cred_name( - drep_id=drep2.drep_id - ), "Votes are no longer delegated to DRep 2!" + + if conway_common.is_in_bootstrap(cluster): + assert ( + not stake_addr_info_ret.vote_delegation + ), "Due to ledger issue 4772, vote delegation should be empty with PV9" + else: + if not stake_addr_info_ret.vote_delegation: + issues.ledger_4772.finish_test() + assert stake_addr_info_ret.vote_delegation == governance_utils.get_drep_cred_name( + drep_id=drep2.drep_id + ), "Votes are no longer delegated to DRep 2!" 
class TestDRepActivity: From 6f448031450fcf7932dcbfea65bad08fce7e8d77 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 28 Nov 2024 14:01:20 +0100 Subject: [PATCH 160/168] feat(utils): add function to find messages in logs Added a new function `find_msgs_in_logs` to search for messages in log files based on a given regex pattern. This function supports seeking from a specific offset and timestamp, and can return either all matching lines or just the first match. --- cardano_node_tests/utils/logfiles.py | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index d74799a3c..d4bfe101c 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -274,13 +274,38 @@ def add_ignore_rule( infile.write(f"{files_glob};;{skip_after};;{regex}\n") +def find_msgs_in_logs( + regex: str, + logfile: pl.Path, + seek_offset: int, + timestamp: float, + only_first: bool = False, +) -> tp.List[str]: + """Find messages in log.""" + regex_comp = re.compile(regex) + lines_found = [] + for logfile_rec in _get_rotated_logs( + logfile=pl.Path(logfile), seek=seek_offset, timestamp=timestamp + ): + with open(logfile_rec.logfile, encoding="utf-8") as infile: + infile.seek(logfile_rec.seek) + for line in infile: + if regex_comp.search(line): + lines_found.append(line) + if only_first: + break + if lines_found and only_first: + break + return lines_found + + def check_msgs_presence_in_logs( regex_pairs: tp.List[tp.Tuple[str, str]], seek_offsets: tp.Dict[str, int], state_dir: pl.Path, timestamp: float, ) -> tp.List[str]: - """Make sure the expected messages are present in logs.""" + """Check if the expected messages are present in logs.""" errors = [] for files_glob, regex in regex_pairs: regex_comp = re.compile(regex) From 6f822c542188fb0327bb978edef42e38b46a3457 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 28 Nov 2024 14:09:32 
+0100 Subject: [PATCH 161/168] feat(tests): enhance block minting checks using log data Get slots in which blocks were minted from pool log file, and compare them with leadership schedule and ledger state. --- cardano_node_tests/tests/test_blocks.py | 116 ++++++++++++++++-------- 1 file changed, 76 insertions(+), 40 deletions(-) diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index 7471d0e11..5be1ec6e3 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -6,6 +6,7 @@ import logging import os import random +import re import shutil import signal import sqlite3 @@ -38,7 +39,7 @@ class TestLeadershipSchedule: @allure.link(helpers.get_vcs_link()) @pytest.mark.dbsync @pytest.mark.parametrize("for_epoch", ("current", "next")) - def test_pool_blocks( + def test_pool_blocks( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], @@ -48,12 +49,10 @@ def test_pool_blocks( * query leadership schedule for selected pool for current epoch or next epoch * wait for epoch that comes after the queried epoch - * check in log files that the blocks were minted in expected slots - * if db-sync is available: - - - get info about minted blocks in queried epoch for the selected pool - - compare leadership schedule with blocks that were actually minted - - compare db-sync records with ledger state dump + * get info about minted blocks in queried epoch for the selected pool + * compare leadership schedule with blocks that were actually minted + * compare log records with ledger state dump + * (optional) check minted blocks in db-sync """ # pylint: disable=unused-argument cluster, pool_name = cluster_use_pool @@ -62,10 +61,10 @@ def test_pool_blocks( pool_rec = cluster_manager.cache.addrs_data[pool_name] pool_id = cluster.g_stake_pool.get_stake_pool_id(pool_rec["cold_key_pair"].vkey_file) - state_dir = 
cluster_nodes.get_cluster_env().state_dir - pool_log_fname = f"{pool_name.replace('node-', '')}.stdout" - pool_log = state_dir / pool_log_fname - seek_offsets = {str(pool_log): helpers.get_eof_offset(pool_log)} + pool_log = ( + cluster_nodes.get_cluster_env().state_dir / f"{pool_name.replace('node-', '')}.stdout" + ) + seek_offset = helpers.get_eof_offset(pool_log) timestamp = time.time() if for_epoch == "current": @@ -90,27 +89,66 @@ def test_pool_blocks( ) slots_when_scheduled = {r.slot_no for r in leadership_schedule} - expected_msgs = [ - (pool_log_fname, rf'TraceForgedBlock"\),\("slot",Number {s}') - for s in slots_when_scheduled - ] - # Wait for epoch that comes after the queried epoch - cluster.wait_for_epoch(epoch_no=queried_epoch + 1) + cluster.wait_for_epoch(epoch_no=queried_epoch + 1, padding_seconds=10) + + # Get number of minted blocks from ledger + ledger_state = clusterlib_utils.get_ledger_state(cluster_obj=cluster) + clusterlib_utils.save_ledger_state( + cluster_obj=cluster, + state_name=temp_template, + ledger_state=ledger_state, + ) + blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] + pool_id_dec = helpers.decode_bech32(pool_id) + minted_blocks_ledger = blocks_before.get(pool_id_dec) or 0 errors: tp.List[str] = [] - log_msgs_errors = logfiles.check_msgs_presence_in_logs( - regex_pairs=expected_msgs, - seek_offsets=seek_offsets, - state_dir=state_dir, - timestamp=timestamp, - ) - if len(log_msgs_errors) > len(leadership_schedule) // 2: - log_msgs_errors_joined = "\n ".join(log_msgs_errors) - errors.append(f"Lot of slots missed: \n {log_msgs_errors_joined}") + def _check_logs() -> None: + # Get info about minted blocks in queried epoch for the selected pool + minted_lines = logfiles.find_msgs_in_logs( + regex='"TraceForgedBlock"', + logfile=pool_log, + seek_offset=seek_offset, + timestamp=timestamp, + ) + tip = cluster.g_query.get_tip() + last_slot_queried_epoch = int(tip["slot"]) - int(tip["slotInEpoch"] - 1) + 
first_slot_queried_epoch = ( + last_slot_queried_epoch - int(cluster.genesis["epochLength"]) + 1 + ) + slots_pattern = re.compile(r'"slot",Number (\d+)\.0') + slots_when_minted = { + s + for m in minted_lines + if (o := slots_pattern.search(m)) is not None + and first_slot_queried_epoch <= (s := int(o.group(1))) <= last_slot_queried_epoch + } - if configuration.HAS_DBSYNC: + # Compare leadership schedule with blocks that were actually minted + difference_scheduled = slots_when_minted.difference(slots_when_scheduled) + if difference_scheduled: + errors.append( + f"Some blocks were minted in other slots than scheduled: {difference_scheduled}" + ) + + difference_minted = slots_when_scheduled.difference(slots_when_minted) + if len(difference_minted) > len(leadership_schedule) // 5: + errors.append(f"Lot of slots missed: {difference_minted}") + + # Compare log records with ledger state dump + minted_blocks_logs = len(slots_when_minted) + # Some minted block may not be adopted, and so the total number of adopted blocks + # may be lower than the number of minted blocks. 
+ if minted_blocks_ledger > minted_blocks_logs: + errors.append( + "Number of minted blocks reported by ledger state " + "is higher than number extracted from log file: " + f"{minted_blocks_ledger} vs {minted_blocks_logs}" + ) + + def _check_dbsync() -> None: # Get info about minted blocks in queried epoch for the selected pool minted_blocks = list( dbsync_queries.query_blocks( @@ -123,30 +161,28 @@ def test_pool_blocks( difference_scheduled = slots_when_minted.difference(slots_when_scheduled) if difference_scheduled: errors.append( - f"Some blocks were minted in other slots than scheduled: {difference_scheduled}" + "DB-Sync: Some blocks were minted in other slots than scheduled: " + f"{difference_scheduled}" ) difference_minted = slots_when_scheduled.difference(slots_when_minted) - if len(difference_minted) > len(leadership_schedule) // 2: - errors.append(f"Lot of slots missed: {difference_minted}") + if len(difference_minted) > len(leadership_schedule) // 3: + errors.append(f"DB-Sync: Lot of slots missed: {difference_minted}") # Compare db-sync records with ledger state dump - ledger_state = clusterlib_utils.get_ledger_state(cluster_obj=cluster) - clusterlib_utils.save_ledger_state( - cluster_obj=cluster, - state_name=temp_template, - ledger_state=ledger_state, - ) - blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] - pool_id_dec = helpers.decode_bech32(pool_id) - minted_blocks_ledger = blocks_before.get(pool_id_dec) or 0 minted_blocks_db = len(slots_when_minted) if minted_blocks_ledger != minted_blocks_db: errors.append( - "Numbers of minted blocks reported by ledger state and db-sync don't match: " + "DB-Sync: Numbers of minted blocks reported by ledger state " + "and db-sync don't match: " f"{minted_blocks_ledger} vs {minted_blocks_db}" ) + _check_logs() + + if configuration.HAS_DBSYNC: + _check_dbsync() + if errors: # Xfail if cardano-api GH-269 is still open if ( From 9e0ec10b0d10053e9d0655d12e70af998cb28967 Mon Sep 17 00:00:00 2001 From: 
Martin Kourim Date: Thu, 28 Nov 2024 14:54:23 +0100 Subject: [PATCH 162/168] refactor: rename and refactor resource functions Renamed `_get_resources_from_paths` to `get_resources_from_path` and refactored the function to use a helper function `_get_res` for better readability and maintainability. --- .../cluster_management/cluster_getter.py | 4 ++-- cardano_node_tests/cluster_management/common.py | 14 ++++++++++++-- cardano_node_tests/cluster_management/manager.py | 2 +- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index 44719ec26..d114527cd 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -440,14 +440,14 @@ def _update_marked_tests( def _resolve_resources_availability(self, cget_status: _ClusterGetStatus) -> bool: """Resolve availability of required "use" and "lock" resources.""" - resources_locked = common._get_resources_from_paths( + resources_locked = common.get_resources_from_path( paths=cget_status.instance_dir.glob(f"{common.RESOURCE_LOCKED_GLOB}_*") ) # This test wants to lock some resources, check if these are not in use res_lockable = [] if cget_status.lock_resources: - resources_used = common._get_resources_from_paths( + resources_used = common.get_resources_from_path( paths=cget_status.instance_dir.glob(f"{common.RESOURCE_IN_USE_GLOB}_*") ) unlockable_resources = {*resources_locked, *resources_used} diff --git a/cardano_node_tests/cluster_management/common.py b/cardano_node_tests/cluster_management/common.py index 977852dba..076188e3d 100644 --- a/cardano_node_tests/cluster_management/common.py +++ b/cardano_node_tests/cluster_management/common.py @@ -24,8 +24,18 @@ ADDRS_DATA_DIRNAME = "addrs_data" +RE_RESNAME = re.compile("_@@(.+)@@_") -def _get_resources_from_paths(paths: tp.Iterator[pl.Path]) -> tp.List[str]: + +def _get_res(path: 
pl.Path) -> str: + out = RE_RESNAME.search(str(path)) + if out is None: + err = f"Resource name not found in path: {path}" + raise ValueError(err) + return out.group(1) + + +def get_resources_from_path(paths: tp.Iterator[pl.Path]) -> tp.List[str]: """Get resources names from status files path.""" - resources = [re.search("_@@(.+)@@_", str(r)).group(1) for r in paths] # type: ignore + resources = [_get_res(p) for p in paths] return resources diff --git a/cardano_node_tests/cluster_management/manager.py b/cardano_node_tests/cluster_management/manager.py index b84048e20..c7a2615cc 100644 --- a/cardano_node_tests/cluster_management/manager.py +++ b/cardano_node_tests/cluster_management/manager.py @@ -296,7 +296,7 @@ def _get_resources_by_glob( msg = "`from_set` cannot be a string" raise AssertionError(msg) - resources_locked = set(common._get_resources_from_paths(paths=self.instance_dir.glob(glob))) + resources_locked = set(common.get_resources_from_path(paths=self.instance_dir.glob(glob))) if from_set is not None: return list(resources_locked.intersection(from_set)) From c665a6742176f6fe82b2eb63d2d5cd0052f04935 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 28 Nov 2024 15:36:03 +0100 Subject: [PATCH 163/168] fix(tests): correct stake set key names in tests Corrected the key names for stake set in test_cli.py and test_ledger_state.py to match the expected values. This fixes issues with missing keys in stake snapshots and ensures the tests run correctly. 
--- cardano_node_tests/tests/test_cli.py | 4 ++-- cardano_node_tests/tests/test_ledger_state.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index 7467c532e..615f8fb19 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -1112,10 +1112,10 @@ def _dump_on_error(): elif not { "activeStakeGo", "activeStakeMark", - "activeStaketp.Set", + "activeStakeSet", "poolStakeGo", "poolStakeMark", - "poolStaketp.Set", + "poolStakeSet", }.issubset(stake_snapshot): errors.append(f"Missing some expected keys: {stake_snapshot.keys()}") diff --git a/cardano_node_tests/tests/test_ledger_state.py b/cardano_node_tests/tests/test_ledger_state.py index ab73b142c..8c3ad68c3 100644 --- a/cardano_node_tests/tests/test_ledger_state.py +++ b/cardano_node_tests/tests/test_ledger_state.py @@ -156,7 +156,7 @@ def _get_delegations(snapshot: str) -> tp.Dict[str, tp.List[str]]: pstake_go_cmd = stake_snapshot["pools"][pool_id_dec]["stakeGo"] else: pstake_mark_cmd = stake_snapshot["poolStakeMark"] - pstake_set_cmd = stake_snapshot["poolStaketp.Set"] + pstake_set_cmd = stake_snapshot["poolStakeSet"] pstake_go_cmd = stake_snapshot["poolStakeGo"] if pstake_sum_mark != pstake_mark_cmd: @@ -203,8 +203,8 @@ def _get_delegations(snapshot: str) -> tp.Dict[str, tp.List[str]]: else: if sum_mark < stake_snapshot["activeStakeMark"]: errors.append(f"active_mark: {sum_mark} < {stake_snapshot['activeStakeMark']}") - if sum_set < stake_snapshot["activeStaketp.Set"]: - errors.append(f"active_set: {sum_set} < {stake_snapshot['activeStaketp.Set']}") + if sum_set < stake_snapshot["activeStakeSet"]: + errors.append(f"active_set: {sum_set} < {stake_snapshot['activeStakeSet']}") if sum_go < stake_snapshot["activeStakeGo"]: errors.append(f"active_go: {sum_go} < {stake_snapshot['activeStakeGo']}") From a963c36a718481c43f03c836b530e8319f1487cf Mon Sep 17 00:00:00 2001 From: Martin 
Kourim Date: Thu, 28 Nov 2024 15:19:45 +0100 Subject: [PATCH 164/168] refactor: update types annotation to new format --- cardano_node_tests/cardano_cli_coverage.py | 6 +- .../cluster_management/cache.py | 4 +- .../cluster_management/cluster_getter.py | 8 +- .../cluster_management/common.py | 2 +- .../cluster_management/manager.py | 6 +- .../cluster_management/resources.py | 6 +- .../resources_management.py | 10 +- cardano_node_tests/tests/common.py | 14 +- cardano_node_tests/tests/conftest.py | 6 +- cardano_node_tests/tests/delegation.py | 6 +- cardano_node_tests/tests/kes.py | 14 +- cardano_node_tests/tests/plutus_common.py | 2 +- .../tests/test_addr_registration.py | 27 ++-- cardano_node_tests/tests/test_blocks.py | 21 ++- .../tests/test_chain_transactions.py | 4 +- cardano_node_tests/tests/test_cli.py | 7 +- cardano_node_tests/tests/test_dbsync.py | 4 +- cardano_node_tests/tests/test_delegation.py | 61 ++++---- .../tests/test_env_network_id.py | 6 +- cardano_node_tests/tests/test_kes.py | 8 +- cardano_node_tests/tests/test_ledger_state.py | 11 +- cardano_node_tests/tests/test_metrics.py | 2 +- cardano_node_tests/tests/test_mir_certs.py | 31 ++-- .../tests/test_native_tokens.py | 75 +++++---- cardano_node_tests/tests/test_node_upgrade.py | 7 +- .../tests/test_pool_saturation.py | 19 +-- cardano_node_tests/tests/test_pools.py | 40 ++--- cardano_node_tests/tests/test_protocol.py | 5 +- cardano_node_tests/tests/test_reconnect.py | 10 +- cardano_node_tests/tests/test_rollback.py | 8 +- cardano_node_tests/tests/test_scripts.py | 116 +++++++------- cardano_node_tests/tests/test_socket_path.py | 6 +- .../tests/test_staking_no_rewards.py | 14 +- .../tests/test_staking_rewards.py | 64 ++++---- cardano_node_tests/tests/test_tx_basic.py | 63 ++++---- cardano_node_tests/tests/test_tx_fees.py | 55 ++++--- .../tests/test_tx_many_utxos.py | 10 +- cardano_node_tests/tests/test_tx_mempool.py | 5 +- cardano_node_tests/tests/test_tx_negative.py | 94 +++++------ 
.../tests/test_tx_unbalanced.py | 23 ++- .../tests/tests_conway/conway_common.py | 34 ++-- .../tests/tests_conway/test_committee.py | 16 +- .../tests/tests_conway/test_constitution.py | 6 +- .../tests/tests_conway/test_drep.py | 22 +-- .../tests/tests_conway/test_guardrails.py | 30 ++-- .../tests/tests_conway/test_info.py | 3 +- .../tests/tests_conway/test_pparam_update.py | 9 +- .../tests_conway/test_treasury_withdrawals.py | 5 +- .../test_update_plutusv2_builtins.py | 5 +- .../tests/tests_plutus/mint_build.py | 3 +- .../tests/tests_plutus/mint_raw.py | 3 +- .../tests/tests_plutus/spend_build.py | 14 +- .../tests/tests_plutus/spend_raw.py | 14 +- .../tests/tests_plutus/test_delegation.py | 42 ++--- .../tests/tests_plutus/test_lobster.py | 16 +- .../tests/tests_plutus/test_mint_build.py | 37 ++--- .../tests_plutus/test_mint_negative_build.py | 15 +- .../tests_plutus/test_mint_negative_raw.py | 27 ++-- .../tests/tests_plutus/test_mint_raw.py | 21 ++- .../tests/tests_plutus/test_spend_build.py | 22 +-- .../tests_plutus/test_spend_compat_build.py | 5 +- .../tests_plutus/test_spend_compat_raw.py | 7 +- .../tests_plutus/test_spend_datum_build.py | 21 ++- .../tests_plutus/test_spend_datum_raw.py | 21 ++- .../tests_plutus/test_spend_negative_build.py | 147 ++++++------------ .../tests_plutus/test_spend_negative_raw.py | 40 +++-- .../tests/tests_plutus/test_spend_raw.py | 24 +-- .../tests/tests_plutus_v2/mint_build.py | 6 +- .../tests/tests_plutus_v2/mint_raw.py | 6 +- .../tests/tests_plutus_v2/spend_build.py | 10 +- .../tests/tests_plutus_v2/spend_raw.py | 10 +- .../tests/tests_plutus_v2/test_mint_build.py | 14 +- .../test_mint_negative_build.py | 5 +- .../tests_plutus_v2/test_mint_negative_raw.py | 7 +- .../tests/tests_plutus_v2/test_mint_raw.py | 10 +- .../test_mint_secp256k1_build.py | 8 +- .../test_mint_secp256k1_raw.py | 8 +- .../tests/tests_plutus_v2/test_spend_build.py | 7 +- .../test_spend_collateral_build.py | 10 +- .../test_spend_collateral_raw.py | 7 +- 
.../test_spend_compat_build.py | 8 +- .../tests_plutus_v2/test_spend_compat_raw.py | 8 +- .../tests_plutus_v2/test_spend_datum_build.py | 10 +- .../tests_plutus_v2/test_spend_datum_raw.py | 12 +- .../tests/tests_plutus_v2/test_spend_raw.py | 7 +- .../test_spend_ref_inputs_build.py | 16 +- .../test_spend_ref_inputs_raw.py | 14 +- .../test_spend_ref_scripts_build.py | 22 +-- .../test_spend_ref_scripts_raw.py | 24 +-- .../test_spend_secp256k1_build.py | 19 +-- .../test_spend_secp256k1_raw.py | 11 +- cardano_node_tests/tests/tx_common.py | 5 +- cardano_node_tests/utils/blockers.py | 2 +- cardano_node_tests/utils/cluster_nodes.py | 44 +++--- cardano_node_tests/utils/cluster_scripts.py | 12 +- cardano_node_tests/utils/clusterlib_utils.py | 48 +++--- cardano_node_tests/utils/configuration.py | 5 +- cardano_node_tests/utils/dbsync_check_tx.py | 23 ++- cardano_node_tests/utils/dbsync_conn.py | 2 +- cardano_node_tests/utils/dbsync_queries.py | 10 +- cardano_node_tests/utils/dbsync_types.py | 60 +++---- cardano_node_tests/utils/dbsync_utils.py | 32 ++-- cardano_node_tests/utils/faucet.py | 10 +- cardano_node_tests/utils/gh_issue.py | 2 +- cardano_node_tests/utils/governance_setup.py | 22 +-- cardano_node_tests/utils/governance_utils.py | 100 ++++++------ cardano_node_tests/utils/helpers.py | 12 +- cardano_node_tests/utils/logfiles.py | 34 ++-- cardano_node_tests/utils/poll_utils.py | 5 +- cardano_node_tests/utils/requirements.py | 8 +- cardano_node_tests/utils/submit_api.py | 3 +- cardano_node_tests/utils/submit_utils.py | 2 +- cardano_node_tests/utils/testnet_cleanup.py | 10 +- cardano_node_tests/utils/tx_view.py | 16 +- cardano_node_tests/utils/types.py | 7 +- cardano_node_tests/utils/versions.py | 2 +- 116 files changed, 1050 insertions(+), 1174 deletions(-) diff --git a/cardano_node_tests/cardano_cli_coverage.py b/cardano_node_tests/cardano_cli_coverage.py index 982a13ad9..13bac1b19 100755 --- a/cardano_node_tests/cardano_cli_coverage.py +++ 
b/cardano_node_tests/cardano_cli_coverage.py @@ -111,7 +111,7 @@ def cli(cli_args: tp.Iterable[str]) -> str: return stderr.decode() -def parse_cmd_output(output: str) -> tp.List[str]: +def parse_cmd_output(output: str) -> list[str]: """Parse `cardano-cli` command output, return sub-commands and options names.""" section_start = False cli_args = [] @@ -165,7 +165,7 @@ def get_log_coverage(log_file: pl.Path) -> dict: return coverage_dict -def get_coverage(coverage_files: tp.List[pl.Path], available_commands: dict) -> dict: +def get_coverage(coverage_files: list[pl.Path], available_commands: dict) -> dict: """Get coverage info by merging available data.""" coverage_dict = copy.deepcopy(available_commands) for in_coverage in coverage_files: @@ -186,7 +186,7 @@ def get_coverage(coverage_files: tp.List[pl.Path], available_commands: dict) -> def get_report( arg_name: str, coverage: dict, uncovered_only: bool = False -) -> tp.Tuple[dict, int, int]: +) -> tuple[dict, int, int]: """Generate coverage report.""" uncovered_db: dict = {} covered_count = 0 diff --git a/cardano_node_tests/cluster_management/cache.py b/cardano_node_tests/cluster_management/cache.py index 16b20bf78..1df62e1db 100644 --- a/cardano_node_tests/cluster_management/cache.py +++ b/cardano_node_tests/cluster_management/cache.py @@ -23,10 +23,10 @@ class CacheManager: """Set of cache management methods.""" # every pytest worker has its own cache, i.e. 
this cache is local to single worker - cache: tp.ClassVar[tp.Dict[int, ClusterManagerCache]] = {} + cache: tp.ClassVar[dict[int, ClusterManagerCache]] = {} @classmethod - def get_cache(cls) -> tp.Dict[int, ClusterManagerCache]: + def get_cache(cls) -> dict[int, ClusterManagerCache]: return cls.cache @classmethod diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index d114527cd..5fe872e38 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -380,8 +380,8 @@ def _on_marked_test_stop(self, instance_num: int, mark: str) -> None: f.unlink() def _get_marked_tests_status( - self, marked_tests_cache: tp.Dict[int, tp.Dict[str, int]], instance_num: int - ) -> tp.Dict[str, int]: + self, marked_tests_cache: dict[int, dict[str, int]], instance_num: int + ) -> dict[str, int]: """Return marked tests status for cluster instance.""" if instance_num not in marked_tests_cache: marked_tests_cache[instance_num] = {} @@ -390,7 +390,7 @@ def _get_marked_tests_status( def _update_marked_tests( self, - marked_tests_cache: tp.Dict[int, tp.Dict[str, int]], + marked_tests_cache: dict[int, dict[str, int]], cget_status: _ClusterGetStatus, ) -> None: """Update status about running of marked test. 
@@ -809,7 +809,7 @@ def get_cluster_instance( # noqa: C901 start_cmd=start_cmd, current_test=os.environ.get("PYTEST_CURRENT_TEST") or "", ) - marked_tests_cache: tp.Dict[int, tp.Dict[str, int]] = {} + marked_tests_cache: dict[int, dict[str, int]] = {} self.log(f"want to run test '{cget_status.current_test}'") diff --git a/cardano_node_tests/cluster_management/common.py b/cardano_node_tests/cluster_management/common.py index 076188e3d..469bcb926 100644 --- a/cardano_node_tests/cluster_management/common.py +++ b/cardano_node_tests/cluster_management/common.py @@ -35,7 +35,7 @@ def _get_res(path: pl.Path) -> str: return out.group(1) -def get_resources_from_path(paths: tp.Iterator[pl.Path]) -> tp.List[str]: +def get_resources_from_path(paths: tp.Iterator[pl.Path]) -> list[str]: """Get resources names from status files path.""" resources = [_get_res(p) for p in paths] return resources diff --git a/cardano_node_tests/cluster_management/manager.py b/cardano_node_tests/cluster_management/manager.py index c7a2615cc..e1dbc86ee 100644 --- a/cardano_node_tests/cluster_management/manager.py +++ b/cardano_node_tests/cluster_management/manager.py @@ -291,7 +291,7 @@ def _get_resources_by_glob( self, glob: str, from_set: tp.Optional[tp.Iterable[str]] = None, - ) -> tp.List[str]: + ) -> list[str]: if from_set is not None and isinstance(from_set, str): msg = "`from_set` cannot be a string" raise AssertionError(msg) @@ -307,7 +307,7 @@ def get_locked_resources( self, from_set: tp.Optional[tp.Iterable[str]] = None, worker_id: tp.Optional[str] = None, - ) -> tp.List[str]: + ) -> list[str]: """Get resources locked by worker. It is possible to use glob patterns for `worker_id` (e.g. `worker_id="*"`). @@ -319,7 +319,7 @@ def get_used_resources( self, from_set: tp.Optional[tp.Iterable[str]] = None, worker_id: tp.Optional[str] = None, - ) -> tp.List[str]: + ) -> list[str]: """Get resources used by worker. It is possible to use glob patterns for `worker_id` (e.g. `worker_id="*"`). 
diff --git a/cardano_node_tests/cluster_management/resources.py b/cardano_node_tests/cluster_management/resources.py index 58e1e51b3..0b47259ea 100644 --- a/cardano_node_tests/cluster_management/resources.py +++ b/cardano_node_tests/cluster_management/resources.py @@ -13,7 +13,7 @@ class Resources: POOL1: tp.Final[str] = "node-pool1" POOL2: tp.Final[str] = "node-pool2" POOL3: tp.Final[str] = "node-pool3" - ALL_POOLS: tp.Final[tp.Tuple[str, ...]] = tuple( + ALL_POOLS: tp.Final[tuple[str, ...]] = tuple( f"node-pool{i}" for i in range(1, configuration.NUM_POOLS + 1) ) # Reserve one pool for all tests where the pool will stop producing blocks @@ -21,7 +21,7 @@ class Resources: RESERVES: tp.Final[str] = "reserves" TREASURY: tp.Final[str] = "treasury" REWARDS: tp.Final[str] = "rewards" - POTS: tp.Final[tp.Tuple[str, ...]] = (RESERVES, TREASURY, REWARDS) + POTS: tp.Final[tuple[str, ...]] = (RESERVES, TREASURY, REWARDS) PERF: tp.Final[str] = "performance" DREPS: tp.Final[str] = "dreps" COMMITTEE: tp.Final[str] = "committee" @@ -36,6 +36,6 @@ def sanitize_res_name(s: str) -> str: return sanitized -def get_unsanitized(ls: tp.Iterable[str]) -> tp.List[str]: +def get_unsanitized(ls: tp.Iterable[str]) -> list[str]: """Return unsanitized resource names from the list.""" return [s for s in ls if s != sanitize_res_name(s)] diff --git a/cardano_node_tests/cluster_management/resources_management.py b/cardano_node_tests/cluster_management/resources_management.py index 23a3c2da0..16870edfe 100644 --- a/cardano_node_tests/cluster_management/resources_management.py +++ b/cardano_node_tests/cluster_management/resources_management.py @@ -11,7 +11,7 @@ def __init__(self, resources: tp.Iterable[str]): assert not isinstance(resources, str), "`resources` can't be single string" self.resources = resources - def filter(self, unavailable: tp.Iterable[str], **kwargs: tp.Any) -> tp.List[str]: + def filter(self, unavailable: tp.Iterable[str], **kwargs: tp.Any) -> list[str]: """Filter resources.""" 
raise NotImplementedError @@ -26,7 +26,7 @@ def filter( self, unavailable: tp.Iterable[str], **kwargs: tp.Any, # noqa: ARG002 - ) -> tp.List[str]: + ) -> list[str]: assert not isinstance(unavailable, str), "`unavailable` can't be single string" usable = [r for r in self.resources if r not in unavailable] @@ -36,13 +36,13 @@ def filter( return [random.choice(usable)] -ResourcesType = tp.Iterable[tp.Union[str, BaseFilter]] +ResourcesType = tp.Iterable[str | BaseFilter] def get_resources( resources: ResourcesType, unavailable: tp.Iterable[str], -) -> tp.List[str]: +) -> list[str]: """Get resources that can be used or locked.""" # The "named resources", i.e. resources specified by string, are always mandatory. # If any of these is not available, the selection cannot continue. @@ -56,7 +56,7 @@ def get_resources( # If any of the filters returns empty list, the selection cannot continue. already_unavailable = {*unavailable, *named_resources} resources_w_filter = [r for r in resources if not isinstance(r, str)] - selected_resources: tp.List[str] = [] + selected_resources: list[str] = [] for res_filter in resources_w_filter: filtered = res_filter.filter(unavailable=[*already_unavailable, *selected_resources]) if not filtered: diff --git a/cardano_node_tests/tests/common.py b/cardano_node_tests/tests/common.py index e60b1d6c7..fb130baa6 100644 --- a/cardano_node_tests/tests/common.py +++ b/cardano_node_tests/tests/common.py @@ -177,10 +177,10 @@ def get_test_id(cluster_obj: clusterlib.ClusterLib) -> str: def get_nodes_missing_utxos( cluster_obj: clusterlib.ClusterLib, - utxos: tp.List[clusterlib.UTXOData], -) -> tp.Set[str]: + utxos: list[clusterlib.UTXOData], +) -> set[str]: """Return set of nodes that don't have the given UTxOs.""" - missing_nodes: tp.Set[str] = set() + missing_nodes: set[str] = set() known_nodes = cluster_nodes.get_cluster_type().NODES # Skip the check if there is only one node @@ -208,7 +208,7 @@ def get_nodes_missing_utxos( def check_missing_utxos( 
cluster_obj: clusterlib.ClusterLib, - utxos: tp.List[clusterlib.UTXOData], + utxos: list[clusterlib.UTXOData], ) -> None: """Fail if any node is missing the given UTxOs.""" missing_nodes = get_nodes_missing_utxos(cluster_obj=cluster_obj, utxos=utxos) @@ -222,10 +222,10 @@ def detect_fork( cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, temp_template: str, -) -> tp.Tuple[tp.Set[str], tp.Set[str]]: +) -> tuple[set[str], set[str]]: """Detect if one or more nodes have forked blockchain or is out of sync.""" - forked_nodes: tp.Set[str] = set() - unsynced_nodes: tp.Set[str] = set() + forked_nodes: set[str] = set() + unsynced_nodes: set[str] = set() known_nodes = cluster_nodes.get_cluster_type().NODES if len(known_nodes) <= 1: diff --git a/cardano_node_tests/tests/conftest.py b/cardano_node_tests/tests/conftest.py index c51f591c6..3b7c10e46 100644 --- a/cardano_node_tests/tests/conftest.py +++ b/cardano_node_tests/tests/conftest.py @@ -255,7 +255,7 @@ def _save_env_for_allure(pytest_config: Config) -> None: return alluredir = configuration.LAUNCH_PATH / alluredir - metadata: tp.Dict[str, tp.Any] = pytest_config.stash[metadata_key] # type: ignore + metadata: dict[str, tp.Any] = pytest_config.stash[metadata_key] # type: ignore with open(alluredir / "environment.properties", "w+", encoding="utf-8") as infile: for k, v in metadata.items(): if isinstance(v, dict): @@ -435,7 +435,7 @@ def cluster_singleton( @pytest.fixture def cluster_lock_pool( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Lock any pool and return instance of `clusterlib.ClusterLib`.""" cluster_obj = cluster_manager.get( lock_resources=[ @@ -451,7 +451,7 @@ def cluster_lock_pool( @pytest.fixture def cluster_use_pool( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Mark any pool as "in 
use" and return instance of `clusterlib.ClusterLib`.""" cluster_obj = cluster_manager.get( use_resources=[ diff --git a/cardano_node_tests/tests/delegation.py b/cardano_node_tests/tests/delegation.py index 15179ec56..fb250b739 100644 --- a/cardano_node_tests/tests/delegation.py +++ b/cardano_node_tests/tests/delegation.py @@ -58,7 +58,7 @@ def get_pool_id( def cluster_and_pool( cluster_manager: cluster_management.ClusterManager, use_resources: resources_management.ResourcesType = (), -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Return instance of `clusterlib.ClusterLib`, and pool id to delegate to. We need to mark the pool as "in use" when requesting local cluster @@ -106,7 +106,7 @@ def cluster_and_pool( def db_check_delegation( - pool_user: tp.Union[clusterlib.PoolUser, PoolUserScript], + pool_user: clusterlib.PoolUser | PoolUserScript, db_record: tp.Optional[dbsync_types.TxRecord], deleg_epoch: int, pool_id: str, @@ -167,7 +167,7 @@ def delegate_stake_addr( ) # create stake address delegation cert - deleg_kwargs: tp.Dict[str, tp.Any] = { + deleg_kwargs: dict[str, tp.Any] = { "addr_name": f"{temp_template}_addr0", "stake_vkey_file": pool_user.stake.vkey_file, "always_abstain": True, diff --git a/cardano_node_tests/tests/kes.py b/cardano_node_tests/tests/kes.py index 5b578c2e6..697fa3d3a 100644 --- a/cardano_node_tests/tests/kes.py +++ b/cardano_node_tests/tests/kes.py @@ -25,12 +25,12 @@ class KesScenarios: def check_kes_period_info_result( # noqa: C901 cluster_obj: clusterlib.ClusterLib, - kes_output: tp.Dict[str, tp.Any], + kes_output: dict[str, tp.Any], expected_scenario: str, check_id: str, expected_start_kes: tp.Optional[int] = None, pool_num: tp.Optional[int] = None, -) -> tp.List[str]: +) -> list[str]: """Check output `kes-period-info` command. When `pool_num` is specified, prometheus metrics are checked. 
@@ -40,7 +40,7 @@ def check_kes_period_info_result( # noqa: C901 errors = [] # Get command metrics - command_metrics: tp.Dict[str, tp.Any] = kes_output["metrics"] or {} + command_metrics: dict[str, tp.Any] = kes_output["metrics"] or {} # Check kes metrics with values in genesis if command_metrics["qKesMaxKESEvolutions"] != cluster_obj.max_kes_evolutions: @@ -77,7 +77,7 @@ def check_kes_period_info_result( # noqa: C901 ) # Get prometheus metrics - prometheus_metrics: tp.Dict[str, tp.Any] = {} + prometheus_metrics: dict[str, tp.Any] = {} if pool_num and expected_scenario in ( KesScenarios.ALL_VALID, KesScenarios.INVALID_COUNTERS, @@ -104,7 +104,7 @@ def check_kes_period_info_result( # noqa: C901 } # Check kes metrics with expected values - expected_metrics: tp.Dict[str, tp.Any] = { + expected_metrics: dict[str, tp.Any] = { "qKesCurrentKesPeriod": cluster_obj.g_query.get_kes_period(), } if expected_start_kes is not None: @@ -189,7 +189,7 @@ def check_kes_period_info_result( # noqa: C901 return errors -def get_xfails(errors: tp.List[str]) -> tp.List[blockers.GH]: +def get_xfails(errors: list[str]) -> list[blockers.GH]: """Get xfail issues. Either all errors can Xfail, or none of them can. 
There can be only one outcome of a test, @@ -212,7 +212,7 @@ def get_xfails(errors: tp.List[str]) -> tp.List[blockers.GH]: return xfails -def finish_on_errors(errors: tp.List[str]) -> None: +def finish_on_errors(errors: list[str]) -> None: """Fail or Xfail the test if there are errors.""" if not errors: return diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 067ab5d10..756672dbc 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -492,7 +492,7 @@ class ScriptCost: def check_plutus_costs( - plutus_costs: tp.List[dict], expected_costs: tp.List[ExecutionCost], frac: float = 0.15 + plutus_costs: list[dict], expected_costs: list[ExecutionCost], frac: float = 0.15 ) -> None: """Check plutus transaction cost. diff --git a/cardano_node_tests/tests/test_addr_registration.py b/cardano_node_tests/tests/test_addr_registration.py index 9a2b0ecb9..1ce8f02b1 100644 --- a/cardano_node_tests/tests/test_addr_registration.py +++ b/cardano_node_tests/tests/test_addr_registration.py @@ -1,7 +1,6 @@ """Tests for stake address registration.""" import logging -import typing as tp import allure import pytest @@ -24,7 +23,7 @@ def pool_users( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create pool users.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -50,7 +49,7 @@ def pool_users( @pytest.fixture def pool_users_disposable( cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create function scoped pool users.""" test_id = common.get_test_id(cluster) pool_users = clusterlib_utils.create_pool_users( @@ -72,8 +71,8 @@ class TestRegisterAddr: def test_deregister_registered( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: 
tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Deregister a registered stake address. @@ -205,8 +204,8 @@ def _build_dereg() -> clusterlib.TxRawOutput: def test_addr_registration_deregistration( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Submit registration and deregistration certificates in single TX. @@ -299,8 +298,8 @@ def test_addr_registration_deregistration( def test_addr_registration_certificate_order( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], use_build_cmd: bool, submit_method: str, ): @@ -394,7 +393,7 @@ class TestNegative: def test_registration_cert_with_wrong_key( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to generate stake address registration certificate using wrong stake vkey. @@ -418,8 +417,8 @@ def test_registration_cert_with_wrong_key( def test_register_addr_with_wrong_key( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], ): """Try to register stake address using wrong payment skey. 
@@ -457,8 +456,8 @@ def test_register_addr_with_wrong_key( def test_deregister_not_registered_addr( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Deregister not registered stake address.""" diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index 5be1ec6e3..fc162e1b7 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -11,7 +11,6 @@ import signal import sqlite3 import time -import typing as tp import allure import pytest @@ -42,7 +41,7 @@ class TestLeadershipSchedule: def test_pool_blocks( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, - cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_use_pool: tuple[clusterlib.ClusterLib, str], for_epoch: str, ): """Check that blocks were minted according to leadership schedule. 
@@ -99,11 +98,11 @@ def test_pool_blocks( # noqa: C901 state_name=temp_template, ledger_state=ledger_state, ) - blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] + blocks_before: dict[str, int] = ledger_state["blocksBefore"] pool_id_dec = helpers.decode_bech32(pool_id) minted_blocks_ledger = blocks_before.get(pool_id_dec) or 0 - errors: tp.List[str] = [] + errors: list[str] = [] def _check_logs() -> None: # Get info about minted blocks in queried epoch for the selected pool @@ -245,7 +244,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -273,7 +272,7 @@ def test_block_production( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Record number of blocks produced by each pool over multiple epochs. 
@@ -337,7 +336,7 @@ def _save_state(curr_epoch: int) -> None: state_name=f"{temp_template}_epoch{curr_epoch}", ledger_state=ledger_state, ) - blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] + blocks_before: dict[str, int] = ledger_state["blocksBefore"] # save blocks data to sqlite db cur = conn.cursor() @@ -440,7 +439,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" cluster = cluster_singleton @@ -463,7 +462,7 @@ def test_dynamic_block_production( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Check dynamic block production. @@ -491,14 +490,14 @@ def test_dynamic_block_production( ) ) - def _save_state(curr_epoch: int) -> tp.Dict[str, int]: + def _save_state(curr_epoch: int) -> dict[str, int]: ledger_state = clusterlib_utils.get_ledger_state(cluster_obj=cluster) clusterlib_utils.save_ledger_state( cluster_obj=cluster, state_name=f"{temp_template}_epoch{curr_epoch}", ledger_state=ledger_state, ) - blocks_before: tp.Dict[str, int] = ledger_state["blocksBefore"] + blocks_before: dict[str, int] = ledger_state["blocksBefore"] return blocks_before # The network needs to be at least in epoch 1 diff --git a/cardano_node_tests/tests/test_chain_transactions.py b/cardano_node_tests/tests/test_chain_transactions.py index 85a8511ac..fbe257576 100644 --- a/cardano_node_tests/tests/test_chain_transactions.py +++ b/cardano_node_tests/tests/test_chain_transactions.py @@ -51,7 +51,7 @@ def _gen_signed_tx( tx_name: str, fee: int, invalid_hereafter: tp.Optional[int] = None, -) -> tp.Tuple[clusterlib.UTXOData, clusterlib.TxRawOutput, pl.Path]: +) -> tuple[clusterlib.UTXOData, clusterlib.TxRawOutput, pl.Path]: """Generate Tx 
and return Tx output in a format that can be used as input for next Tx.""" send_amount = txin.amount - fee out_file = f"{tx_name}_tx.body" @@ -139,7 +139,7 @@ def test_tx_chaining( iterations = 1_000 min_utxo_value = 1_000_000 - tx_raw_outputs: tp.List[clusterlib.TxRawOutput] = [] + tx_raw_outputs: list[clusterlib.TxRawOutput] = [] submit_err = "" # It can happen that a Tx is removed from mempool without making it to the blockchain. diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index 615f8fb19..0247a09c7 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -7,7 +7,6 @@ import pathlib as pl import string import time -import typing as tp import allure import hypothesis @@ -1130,7 +1129,7 @@ def _dump_on_error(): node_4895.finish_test() @pytest.fixture - def pool_ids(self, cluster: clusterlib.ClusterLib) -> tp.List[str]: + def pool_ids(self, cluster: clusterlib.ClusterLib) -> list[str]: stake_pool_ids = cluster.g_query.get_stake_pools() if not stake_pool_ids: pytest.skip("No stake pools are available.") @@ -1191,7 +1190,7 @@ def test_stake_snapshot( @allure.link(helpers.get_vcs_link()) @pytest.mark.smoke @pytest.mark.testnets - def test_pool_params(self, cluster: clusterlib.ClusterLib, pool_ids: tp.List[str]): + def test_pool_params(self, cluster: clusterlib.ClusterLib, pool_ids: list[str]): """Test `query pool-params`.""" common.get_test_id(cluster) @@ -1245,7 +1244,7 @@ def test_tx_mempool_info( @allure.link(helpers.get_vcs_link()) @pytest.mark.smoke @pytest.mark.testnets - def test_pool_state(self, cluster: clusterlib.ClusterLib, pool_ids: tp.List[str]): + def test_pool_state(self, cluster: clusterlib.ClusterLib, pool_ids: list[str]): """Test `query pool-state`.""" common.get_test_id(cluster) diff --git a/cardano_node_tests/tests/test_dbsync.py b/cardano_node_tests/tests/test_dbsync.py index 206f5d38e..83a5e8792 100644 --- a/cardano_node_tests/tests/test_dbsync.py +++ 
b/cardano_node_tests/tests/test_dbsync.py @@ -30,7 +30,7 @@ class TestDBSync: """General db-sync tests.""" - DBSYNC_TABLES: tp.Final[tp.Set[str]] = { + DBSYNC_TABLES: tp.Final[set[str]] = { "ada_pots", "block", "collateral_tx_in", @@ -134,7 +134,7 @@ def test_blocks(self, cluster: clusterlib.ClusterLib): # noqa: C901 rec = None prev_rec = None - errors: tp.List[str] = [] + errors: list[str] = [] for rec in dbsync_queries.query_blocks(epoch_from=epoch_from): if not prev_rec: prev_rec = rec diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index 3a6343c98..e984289b3 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -1,7 +1,6 @@ """Tests for stake address delegation.""" import logging -import typing as tp import allure import pytest @@ -23,14 +22,14 @@ @pytest.fixture def cluster_and_pool( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: return delegation.cluster_and_pool(cluster_manager=cluster_manager) @pytest.fixture def cluster_and_pool_and_rewards( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: return delegation.cluster_and_pool( cluster_manager=cluster_manager, use_resources=[cluster_management.Resources.REWARDS] ) @@ -39,7 +38,7 @@ def cluster_and_pool_and_rewards( @pytest.fixture def cluster_and_two_pools( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str, str]: +) -> tuple[clusterlib.ClusterLib, str, str]: """Return instance of `clusterlib.ClusterLib` and two pools.""" cluster_obj = cluster_manager.get( use_resources=[ @@ -64,7 +63,7 @@ def cluster_and_two_pools( def pool_users( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: 
"""Create pool users.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -90,7 +89,7 @@ def pool_users( @pytest.fixture def pool_users_disposable( cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create function scoped pool users.""" test_id = common.get_test_id(cluster) pool_users = clusterlib_utils.create_pool_users( @@ -104,8 +103,8 @@ def pool_users_disposable( @pytest.fixture def pool_users_cluster_and_pool( cluster_manager: cluster_management.ClusterManager, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], -) -> tp.List[clusterlib.PoolUser]: + cluster_and_pool: tuple[clusterlib.ClusterLib, str], +) -> list[clusterlib.PoolUser]: """Create pool users using `cluster_and_pool` fixture. .. warning:: @@ -136,8 +135,8 @@ def pool_users_cluster_and_pool( @pytest.fixture def pool_users_disposable_cluster_and_pool( - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], -) -> tp.List[clusterlib.PoolUser]: + cluster_and_pool: tuple[clusterlib.ClusterLib, str], +) -> list[clusterlib.PoolUser]: """Create function scoped pool users using `cluster_and_pool` fixture.""" cluster, *__ = cluster_and_pool test_id = common.get_test_id(cluster) @@ -160,7 +159,7 @@ class TestDelegateAddr: def test_delegate_using_pool_id( self, cluster_manager: cluster_management.ClusterManager, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], use_build_cmd: bool, ): """Submit registration certificate and delegate to pool using pool id. @@ -203,7 +202,7 @@ def test_delegate_using_pool_id( def test_delegate_using_vkey( self, cluster_manager: cluster_management.ClusterManager, - cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_use_pool: tuple[clusterlib.ClusterLib, str], use_build_cmd: bool, ): """Submit registration certificate and delegate to pool using cold vkey. 
@@ -246,7 +245,7 @@ def test_delegate_using_vkey( def test_multi_delegation( self, cluster_manager: cluster_management.ClusterManager, - cluster_and_two_pools: tp.Tuple[clusterlib.ClusterLib, str, str], + cluster_and_two_pools: tuple[clusterlib.ClusterLib, str, str], ): """Delegate multiple stake addresses that share the same payment keys to multiple pools. @@ -401,7 +400,7 @@ def _get_pool_id(idx: int) -> str: def test_deregister_delegated( self, cluster_manager: cluster_management.ClusterManager, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ): """Deregister a delegated stake address. @@ -567,7 +566,7 @@ def test_deregister_delegated( def test_undelegate( self, cluster_manager: cluster_management.ClusterManager, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ): """Undelegate stake address. @@ -709,9 +708,9 @@ def test_undelegate( @pytest.mark.testnets def test_addr_delegation_deregistration( self, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users_cluster_and_pool: tp.List[clusterlib.PoolUser], - pool_users_disposable_cluster_and_pool: tp.List[clusterlib.PoolUser], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], + pool_users_cluster_and_pool: list[clusterlib.PoolUser], + pool_users_disposable_cluster_and_pool: list[clusterlib.PoolUser], stake_cert: str, use_build_cmd: bool, ): @@ -864,8 +863,8 @@ class TestNegative: @pytest.mark.testnets def test_delegation_cert_with_wrong_key( self, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users_cluster_and_pool: tp.List[clusterlib.PoolUser], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], + pool_users_cluster_and_pool: list[clusterlib.PoolUser], ): """Try to generate stake address delegation certificate using wrong stake vkey. 
@@ -893,9 +892,9 @@ def test_delegation_cert_with_wrong_key( @pytest.mark.testnets def test_delegate_addr_with_wrong_key( self, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users_cluster_and_pool: tp.List[clusterlib.PoolUser], - pool_users_disposable_cluster_and_pool: tp.List[clusterlib.PoolUser], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], + pool_users_cluster_and_pool: list[clusterlib.PoolUser], + pool_users_disposable_cluster_and_pool: list[clusterlib.PoolUser], ): """Try to delegate stake address using wrong payment skey. @@ -957,9 +956,9 @@ def test_delegate_addr_with_wrong_key( @pytest.mark.testnets def test_delegate_unknown_addr( self, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users_cluster_and_pool: tp.List[clusterlib.PoolUser], - pool_users_disposable_cluster_and_pool: tp.List[clusterlib.PoolUser], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], + pool_users_cluster_and_pool: list[clusterlib.PoolUser], + pool_users_disposable_cluster_and_pool: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to delegate unknown stake address. @@ -1018,9 +1017,9 @@ def test_delegate_unknown_addr( @pytest.mark.testnets def test_delegate_deregistered_addr( self, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users_cluster_and_pool: tp.List[clusterlib.PoolUser], - pool_users_disposable_cluster_and_pool: tp.List[clusterlib.PoolUser], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], + pool_users_cluster_and_pool: list[clusterlib.PoolUser], + pool_users_disposable_cluster_and_pool: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to delegate deregistered stake address. 
@@ -1125,8 +1124,8 @@ def test_delegate_deregistered_addr( def test_delegatee_not_registered( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - pool_users_disposable: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], + pool_users_disposable: list[clusterlib.PoolUser], ): """Try to delegate stake address to unregistered pool. diff --git a/cardano_node_tests/tests/test_env_network_id.py b/cardano_node_tests/tests/test_env_network_id.py index 82486a4b6..5aa185880 100644 --- a/cardano_node_tests/tests/test_env_network_id.py +++ b/cardano_node_tests/tests/test_env_network_id.py @@ -99,7 +99,7 @@ def payment_addrs( set_network_id_env: None, # noqa: ARG001 cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" # pylint: disable=unused-argument with cluster_manager.cache_fixture() as fixture_cache: @@ -208,7 +208,7 @@ def test_build_transfer_funds( skip_on_no_env: None, # noqa: ARG002 set_network_id_env: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Send funds to payment address. 
@@ -427,7 +427,7 @@ def test_neg_build_transfer_funds( skip_on_no_env: None, # noqa: ARG002 set_network_id_env: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], env_scenario: str, arg_scenario: str, ): diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index 67cf1041b..a83e8127d 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -99,7 +99,7 @@ def _check_block_production( temp_template: str, pool_id_dec: str, in_epoch: int, -) -> tp.Tuple[int, bool]: +) -> tuple[int, bool]: cluster_obj.wait_for_epoch(epoch_no=in_epoch) clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster_obj, @@ -196,7 +196,7 @@ def _save_all_period_info(temp_template: str) -> None: ] ) - def _refresh_opcerts() -> tp.Dict[str, int]: + def _refresh_opcerts() -> dict[str, int]: """Refresh opcert on pools that are not supposed to expire.""" refreshed_nodes_kes_period = {} @@ -218,8 +218,8 @@ def _refresh_opcerts() -> tp.Dict[str, int]: return refreshed_nodes_kes_period def _check_kes_period_info( - refreshed_nodes_kes_period: tp.Dict[str, int], - ) -> tp.List[str]: + refreshed_nodes_kes_period: dict[str, int], + ) -> list[str]: errors = [] # Check kes-period-info with an operational certificate with KES expired kes_info_expired = cluster.g_query.get_kes_period_info( diff --git a/cardano_node_tests/tests/test_ledger_state.py b/cardano_node_tests/tests/test_ledger_state.py index 8c3ad68c3..e4dfef822 100644 --- a/cardano_node_tests/tests/test_ledger_state.py +++ b/cardano_node_tests/tests/test_ledger_state.py @@ -3,7 +3,6 @@ import functools import itertools import logging -import typing as tp import allure import pytest @@ -65,13 +64,13 @@ def test_stake_snapshot(self, cluster: clusterlib.ClusterLib): # noqa: C901 ) es_snapshot: dict = ledger_state["stateBefore"]["esSnapshots"] - def _get_hashes(snapshot: str) 
-> tp.Dict[str, int]: + def _get_hashes(snapshot: str) -> dict[str, int]: hashes: dict = clusterlib_utils.get_snapshot_rec( ledger_snapshot=es_snapshot[snapshot]["stake"] ) return hashes - def _get_delegations(snapshot: str) -> tp.Dict[str, tp.List[str]]: + def _get_delegations(snapshot: str) -> dict[str, list[str]]: delegations: dict = clusterlib_utils.get_snapshot_delegations( ledger_snapshot=es_snapshot[snapshot]["delegations"] ) @@ -125,9 +124,9 @@ def _get_delegations(snapshot: str) -> tp.Dict[str, tp.List[str]]: ) sum_mark = sum_set = sum_go = 0 - seen_hashes_mark: tp.Set[str] = set() - seen_hashes_set: tp.Set[str] = set() - seen_hashes_go: tp.Set[str] = set() + seen_hashes_mark: set[str] = set() + seen_hashes_set: set[str] = set() + seen_hashes_go: set[str] = set() delegation_pool_ids = {*delegations_mark, *delegations_set, *delegations_go} for pool_id_dec in delegation_pool_ids: pool_id = helpers.encode_bech32(prefix="pool", data=pool_id_dec) diff --git a/cardano_node_tests/tests/test_metrics.py b/cardano_node_tests/tests/test_metrics.py index fa9cfb5ce..681a3f2e8 100644 --- a/cardano_node_tests/tests/test_metrics.py +++ b/cardano_node_tests/tests/test_metrics.py @@ -44,7 +44,7 @@ def get_ekg_metrics(port: int) -> requests.Response: class TestPrometheus: """Prometheus metrics tests.""" - EXPECTED_METRICS: tp.Final[tp.Set[str]] = { + EXPECTED_METRICS: tp.Final[set[str]] = { "cardano_node_metrics_Forge_adopted_int", "cardano_node_metrics_Forge_forge_about_to_lead_int", "cardano_node_metrics_Forge_forged_int", diff --git a/cardano_node_tests/tests/test_mir_certs.py b/cardano_node_tests/tests/test_mir_certs.py index a9af7636c..5fc36cb2e 100644 --- a/cardano_node_tests/tests/test_mir_certs.py +++ b/cardano_node_tests/tests/test_mir_certs.py @@ -2,7 +2,6 @@ import logging import time -import typing as tp import allure import pytest @@ -29,9 +28,7 @@ TREASURY = "treasury" -def _wait_for_ada_pots( - epoch_from: int, expected_len: int = 2 -) -> 
tp.List[dbsync_queries.ADAPotsDBRow]: +def _wait_for_ada_pots(epoch_from: int, expected_len: int = 2) -> list[dbsync_queries.ADAPotsDBRow]: pots_records = [] for r in range(4): if r > 0: @@ -63,7 +60,7 @@ def cluster_pots( def pool_users( cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create pool user.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -90,8 +87,8 @@ def pool_users( def registered_users( cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], -) -> tp.List[clusterlib.PoolUser]: + pool_users: list[clusterlib.PoolUser], +) -> list[clusterlib.PoolUser]: """Register pool user's stake address.""" registered = pool_users[1:3] @@ -131,7 +128,7 @@ def test_transfer_to_treasury( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Send funds from the reserves pot to the treasury pot. @@ -206,7 +203,7 @@ def test_build_transfer_to_treasury( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Send funds from the reserves pot to the treasury pot. @@ -286,7 +283,7 @@ def test_transfer_to_reserves( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Send funds from the treasury pot to the reserves pot. 
@@ -361,7 +358,7 @@ def test_build_transfer_to_reserves( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Send funds from the treasury pot to the reserves pot. @@ -442,7 +439,7 @@ def test_pay_stake_addr_from( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - registered_users: tp.List[clusterlib.PoolUser], + registered_users: list[clusterlib.PoolUser], fund_src: str, ): """Send funds from the reserves or treasury pot to stake address. @@ -533,7 +530,7 @@ def test_build_pay_stake_addr_from( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - registered_users: tp.List[clusterlib.PoolUser], + registered_users: list[clusterlib.PoolUser], fund_src: str, ): """Send funds from the reserves or treasury pot to stake address. @@ -630,7 +627,7 @@ def test_pay_stake_addr_from_both( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - registered_users: tp.List[clusterlib.PoolUser], + registered_users: list[clusterlib.PoolUser], ): """Send funds from the reserves and treasury pots to stake address. @@ -763,7 +760,7 @@ def test_pay_multi_stake_addrs( skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - registered_users: tp.List[clusterlib.PoolUser], + registered_users: list[clusterlib.PoolUser], ): """Send funds from the reserves and treasury pots to multiple stake addresses in single TX. 
@@ -908,7 +905,7 @@ def test_pay_unregistered_stake_addr_from( # noqa: C901 skip_on_hf_shortcut: None, # pylint: disable=unused-argument # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster_pots: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], fund_src: str, addr_history: str, ): @@ -1039,7 +1036,7 @@ class TestNegativeMIRCerts: def test_exceed_pay_stake_addr_from( self, cluster_pots: clusterlib.ClusterLib, - registered_users: tp.List[clusterlib.PoolUser], + registered_users: list[clusterlib.PoolUser], fund_src: str, ): """Try to send more funds than available from the reserves or treasury pot to stake address. diff --git a/cardano_node_tests/tests/test_native_tokens.py b/cardano_node_tests/tests/test_native_tokens.py index b499e2173..493baa494 100644 --- a/cardano_node_tests/tests/test_native_tokens.py +++ b/cardano_node_tests/tests/test_native_tokens.py @@ -13,7 +13,6 @@ import pathlib as pl import re import time -import typing as tp import allure import hypothesis @@ -78,7 +77,7 @@ def issuers_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new issuers addresses.""" temp_template = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -100,8 +99,8 @@ def issuers_addrs( @pytest.fixture def simple_script_policyid( cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], -) -> tp.Tuple[pl.Path, str]: + issuers_addrs: list[clusterlib.AddressRecord], +) -> tuple[pl.Path, str]: """Return script and its PolicyId.""" temp_template = common.get_test_id(cluster) issuer_addr = issuers_addrs[1] @@ -121,8 +120,8 @@ def simple_script_policyid( @pytest.fixture def multisig_script_policyid( cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], -) -> tp.Tuple[pl.Path, str]: + 
issuers_addrs: list[clusterlib.AddressRecord], +) -> tuple[pl.Path, str]: """Return multisig script and it's PolicyId.""" temp_template = common.get_test_id(cluster) payment_vkey_files = [p.vkey_file for p in issuers_addrs] @@ -152,7 +151,7 @@ class TestMinting: def test_minting_and_burning_witnesses( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], aname_type: str, use_build_cmd: bool, submit_method: str, @@ -258,7 +257,7 @@ def test_minting_and_burning_witnesses( def test_minting_and_burning_sign( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], aname_type: str, use_build_cmd: bool, submit_method: str, @@ -352,7 +351,7 @@ def test_minting_and_burning_sign( def test_minting_multiple_scripts( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -472,7 +471,7 @@ def test_minting_multiple_scripts( def test_minting_burning_diff_tokens_single_tx( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -586,7 +585,7 @@ def test_minting_burning_diff_tokens_single_tx( def test_minting_burning_same_token_single_tx( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -710,9 +709,9 @@ def test_bundle_minting_and_burning_witnesses( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, worker_id: str, - issuers_addrs: tp.List[clusterlib.AddressRecord], - multisig_script_policyid: tp.Tuple[pl.Path, str], - tokens_db: tp.Tuple[int, int, int], + issuers_addrs: 
list[clusterlib.AddressRecord], + multisig_script_policyid: tuple[pl.Path, str], + tokens_db: tuple[int, int, int], use_build_cmd: bool, submit_method: str, ): @@ -860,9 +859,9 @@ def test_bundle_minting_and_burning_sign( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, worker_id: str, - issuers_addrs: tp.List[clusterlib.AddressRecord], - simple_script_policyid: tp.Tuple[pl.Path, str], - tokens_db: tp.Tuple[int, int, int], + issuers_addrs: list[clusterlib.AddressRecord], + simple_script_policyid: tuple[pl.Path, str], + tokens_db: tuple[int, int, int], use_build_cmd: bool, submit_method: str, ): @@ -1004,7 +1003,7 @@ def _mint_tokens() -> clusterlib.TxRawOutput: def test_minting_and_partial_burning( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1103,7 +1102,7 @@ def test_minting_and_partial_burning( def test_minting_unicode_asset_name( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1194,7 +1193,7 @@ class TestPolicies: def test_valid_policy_after( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, ): """Test minting and burning of tokens after a given slot, check fees in Lovelace.""" @@ -1286,7 +1285,7 @@ def test_valid_policy_after( def test_valid_policy_before( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, ): """Test minting and burning of tokens before a given slot, check fees in Lovelace.""" @@ -1376,7 +1375,7 @@ def test_valid_policy_before( @pytest.mark.smoke @pytest.mark.testnets def test_policy_before_past( - self, cluster: 
clusterlib.ClusterLib, issuers_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, issuers_addrs: list[clusterlib.AddressRecord] ): """Test that it's NOT possible to mint tokens when the "before" slot is in the past.""" temp_template = common.get_test_id(cluster) @@ -1447,7 +1446,7 @@ def test_policy_before_past( @pytest.mark.smoke @pytest.mark.testnets def test_policy_before_future( - self, cluster: clusterlib.ClusterLib, issuers_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, issuers_addrs: list[clusterlib.AddressRecord] ): """Test that it's NOT possible to mint tokens when the policy is not met. @@ -1510,7 +1509,7 @@ def test_policy_before_future( @pytest.mark.smoke @pytest.mark.testnets def test_policy_after_future( - self, cluster: clusterlib.ClusterLib, issuers_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, issuers_addrs: list[clusterlib.AddressRecord] ): """Test that it's NOT possible to mint tokens when the policy is not met. @@ -1584,7 +1583,7 @@ def test_policy_after_future( @pytest.mark.smoke @pytest.mark.testnets def test_policy_after_past( - self, cluster: clusterlib.ClusterLib, issuers_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, issuers_addrs: list[clusterlib.AddressRecord] ): """Test that it's NOT possible to mint tokens when the policy is not met. 
@@ -1655,7 +1654,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1684,7 +1683,7 @@ def new_token( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ) -> clusterlib_utils.TokenRecord: with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1717,7 +1716,7 @@ def new_token( def test_transfer_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], new_token: clusterlib_utils.TokenRecord, amount: int, use_build_cmd: bool, @@ -1850,7 +1849,7 @@ def test_transfer_tokens( def test_transfer_multiple_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], new_token: clusterlib_utils.TokenRecord, use_build_cmd: bool, ): @@ -2033,7 +2032,7 @@ def test_transfer_multiple_tokens( def test_transfer_no_ada( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], new_token: clusterlib_utils.TokenRecord, use_build_cmd: bool, ): @@ -2088,7 +2087,7 @@ def test_transfer_no_ada( def test_transfer_invalid_token_amount( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], new_token: clusterlib_utils.TokenRecord, use_build_cmd: bool, token_amount: int, @@ -2162,7 +2161,7 @@ class TestNegative: def _mint_tx( self, cluster_obj: clusterlib.ClusterLib, - new_tokens: tp.List[clusterlib_utils.TokenRecord], + new_tokens: 
list[clusterlib_utils.TokenRecord], temp_template: str, ) -> pl.Path: """Return signed TX for minting new token. Sign using skeys.""" @@ -2226,8 +2225,8 @@ def _mint_tx( def test_long_name( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], - simple_script_policyid: tp.Tuple[pl.Path, str], + issuers_addrs: list[clusterlib.AddressRecord], + simple_script_policyid: tuple[pl.Path, str], asset_name: str, ): """Try to create token with asset name that is longer than allowed. @@ -2273,8 +2272,8 @@ def test_long_name( def test_minting_amount_above_the_allowed( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], - simple_script_policyid: tp.Tuple[pl.Path, str], + issuers_addrs: list[clusterlib.AddressRecord], + simple_script_policyid: tuple[pl.Path, str], token_amount: int, ): """Test minting a token amount above the maximum allowed.""" @@ -2316,7 +2315,7 @@ class TestCLITxOutSyntax: @pytest.mark.testnets @pytest.mark.dbsync def test_multiasset_txouts_syntax( - self, cluster: clusterlib.ClusterLib, issuers_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, issuers_addrs: list[clusterlib.AddressRecord] ): """Test syntax for specifying multi-asset values and txouts via CLI. 
@@ -2467,7 +2466,7 @@ class TestReferenceUTxO: def test_script_reference_utxo( self, cluster: clusterlib.ClusterLib, - issuers_addrs: tp.List[clusterlib.AddressRecord], + issuers_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, script_version: str, ): diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index fa8ff13fa..3b120a9e6 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -5,7 +5,6 @@ import os import pathlib as pl import shutil -import typing as tp import allure import pytest @@ -59,7 +58,7 @@ def payment_addr_locked( def payment_addrs_disposable( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new disposable payment addresses.""" temp_template = common.get_test_id(cluster) @@ -152,7 +151,7 @@ def test_update_cost_models( def _propose_pparams_update( name_template: str, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], ) -> conway_common.PParamPropRec: anchor_url = f"http://www.pparam-action-{clusterlib.get_rand_str(4)}.com" anchor_data_hash = cluster.g_conway_governance.get_anchor_data_hash(text=anchor_url) @@ -364,7 +363,7 @@ def test_prepare_tx( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - payment_addrs_disposable: tp.List[clusterlib.AddressRecord], + payment_addrs_disposable: list[clusterlib.AddressRecord], use_build_cmd: bool, for_step: int, file_type: str, diff --git a/cardano_node_tests/tests/test_pool_saturation.py b/cardano_node_tests/tests/test_pool_saturation.py index 367fd6f66..97e00f6e7 100644 --- a/cardano_node_tests/tests/test_pool_saturation.py +++ b/cardano_node_tests/tests/test_pool_saturation.py @@ -3,7 +3,6 @@ import dataclasses import logging import pickle -import typing as tp import 
allure import pytest @@ -35,10 +34,10 @@ class PoolRecord: id_dec: str reward_addr: clusterlib.PoolUser delegation_out: delegation.DelegationOut - user_rewards: tp.List[RewardRecord] - owner_rewards: tp.List[RewardRecord] - blocks_minted: tp.Dict[int, int] - saturation_amounts: tp.Dict[int, int] + user_rewards: list[RewardRecord] + owner_rewards: list[RewardRecord] + blocks_minted: dict[int, int] + saturation_amounts: dict[int, int] @pytest.fixture @@ -74,11 +73,9 @@ def _get_saturation_threshold( return saturation_threshold -def _get_reward_per_block( - pool_record: PoolRecord, owner_rewards: bool = False -) -> tp.Dict[int, float]: +def _get_reward_per_block(pool_record: PoolRecord, owner_rewards: bool = False) -> dict[int, float]: """For each epoch calculate reward per block per staked Lovelace.""" - results: tp.Dict[int, float] = {} + results: dict[int, float] = {} rew_db = pool_record.user_rewards if owner_rewards: @@ -131,7 +128,7 @@ def _withdraw_rewards( return tx_raw_withdrawal_output -def _check_pool_records(pool_records: tp.Dict[int, PoolRecord]) -> None: +def _check_pool_records(pool_records: dict[int, PoolRecord]) -> None: """Check that pool records has expected values.""" pool1_user_rewards_per_block = _get_reward_per_block(pool_records[1]) pool2_user_rewards_per_block = _get_reward_per_block(pool_records[2]) @@ -244,7 +241,7 @@ def test_oversaturated( # noqa: C901 initial_balance = 1_000_000_000 faucet_rec = cluster_manager.cache.addrs_data["faucet"] - pool_records: tp.Dict[int, PoolRecord] = {} + pool_records: dict[int, PoolRecord] = {} def _save_pool_records() -> None: """Save debugging data in case of test failure.""" diff --git a/cardano_node_tests/tests/test_pools.py b/cardano_node_tests/tests/test_pools.py index 8e5c98b28..9fd021659 100644 --- a/cardano_node_tests/tests/test_pools.py +++ b/cardano_node_tests/tests/test_pools.py @@ -90,7 +90,7 @@ def _check_pool( def _check_staking( - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: 
list[clusterlib.PoolUser], cluster_obj: clusterlib.ClusterLib, stake_pool_id: str, ): @@ -122,14 +122,14 @@ def _check_staking( def _register_stake_pool_w_build( cluster_obj: clusterlib.ClusterLib, pool_data: clusterlib.PoolData, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], vrf_vkey_file: clusterlib.FileType, cold_key_pair: clusterlib.ColdKeyPair, tx_name: str, reward_account_vkey_file: tp.Optional[clusterlib.FileType] = None, deposit: tp.Optional[int] = None, destination_dir: clusterlib.FileType = ".", -) -> tp.Tuple[pl.Path, clusterlib.TxRawOutput]: +) -> tuple[pl.Path, clusterlib.TxRawOutput]: """Register a stake pool using a `transaction build` command. Args: @@ -199,7 +199,7 @@ def _register_stake_pool_w_build( def _create_stake_pool_w_build( cluster_obj: clusterlib.ClusterLib, pool_data: clusterlib.PoolData, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], tx_name: str, destination_dir: clusterlib.FileType = ".", ) -> clusterlib.PoolCreationOutput: @@ -265,13 +265,13 @@ def _create_stake_pool_w_build( def _deregister_stake_pool_w_build( cluster_obj: clusterlib.ClusterLib, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], cold_key_pair: clusterlib.ColdKeyPair, epoch: int, pool_name: str, tx_name: str, destination_dir: clusterlib.FileType = ".", -) -> tp.Tuple[pl.Path, clusterlib.TxRawOutput]: +) -> tuple[pl.Path, clusterlib.TxRawOutput]: """Deregister a stake pool. 
Args: @@ -333,7 +333,7 @@ def _create_register_pool( cluster_obj: clusterlib.ClusterLib, temp_template: str, temp_dir: pl.Path, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, request: tp.Optional[FixtureRequest] = None, use_build_cmd: bool = False, @@ -398,7 +398,7 @@ def _deregister(): def _create_register_pool_delegate_stake_tx( cluster_obj: clusterlib.ClusterLib, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], temp_template: str, temp_dir: pl.Path, pool_data: clusterlib.PoolData, @@ -528,7 +528,7 @@ def _deregister(): def _create_register_pool_tx_delegate_stake_tx( cluster_obj: clusterlib.ClusterLib, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], temp_template: str, temp_dir: pl.Path, pool_data: clusterlib.PoolData, @@ -1825,7 +1825,7 @@ def pool_owners_pbt( def test_stake_pool_low_cost( self, cluster_mincost: clusterlib.ClusterLib, - pool_owners_pbt: tp.List[clusterlib.PoolUser], + pool_owners_pbt: list[clusterlib.PoolUser], pool_cost: int, ): """Try to create and register a stake pool with pool cost lower than *minPoolCost*. 
@@ -1923,7 +1923,7 @@ def pool_users( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.PoolUser]: + ) -> list[clusterlib.PoolUser]: """Create pool users.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1960,7 +1960,7 @@ def pool_data(self) -> clusterlib.PoolData: def gen_pool_registration_cert_data( self, cluster: clusterlib.ClusterLib, - ) -> tp.Tuple[str, str, clusterlib.KeyPair, clusterlib.ColdKeyPair]: + ) -> tuple[str, str, clusterlib.KeyPair, clusterlib.ColdKeyPair]: rand_str = clusterlib.get_rand_str(3) pool_name = f"pool_{rand_str}" @@ -1988,7 +1988,7 @@ def gen_pool_registration_cert_data( def test_pool_registration_cert_wrong_vrf( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, ): """Try to generate pool registration certificate using wrong VRF key. @@ -2015,7 +2015,7 @@ def test_pool_registration_cert_wrong_vrf( def test_pool_registration_cert_wrong_cold( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, ): """Try to generate pool registration certificate using wrong Cold vkey. @@ -2042,7 +2042,7 @@ def test_pool_registration_cert_wrong_cold( def test_pool_registration_cert_wrong_stake( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, ): """Try to generate pool registration certificate using wrong stake vkey. @@ -2069,7 +2069,7 @@ def test_pool_registration_cert_wrong_stake( def test_pool_registration_missing_cold_skey( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, ): """Try to register pool using transaction with missing Cold skey. 
@@ -2110,7 +2110,7 @@ def test_pool_registration_missing_cold_skey( def test_pool_registration_missing_payment_skey( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, ): """Try to register pool using transaction with missing payment skey. @@ -2150,7 +2150,7 @@ def test_pool_registration_missing_payment_skey( def test_pool_deregistration_not_registered( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, use_build_cmd: bool, ): @@ -2443,8 +2443,8 @@ def test_stake_pool_metadata_long_homepage( def test_stake_pool_long_metadata_url( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - gen_pool_registration_cert_data: tp.Tuple[ + pool_users: list[clusterlib.PoolUser], + gen_pool_registration_cert_data: tuple[ str, str, clusterlib.KeyPair, clusterlib.ColdKeyPair ], metadata_url: str, diff --git a/cardano_node_tests/tests/test_protocol.py b/cardano_node_tests/tests/test_protocol.py index 52ddfc0e3..a6819393e 100644 --- a/cardano_node_tests/tests/test_protocol.py +++ b/cardano_node_tests/tests/test_protocol.py @@ -2,7 +2,6 @@ import json import logging -import typing as tp import allure import pytest @@ -113,7 +112,7 @@ def test_protocol_params(self, cluster: clusterlib.ClusterLib): protocol_params = cluster.g_query.get_protocol_params() # The sets were updated for Conway, so there's nothing to add or remove at the moment. 
- union_with: tp.FrozenSet[str] = frozenset() - rem: tp.FrozenSet[str] = frozenset() + union_with: frozenset[str] = frozenset() + rem: frozenset[str] = frozenset() assert set(protocol_params) == PROTOCOL_PARAM_KEYS.union(union_with).difference(rem) diff --git a/cardano_node_tests/tests/test_reconnect.py b/cardano_node_tests/tests/test_reconnect.py index 25593695f..a08281bfc 100644 --- a/cardano_node_tests/tests/test_reconnect.py +++ b/cardano_node_tests/tests/test_reconnect.py @@ -41,7 +41,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" cluster = cluster_singleton num_addrs = 2 @@ -73,7 +73,7 @@ def node_query_utxo( node: str, address: str = "", tx_raw_output: tp.Optional[clusterlib.TxRawOutput] = None, - ) -> tp.List[clusterlib.UTXOData]: + ) -> list[clusterlib.UTXOData]: """Query UTxO on given node.""" orig_socket = os.environ.get("CARDANO_NODE_SOCKET_PATH") assert orig_socket @@ -90,7 +90,7 @@ def node_get_tip( self, cluster_obj: clusterlib.ClusterLib, node: str, - ) -> tp.Dict[str, tp.Any]: + ) -> dict[str, tp.Any]: """Query UTxO on given node.""" orig_socket = os.environ.get("CARDANO_NODE_SOCKET_PATH") assert orig_socket @@ -160,7 +160,7 @@ def test_reconnect( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test that node reconnects after it was stopped. 
@@ -177,7 +177,7 @@ def test_reconnect( node1 = "pool1" node2 = "pool2" - def _assert(tx_outputs: tp.List[clusterlib.TxRawOutput]) -> None: + def _assert(tx_outputs: list[clusterlib.TxRawOutput]) -> None: tx1_node2 = self.node_query_utxo( cluster_obj=cluster, node=node2, tx_raw_output=tx_outputs[-2] ) diff --git a/cardano_node_tests/tests/test_rollback.py b/cardano_node_tests/tests/test_rollback.py index cca06a6ad..407be2565 100644 --- a/cardano_node_tests/tests/test_rollback.py +++ b/cardano_node_tests/tests/test_rollback.py @@ -50,7 +50,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" cluster = cluster_singleton num_addrs = 4 if ROLLBACK_PAUSE else 3 @@ -138,7 +138,7 @@ def node_query_utxo( node: str, address: str = "", tx_raw_output: tp.Optional[clusterlib.TxRawOutput] = None, - ) -> tp.List[clusterlib.UTXOData]: + ) -> list[clusterlib.UTXOData]: """Query UTxO on given node.""" orig_socket = os.environ.get("CARDANO_NODE_SOCKET_PATH") assert orig_socket @@ -210,7 +210,7 @@ def test_consensus_reached( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], backup_topology: pl.Path, split_topology_dir: pl.Path, ): @@ -379,7 +379,7 @@ def test_permanent_fork( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], backup_topology: pl.Path, split_topology_dir: pl.Path, ): diff --git a/cardano_node_tests/tests/test_scripts.py b/cardano_node_tests/tests/test_scripts.py index 05f06fe7e..242857c09 100644 --- a/cardano_node_tests/tests/test_scripts.py +++ b/cardano_node_tests/tests/test_scripts.py 
@@ -47,7 +47,7 @@ def multisig_tx( src_address: str, dst_address: str, amount: int, - payment_skey_files: tp.List[pl.Path], + payment_skey_files: list[pl.Path], multisig_script: tp.Optional[pl.Path] = None, invalid_hereafter: tp.Optional[int] = None, invalid_before: tp.Optional[int] = None, @@ -143,7 +143,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -169,7 +169,7 @@ def payment_addrs( @pytest.mark.smoke @pytest.mark.testnets def test_script_addr_length( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Check that script address length is the same as length of other addresses. @@ -203,7 +203,7 @@ def test_script_addr_length( def test_multisig_all( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -262,7 +262,7 @@ def test_multisig_all( def test_multisig_any( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -358,7 +358,7 @@ def test_multisig_any( def test_multisig_atleast( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -429,7 +429,7 @@ def test_multisig_atleast( def test_normal_tx_to_script_addr( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: 
str, ): @@ -489,7 +489,7 @@ def test_normal_tx_to_script_addr( def test_normal_tx_from_script_addr( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -566,7 +566,7 @@ def test_normal_tx_from_script_addr( @pytest.mark.testnets @pytest.mark.dbsync def test_multisig_empty_all( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Send funds from script address using the *all* script with zero skeys.""" temp_template = common.get_test_id(cluster) @@ -620,7 +620,7 @@ def test_multisig_empty_all( @pytest.mark.testnets @pytest.mark.dbsync def test_multisig_no_required_atleast( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Send funds from script address using the *atLeast* script with no required witnesses.""" temp_template = common.get_test_id(cluster) @@ -686,7 +686,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -715,7 +715,7 @@ def payment_addrs( @pytest.mark.testnets @pytest.mark.dbsync def test_multisig_all_missing_skey( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Try to send funds from script address using the *all* script, omit one skey. 
@@ -769,7 +769,7 @@ def test_multisig_all_missing_skey( @pytest.mark.testnets @pytest.mark.dbsync def test_multisig_any_unlisted_skey( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Try to send funds from script address using the *any* script with unlisted skey. @@ -823,7 +823,7 @@ def test_multisig_any_unlisted_skey( @pytest.mark.testnets @pytest.mark.dbsync def test_multisig_atleast_low_num_of_skeys( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Try to send funds from script address using the *atLeast* script. @@ -889,11 +889,11 @@ def _fund_script_time_locking( self, cluster_obj: clusterlib.ClusterLib, temp_template: str, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], slot: int, slot_type_arg: str, use_build_cmd: bool, - ) -> tp.Tuple[pl.Path, str, clusterlib.TxRawOutput]: + ) -> tuple[pl.Path, str, clusterlib.TxRawOutput]: """Create and fund script address.""" payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] @@ -930,7 +930,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -958,9 +958,9 @@ def payment_addrs( def fund_script_before_slot_in_past( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int]: + ) -> tuple[pl.Path, str, clusterlib.TxRawOutput, int]: 
"""Create and fund script address with "before" slot in the past.""" temp_template = common.get_test_id(cluster) use_build_cmd = request.param @@ -983,9 +983,9 @@ def fund_script_before_slot_in_past( def fund_script_before_slot_in_future( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int]: + ) -> tuple[pl.Path, str, clusterlib.TxRawOutput, int]: """Create and fund script address with "before" slot in the future.""" temp_template = common.get_test_id(cluster) use_build_cmd = request.param @@ -1008,9 +1008,9 @@ def fund_script_before_slot_in_future( def fund_script_after_slot_in_future( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int]: + ) -> tuple[pl.Path, str, clusterlib.TxRawOutput, int]: """Create and fund script address with "after" slot in the future.""" temp_template = common.get_test_id(cluster) use_build_cmd = request.param @@ -1033,9 +1033,9 @@ def fund_script_after_slot_in_future( def fund_script_after_slot_in_past( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int]: + ) -> tuple[pl.Path, str, clusterlib.TxRawOutput, int]: """Create and fund script address with "after" slot in the past.""" temp_template = common.get_test_id(cluster) use_build_cmd = request.param @@ -1065,7 +1065,7 @@ def fund_script_after_slot_in_past( def test_script_after( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, use_tx_validity: bool, ): @@ -1138,7 +1138,7 @@ def 
test_script_after( def test_script_before( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, use_tx_validity: bool, ): @@ -1206,7 +1206,7 @@ def test_script_before( def test_tx_missing_validity( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, slot_type: str, ): @@ -1275,7 +1275,7 @@ def test_tx_missing_validity( def test_tx_negative_validity( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, ): """Check that it is NOT possible to spend from script address when validity is negative.""" @@ -1357,8 +1357,8 @@ def test_tx_negative_validity( def test_before_past( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_before_slot_in_past: tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_before_slot_in_past: tuple[pl.Path, str, clusterlib.TxRawOutput, int], data: st.DataObject, request: FixtureRequest, ): @@ -1426,8 +1426,8 @@ def test_before_past( def test_before_future( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_before_slot_in_future: tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_before_slot_in_future: tuple[pl.Path, str, clusterlib.TxRawOutput, int], slot_no: int, request: FixtureRequest, ): @@ -1476,8 +1476,8 @@ def test_before_future( def test_after_future( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_after_slot_in_future: tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int], + payment_addrs: list[clusterlib.AddressRecord], + 
fund_script_after_slot_in_future: tuple[pl.Path, str, clusterlib.TxRawOutput, int], data: st.DataObject, request: FixtureRequest, ): @@ -1545,8 +1545,8 @@ def test_after_future( def test_after_past( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_after_slot_in_past: tp.Tuple[pl.Path, str, clusterlib.TxRawOutput, int], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_after_slot_in_past: tuple[pl.Path, str, clusterlib.TxRawOutput, int], data: st.DataObject, request: FixtureRequest, ): @@ -1596,7 +1596,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1629,7 +1629,7 @@ def payment_addrs( def test_tx_script_metadata_json( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1686,7 +1686,7 @@ def test_tx_script_metadata_json( def test_tx_script_metadata_cbor( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1744,7 +1744,7 @@ def test_tx_script_metadata_cbor( def test_tx_script_no_metadata( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1791,7 +1791,7 @@ def test_tx_script_no_metadata( def test_tx_script_invalid( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, ): """Build transaction with invalid auxiliary script. 
@@ -1830,7 +1830,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1868,7 +1868,7 @@ def payment_addrs( def test_incremental_signing( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, tx_is: str, @@ -2030,7 +2030,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -2064,7 +2064,7 @@ def payment_addrs( def test_script_utxo_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, script_version: str, @@ -2142,7 +2142,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -2174,7 +2174,7 @@ def payment_addrs( def test_script_reference_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, script_version: str, @@ -2319,7 +2319,7 @@ def test_script_reference_utxo( def test_spend_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: 
list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, script_version: str, @@ -2414,7 +2414,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -2449,7 +2449,7 @@ def payment_addrs( def test_nested_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], type_top: str, type_nested: str, use_build_cmd: bool, @@ -2553,7 +2553,7 @@ def test_nested_script( def test_nested_optional_all( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -2638,7 +2638,7 @@ def test_nested_optional_all( def test_invalid( # noqa: C901 self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], scenario: str, use_build_cmd: bool, submit_method: str, @@ -2660,8 +2660,8 @@ def test_invalid( # noqa: C901 invalid_hereafter = last_slot_no + 1_000 # `dst_addr1.skey_file` is needed and missing payment_skey_files = [dst_addr2.skey_file] - script_top: tp.List[dict] = [] - script_nested: tp.List[dict] = [{"type": "after", "slot": invalid_before}] + script_top: list[dict] = [] + script_nested: list[dict] = [{"type": "after", "slot": invalid_before}] expected_err = "ScriptWitnessNotValidatingUTXOW" elif scenario == "all2": type_top = "all" @@ -2844,7 +2844,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as 
fixture_cache: if fixture_cache.value: @@ -2876,7 +2876,7 @@ def payment_addrs( def test_script_v2( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Check that it is not possible to use 'SimpleScriptV2' in Shelley-era Tx.""" @@ -2941,7 +2941,7 @@ def test_script_v2( def test_auxiliary_scripts( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Check that it is not possible to use auxiliary script in Shelley-era Tx.""" diff --git a/cardano_node_tests/tests/test_socket_path.py b/cardano_node_tests/tests/test_socket_path.py index 3d5e0a8c5..4adcb60ee 100644 --- a/cardano_node_tests/tests/test_socket_path.py +++ b/cardano_node_tests/tests/test_socket_path.py @@ -91,7 +91,7 @@ def payment_addrs( set_socket_path: None, # noqa: ARG001 cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" # pylint: disable=unused-argument with cluster_manager.cache_fixture() as fixture_cache: @@ -220,7 +220,7 @@ def test_build_transfer_funds( self, set_socket_path: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], env_scenario: str, ): """Send funds to payment address. 
@@ -422,7 +422,7 @@ def test_neg_build_transfer_funds( self, set_socket_path: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], env_scenario: str, socket_scenario: str, ): diff --git a/cardano_node_tests/tests/test_staking_no_rewards.py b/cardano_node_tests/tests/test_staking_no_rewards.py index b2af51389..8ad62db7f 100644 --- a/cardano_node_tests/tests/test_staking_no_rewards.py +++ b/cardano_node_tests/tests/test_staking_no_rewards.py @@ -23,7 +23,7 @@ @pytest.fixture def cluster_lock_pool_use_rewards( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Lock any pool, use pots, and return instance of `clusterlib.ClusterLib`.""" cluster_obj = cluster_manager.get( lock_resources=[ @@ -44,7 +44,7 @@ class TestNoRewards: def test_no_reward_unmet_pledge1( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool: tuple[clusterlib.ClusterLib, str], ): """Check that the stake pool is not receiving rewards when pledge is not met. @@ -209,7 +209,7 @@ def test_no_reward_unmet_pledge1( def test_no_reward_unmet_pledge2( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool: tuple[clusterlib.ClusterLib, str], ): """Check that the stake pool is not receiving rewards when pledge is not met. @@ -372,7 +372,7 @@ def test_no_reward_unmet_pledge2( def test_no_reward_deregistered_stake_addr( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool_use_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool_use_rewards: tuple[clusterlib.ClusterLib, str], ): """Check that the pool is not receiving rewards when owner's stake address is deregistered. 
@@ -569,7 +569,7 @@ def test_no_reward_deregistered_stake_addr( def test_no_reward_deregistered_reward_addr( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool_use_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool_use_rewards: tuple[clusterlib.ClusterLib, str], ): """Check that the reward address is not receiving rewards when deregistered. @@ -756,7 +756,7 @@ def test_no_reward_deregistered_reward_addr( def test_deregister_reward_addr_retire_pool( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool_use_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool_use_rewards: tuple[clusterlib.ClusterLib, str], ): """Test deregistering reward address and retiring stake pool. @@ -779,7 +779,7 @@ def test_deregister_reward_addr_retire_pool( cluster, pool_name = cluster_lock_pool_use_rewards pool_num = int(pool_name.replace("node-pool", "")) - kes_period_info_errors_list: tp.List[str] = [] + kes_period_info_errors_list: list[str] = [] pool_rec = cluster_manager.cache.addrs_data[pool_name] pool_reward = clusterlib.PoolUser(payment=pool_rec["payment"], stake=pool_rec["reward"]) diff --git a/cardano_node_tests/tests/test_staking_rewards.py b/cardano_node_tests/tests/test_staking_rewards.py index a8f73c033..f65ee3cfc 100644 --- a/cardano_node_tests/tests/test_staking_rewards.py +++ b/cardano_node_tests/tests/test_staking_rewards.py @@ -32,21 +32,21 @@ class RewardRecord: reward_total: int reward_per_epoch: int member_pool_id: str = "" - leader_pool_ids: tp.Union[tp.List[str], tuple] = () + leader_pool_ids: list[str] | tuple = () stake_total: int = 0 @pytest.fixture def cluster_and_pool( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: return delegation.cluster_and_pool(cluster_manager=cluster_manager) @pytest.fixture def cluster_use_pool_and_rewards( cluster_manager: cluster_management.ClusterManager, -) -> 
tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Mark any pool and all pots as "in use" and return instance of `clusterlib.ClusterLib`.""" cluster_obj = cluster_manager.get( use_resources=[ @@ -63,7 +63,7 @@ def cluster_use_pool_and_rewards( @pytest.fixture def cluster_use_two_pools_and_rewards( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str, str]: +) -> tuple[clusterlib.ClusterLib, str, str]: cluster_obj = cluster_manager.get( use_resources=[ resources_management.OneOf(resources=cluster_management.Resources.ALL_POOLS), @@ -78,7 +78,7 @@ def cluster_use_two_pools_and_rewards( @pytest.fixture def cluster_lock_two_pools( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str, str]: +) -> tuple[clusterlib.ClusterLib, str, str]: cluster_obj = cluster_manager.get( lock_resources=[ resources_management.OneOf(resources=cluster_management.Resources.ALL_POOLS), @@ -94,7 +94,7 @@ def cluster_lock_two_pools( @pytest.fixture def cluster_lock_pool_and_pots( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: cluster_obj = cluster_manager.get( lock_resources=[ *cluster_management.Resources.POTS, @@ -107,10 +107,8 @@ def cluster_lock_pool_and_pots( return cluster_obj, pool_name -def _add_spendable( - rewards: tp.List[dbsync_types.RewardEpochRecord], max_epoch: int -) -> tp.Dict[int, int]: - recs: tp.Dict[int, int] = {} +def _add_spendable(rewards: list[dbsync_types.RewardEpochRecord], max_epoch: int) -> dict[int, int]: + recs: dict[int, int] = {} for r in rewards: epoch = r.spendable_epoch if max_epoch and epoch > max_epoch: @@ -125,7 +123,7 @@ def _add_spendable( def _check_member_pool_ids( - rewards_by_idx: tp.Dict[int, RewardRecord], reward_db_record: dbsync_types.RewardRecord + rewards_by_idx: dict[int, RewardRecord], reward_db_record: dbsync_types.RewardRecord ) -> None: 
"""Check that in each epoch member rewards were received from the expected pool.""" epoch_to = rewards_by_idx[max(rewards_by_idx)].epoch_no @@ -164,7 +162,7 @@ def _check_member_pool_ids( def _check_leader_pool_ids( - rewards_by_idx: tp.Dict[int, RewardRecord], reward_db_record: dbsync_types.RewardRecord + rewards_by_idx: dict[int, RewardRecord], reward_db_record: dbsync_types.RewardRecord ) -> None: """Check that in each epoch leader rewards were received from the expected pool.""" epoch_to = rewards_by_idx[max(rewards_by_idx)].epoch_no @@ -208,7 +206,7 @@ def _check_leader_pool_ids( def _dbsync_check_rewards( stake_address: str, - rewards: tp.List[RewardRecord], + rewards: list[RewardRecord], ) -> dbsync_types.RewardRecord: """Check rewards in db-sync.""" epoch_from = rewards[1].epoch_no @@ -234,7 +232,7 @@ def _dbsync_check_rewards( return reward_db_record -def _get_rew_amount_for_cred_hash(key_hash: str, rec: tp.Dict[str, tp.List[dict]]) -> int: +def _get_rew_amount_for_cred_hash(key_hash: str, rec: dict[str, list[dict]]) -> int: """Get reward amount for credential hash in ledger state snapshot record.""" r = rec.get(key_hash) or [] rew_amount = 0 @@ -243,7 +241,7 @@ def _get_rew_amount_for_cred_hash(key_hash: str, rec: tp.Dict[str, tp.List[dict] return rew_amount -def _get_rew_type_for_cred_hash(key_hash: str, rec: tp.Dict[str, tp.List[dict]]) -> tp.List[str]: +def _get_rew_type_for_cred_hash(key_hash: str, rec: dict[str, list[dict]]) -> list[str]: """Get reward types for credential hash in ledger state snapshot record.""" r = rec.get(key_hash) or [] rew_types = [] @@ -266,7 +264,7 @@ class TestRewards: def test_reward_simple( self, cluster_manager: cluster_management.ClusterManager, - cluster_and_pool: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool: tuple[clusterlib.ClusterLib, str], ): """Check that the stake address and pool owner are receiving rewards. 
@@ -320,7 +318,7 @@ def test_reward_simple( def test_reward_amount( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, - cluster_use_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_use_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ): """Check that the stake address and pool owner are receiving rewards. @@ -393,7 +391,7 @@ def test_reward_amount( # noqa: C901 pool_id=pool_id, ) - native_tokens: tp.List[clusterlib_utils.TokenRecord] = [] + native_tokens: list[clusterlib_utils.TokenRecord] = [] if VERSIONS.transaction_era >= VERSIONS.MARY: # create native tokens UTxOs for pool user native_tokens = clusterlib_utils.new_tokens( @@ -629,7 +627,7 @@ def _check_ledger_state( def test_reward_addr_delegation( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_pool_and_pots: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_pool_and_pots: tuple[clusterlib.ClusterLib, str], ): """Check that the rewards address can be delegated and receive rewards. 
@@ -691,7 +689,7 @@ def test_reward_addr_delegation( # noqa: C901 init_epoch = cluster.g_query.get_epoch() # rewards each epoch - reward_records: tp.List[RewardRecord] = [] + reward_records: list[RewardRecord] = [] # ledger state db rs_records: dict = {init_epoch: None} @@ -706,7 +704,7 @@ def _check_ledger_state( ledger_state=ledger_state, ) es_snapshot: dict = ledger_state["stateBefore"]["esSnapshots"] - rs_record: tp.Dict[str, tp.Any] = clusterlib_utils.get_snapshot_rec( + rs_record: dict[str, tp.Any] = clusterlib_utils.get_snapshot_rec( ledger_snapshot=ledger_state["possibleRewardUpdate"]["rs"] ) rs_records[this_epoch] = rs_record @@ -990,7 +988,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: # in db-sync check that there were rewards of multiple different types # ("leader", "member", "treasury", "reserves") - reward_types: tp.Dict[int, tp.List[str]] = {} + reward_types: dict[int, list[str]] = {} for rec in reward_db_record.rewards: stored_types = reward_types.get(rec.earned_epoch) if stored_types is None: @@ -1027,7 +1025,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: def test_decreasing_reward_transferred_funds( self, cluster_manager: cluster_management.ClusterManager, - cluster_use_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_use_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ): """Check that rewards are gradually decreasing when funds are being transferred. @@ -1140,7 +1138,7 @@ def _withdraw(): def test_2_pools_same_reward_addr( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_two_pools: tp.Tuple[clusterlib.ClusterLib, str, str], + cluster_lock_two_pools: tuple[clusterlib.ClusterLib, str, str], ): """Check that one reward address used for two pools receives rewards for both of them. 
@@ -1245,8 +1243,8 @@ def test_2_pools_same_reward_addr( # noqa: C901 this_epoch = init_epoch # rewards each epoch - rewards_ledger_pool1: tp.List[RewardRecord] = [] - rewards_ledger_pool2: tp.List[RewardRecord] = [] + rewards_ledger_pool1: list[RewardRecord] = [] + rewards_ledger_pool2: list[RewardRecord] = [] # check rewards for ep in range(6): @@ -1356,7 +1354,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 # in db-sync check that pool1 reward address is used as reward address for pool1, and # in the expected epochs also for pool2 - reward_types_pool1: tp.Dict[int, tp.List[str]] = {} + reward_types_pool1: dict[int, list[str]] = {} for rec in rewards_db_pool1.rewards: stored_types = reward_types_pool1.get(rec.earned_epoch) if stored_types is None: @@ -1372,7 +1370,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 # in db-sync check that pool2 reward address is NOT used for receiving rewards anymore # in the expected epochs - reward_types_pool2: tp.Dict[int, tp.List[str]] = {} + reward_types_pool2: dict[int, list[str]] = {} for rec in rewards_db_pool2.rewards: stored_types = reward_types_pool2.get(rec.earned_epoch) if stored_types is None: @@ -1393,7 +1391,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 def test_redelegation( # noqa: C901 self, cluster_manager: cluster_management.ClusterManager, - cluster_use_two_pools_and_rewards: tp.Tuple[clusterlib.ClusterLib, str, str], + cluster_use_two_pools_and_rewards: tuple[clusterlib.ClusterLib, str, str], ): """Check rewards received by stake address over multiple epochs. 
@@ -1716,8 +1714,8 @@ class TestNegativeWithdrawal: def pool_users( self, cluster_manager: cluster_management.ClusterManager, - cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], - ) -> tp.Tuple[clusterlib.PoolUser, clusterlib.PoolUser]: + cluster_use_pool: tuple[clusterlib.ClusterLib, str], + ) -> tuple[clusterlib.PoolUser, clusterlib.PoolUser]: cluster, pool_name = cluster_use_pool pool_rec = cluster_manager.cache.addrs_data[pool_name] @@ -1740,8 +1738,8 @@ def pool_users( @common.hypothesis_settings(max_examples=300) def test_withdrawal_wrong_amount( self, - cluster_use_pool: tp.Tuple[clusterlib.ClusterLib, str], - pool_users: tp.Tuple[clusterlib.PoolUser, clusterlib.PoolUser], + cluster_use_pool: tuple[clusterlib.ClusterLib, str], + pool_users: tuple[clusterlib.PoolUser, clusterlib.PoolUser], amount: int, ): """Test that it is not possible to withdraw other amount than the total reward amount. diff --git a/cardano_node_tests/tests/test_tx_basic.py b/cardano_node_tests/tests/test_tx_basic.py index e7357206f..b418ac713 100644 --- a/cardano_node_tests/tests/test_tx_basic.py +++ b/cardano_node_tests/tests/test_tx_basic.py @@ -4,7 +4,6 @@ import itertools import logging import re -import typing as tp import allure import pytest @@ -36,7 +35,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -62,7 +61,7 @@ def byron_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new Byron payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -90,7 +89,7 @@ def payment_addrs_disposable( self, cluster_manager: 
cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses.""" temp_template = common.get_test_id(cluster) @@ -113,7 +112,7 @@ def payment_addrs_no_change( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses for `test_build_no_change`.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -166,8 +165,8 @@ def cluster_default_tx_era( def test_transfer_funds( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - byron_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], + byron_addrs: list[clusterlib.AddressRecord], src_addr_type: str, dst_addr_type: str, amount: int, @@ -227,8 +226,8 @@ def test_transfer_funds( def test_byron_fee_too_small( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - byron_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], + byron_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Test cardano-node issue #4752. @@ -278,7 +277,7 @@ def test_byron_fee_too_small( def test_build_no_change( self, cluster: clusterlib.ClusterLib, - payment_addrs_no_change: tp.List[clusterlib.AddressRecord], + payment_addrs_no_change: list[clusterlib.AddressRecord], submit_method: str, ): """Send funds to payment address and balance the outputs so that there is no change. @@ -364,7 +363,7 @@ def test_build_no_change( def test_transfer_all_funds( self, cluster: clusterlib.ClusterLib, - payment_addrs_disposable: tp.List[clusterlib.AddressRecord], + payment_addrs_disposable: list[clusterlib.AddressRecord], submit_method: str, ): """Send ALL funds from one payment address to another. 
@@ -435,7 +434,7 @@ def test_transfer_all_funds( def test_funds_to_valid_address( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -496,7 +495,7 @@ def test_funds_to_valid_address( def test_get_txid( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -552,7 +551,7 @@ def test_get_txid( def test_extra_signing_keys( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Send a transaction with extra signing key. @@ -617,7 +616,7 @@ def test_extra_signing_keys( def test_duplicate_signing_keys( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Send a transaction with duplicate signing key. 
@@ -682,7 +681,7 @@ def test_duplicate_signing_keys( def test_sign_wrong_file( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], file_type: str, submit_method: str, ): @@ -802,7 +801,7 @@ def test_no_txout( def test_missing_tx_out( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Build a transaction with a missing `--tx-out` parameter.""" temp_template = common.get_test_id(cluster) @@ -843,7 +842,7 @@ def test_missing_tx_out( def test_missing_ttl( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Submit a transaction with a missing `--ttl` (`--invalid-hereafter`) parameter.""" @@ -905,7 +904,7 @@ def test_missing_ttl( def test_multiple_same_txins( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Try to build a transaction with multiple identical txins.""" @@ -969,7 +968,7 @@ def test_multiple_same_txins( def test_build_multiple_same_txins( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Build a transaction with multiple identical txins. @@ -1018,7 +1017,7 @@ def test_build_multiple_same_txins( def test_utxo_with_datum_hash( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Create a UTxO with datum hash in a regular address and spend it. 
@@ -1086,7 +1085,7 @@ def test_utxo_with_datum_hash( def test_far_future_ttl( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Send a transaction with ttl far in the future.""" @@ -1147,7 +1146,7 @@ def test_default_tx_era( self, cluster: clusterlib.ClusterLib, cluster_default_tx_era: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, submit_method: str, ): @@ -1188,7 +1187,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 201 new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1216,7 +1215,7 @@ def payment_addrs( def _from_to_transactions( self, cluster_obj: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], tx_name: str, from_num: int, to_num: int, @@ -1306,7 +1305,7 @@ def _from_to_transactions( def test_10_transactions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Send 10 transactions to payment address. 
@@ -1362,7 +1361,7 @@ def test_10_transactions( def test_transaction_to_10_addrs_from_1_addr( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], amount: int, use_build_cmd: bool, submit_method: str, @@ -1394,7 +1393,7 @@ def test_transaction_to_10_addrs_from_1_addr( def test_transaction_to_1_addr_from_10_addrs( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], amount: int, use_build_cmd: bool, submit_method: str, @@ -1426,7 +1425,7 @@ def test_transaction_to_1_addr_from_10_addrs( def test_transaction_to_10_addrs_from_10_addrs( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], amount: int, use_build_cmd: bool, submit_method: str, @@ -1458,7 +1457,7 @@ def test_transaction_to_10_addrs_from_10_addrs( def test_transaction_to_100_addrs_from_50_addrs( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], amount: int, use_build_cmd: bool, submit_method: str, @@ -1489,7 +1488,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -1523,7 +1522,7 @@ def payment_addrs( def test_incremental_signing( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_build_cmd: bool, tx_is: str, submit_method: str, diff --git a/cardano_node_tests/tests/test_tx_fees.py b/cardano_node_tests/tests/test_tx_fees.py index ec2053e9e..813c186f1 100644 --- a/cardano_node_tests/tests/test_tx_fees.py +++ 
b/cardano_node_tests/tests/test_tx_fees.py @@ -2,7 +2,6 @@ import itertools import logging -import typing as tp import allure import hypothesis @@ -46,7 +45,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -76,7 +75,7 @@ def payment_addrs( def test_negative_fee( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], fee: int, ): """Try to send a transaction with negative fee (property-based test). @@ -108,7 +107,7 @@ def test_negative_fee( def test_smaller_fee( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], fee_change: float, ): """Try to send a transaction with smaller-than-expected fee. 
@@ -152,7 +151,7 @@ def test_smaller_fee( def test_expected_or_higher_fee( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], fee_add: int, ): """Send a transaction with fee that is same or higher than expected.""" @@ -202,7 +201,7 @@ def pool_users( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.PoolUser]: + ) -> list[clusterlib.PoolUser]: """Create pool users.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -227,10 +226,10 @@ def pool_users( def _create_pool_certificates( self, cluster_obj: clusterlib.ClusterLib, - pool_owners: tp.List[clusterlib.PoolUser], + pool_owners: list[clusterlib.PoolUser], temp_template: str, pool_data: clusterlib.PoolData, - ) -> tp.Tuple[str, clusterlib.TxFiles]: + ) -> tuple[str, clusterlib.TxFiles]: """Create certificates for registering a stake pool, delegating stake address.""" # create node VRF key pair node_vrf = cluster_obj.g_node.gen_vrf_key_pair(node_name=pool_data.pool_name) @@ -287,10 +286,10 @@ def _from_to_transactions( self, cluster_obj: clusterlib.ClusterLib, tx_name: str, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], from_num: int, to_num: int, - amount_expected: tp.Tuple[int, int], + amount_expected: tuple[int, int], ): """Check fees for 1 tx from `from_num` payment addresses to `to_num` payment addresses.""" amount, expected_fee = amount_expected @@ -324,8 +323,8 @@ def _from_to_transactions( def test_pool_registration_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - addr_fee: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + addr_fee: tuple[int, int], ): """Test pool registration fees.""" no_of_addr, expected_fee = addr_fee @@ -377,8 +376,8 @@ def test_pool_registration_fees( def test_pool_deregistration_fees( self, cluster: 
clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - addr_fee: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + addr_fee: tuple[int, int], ): """Test pool deregistration fees.""" no_of_addr, expected_fee = addr_fee @@ -442,8 +441,8 @@ def test_pool_deregistration_fees( def test_addr_registration_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - addr_fee: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + addr_fee: tuple[int, int], ): """Test stake address registration fees.""" no_of_addr, expected_fee = addr_fee @@ -483,8 +482,8 @@ def test_addr_registration_fees( def test_addr_deregistration_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - addr_fee: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + addr_fee: tuple[int, int], ): """Test stake address deregistration fees.""" no_of_addr, expected_fee = addr_fee @@ -526,8 +525,8 @@ def test_addr_deregistration_fees( def test_transaction_to_1_addr_from_1_addr_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - amount_expected: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + amount_expected: tuple[int, int], ): """Test fees for 1 tx from 1 payment address to 1 payment address.""" temp_template = common.get_test_id(cluster) @@ -547,8 +546,8 @@ def test_transaction_to_1_addr_from_1_addr_fees( def test_transaction_to_10_addrs_from_1_addr_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - amount_expected: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + amount_expected: tuple[int, int], ): """Test fees for 1 tx from 1 payment address to 10 payment addresses.""" temp_template = common.get_test_id(cluster) @@ -568,8 +567,8 @@ def test_transaction_to_10_addrs_from_1_addr_fees( def test_transaction_to_1_addr_from_10_addrs_fees( self, cluster: clusterlib.ClusterLib, - pool_users: 
tp.List[clusterlib.PoolUser], - amount_expected: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + amount_expected: tuple[int, int], ): """Test fees for 1 tx from 10 payment addresses to 1 payment address.""" temp_template = common.get_test_id(cluster) @@ -589,8 +588,8 @@ def test_transaction_to_1_addr_from_10_addrs_fees( def test_transaction_to_10_addrs_from_10_addrs_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - amount_expected: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + amount_expected: tuple[int, int], ): """Test fees for 1 tx from 10 payment addresses to 10 payment addresses.""" temp_template = common.get_test_id(cluster) @@ -613,8 +612,8 @@ def test_transaction_to_10_addrs_from_10_addrs_fees( def test_transaction_to_100_addrs_from_100_addrs_fees( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], - amount_expected: tp.Tuple[int, int], + pool_users: list[clusterlib.PoolUser], + amount_expected: tuple[int, int], ): """Test fees for 1 tx from 100 payment addresses to 100 payment addresses.""" temp_template = common.get_test_id(cluster) diff --git a/cardano_node_tests/tests/test_tx_many_utxos.py b/cardano_node_tests/tests/test_tx_many_utxos.py index 166bfab3c..5ed4ecbd5 100644 --- a/cardano_node_tests/tests/test_tx_many_utxos.py +++ b/cardano_node_tests/tests/test_tx_many_utxos.py @@ -41,7 +41,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" addrs = clusterlib_utils.create_payment_addr_records( *[f"tiny_tx_addr_ci{cluster_manager.cluster_instance_num}_{i}" for i in range(3)], @@ -62,7 +62,7 @@ def _from_to_transactions( self, cluster_obj: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, - out_addrs: tp.List[clusterlib.AddressRecord], + out_addrs: 
list[clusterlib.AddressRecord], tx_name: str, amount: int, ): @@ -87,8 +87,8 @@ def _from_to_transactions( def many_utxos( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - ) -> tp.Tuple[clusterlib.AddressRecord, clusterlib.AddressRecord]: + payment_addrs: list[clusterlib.AddressRecord], + ) -> tuple[clusterlib.AddressRecord, clusterlib.AddressRecord]: """Generate many UTxOs (100000+) with 1-2 ADA.""" temp_template = common.get_test_id(cluster) @@ -158,7 +158,7 @@ def many_utxos( def test_mini_transactions( self, cluster: clusterlib.ClusterLib, - many_utxos: tp.Tuple[clusterlib.AddressRecord, clusterlib.AddressRecord], + many_utxos: tuple[clusterlib.AddressRecord, clusterlib.AddressRecord], subtests: pytest_subtests.SubTests, ): """Test transaction with many UTxOs (300+) with small amounts of ADA (1-10). diff --git a/cardano_node_tests/tests/test_tx_mempool.py b/cardano_node_tests/tests/test_tx_mempool.py index 5b7d170fd..aacc6560f 100644 --- a/cardano_node_tests/tests/test_tx_mempool.py +++ b/cardano_node_tests/tests/test_tx_mempool.py @@ -1,7 +1,6 @@ """Tests for transactions in mempool.""" import logging -import typing as tp import allure import pytest @@ -23,7 +22,7 @@ def payment_addrs_locked( self, cluster_manager: cluster_management.ClusterManager, cluster_singleton: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses for 'test_query_mempool_txin'.""" temp_template = common.get_test_id(cluster_singleton) @@ -46,7 +45,7 @@ def payment_addrs_locked( def test_query_mempool_txin( self, cluster_singleton: clusterlib.ClusterLib, - payment_addrs_locked: tp.List[clusterlib.AddressRecord], + payment_addrs_locked: list[clusterlib.AddressRecord], ): """Test that is possible to query txin of a transaction that is still in mempool. 
diff --git a/cardano_node_tests/tests/test_tx_negative.py b/cardano_node_tests/tests/test_tx_negative.py index 4003762cc..27f7aa418 100644 --- a/cardano_node_tests/tests/test_tx_negative.py +++ b/cardano_node_tests/tests/test_tx_negative.py @@ -72,7 +72,7 @@ def pool_users( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.PoolUser]: + ) -> list[clusterlib.PoolUser]: """Create pool users.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -97,7 +97,7 @@ def pool_users( def _send_funds_to_invalid_address( self, cluster_obj: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, temp_template: str, use_build_cmd=False, @@ -131,7 +131,7 @@ def _send_funds_to_invalid_address( def _send_funds_from_invalid_address( self, cluster_obj: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, temp_template: str, use_build_cmd=False, @@ -163,7 +163,7 @@ def _send_funds_from_invalid_address( def _send_funds_invalid_change_address( self, cluster_obj: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, temp_template: str, ): @@ -186,7 +186,7 @@ def _send_funds_invalid_change_address( def _send_funds_with_invalid_utxo( self, cluster_obj: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], utxo: clusterlib.UTXOData, temp_template: str, use_build_cmd=False, @@ -219,12 +219,12 @@ def _send_funds_with_invalid_utxo( def _submit_wrong_validity( self, cluster_obj: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], temp_template: str, invalid_before: tp.Optional[int] = None, invalid_hereafter: tp.Optional[int] = None, use_build_cmd=False, - ) -> tp.Tuple[tp.Optional[int], str, 
tp.Optional[clusterlib.TxRawOutput]]: + ) -> tuple[tp.Optional[int], str, tp.Optional[clusterlib.TxRawOutput]]: """Try to build and submit a transaction with wrong validity interval.""" src_address = pool_users[0].payment.address dst_address = pool_users[1].payment.address @@ -288,7 +288,7 @@ def _submit_wrong_validity( def _get_validity_range( self, cluster_obj: clusterlib.ClusterLib, tx_body_file: pl.Path - ) -> tp.Tuple[tp.Optional[int], tp.Optional[int]]: + ) -> tuple[tp.Optional[int], tp.Optional[int]]: """Get validity range from a transaction body.""" tx_loaded = tx_view.load_tx_view(cluster_obj=cluster_obj, tx_body_file=tx_body_file) @@ -308,7 +308,7 @@ def _get_validity_range( def test_past_ttl( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send a transaction with ttl in the past. @@ -335,7 +335,7 @@ def test_past_ttl( def test_before_negative_overflow( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send a transaction with negative `invalid_before` and check for int overflow. @@ -387,7 +387,7 @@ def test_before_negative_overflow( def test_before_positive_overflow( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send a transaction with `invalid_before` > `MAX_UINT64`. @@ -440,7 +440,7 @@ def test_before_positive_overflow( def test_before_too_high( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send a transaction with `invalid_before` > `MAX_INT64`. 
@@ -478,7 +478,7 @@ def test_before_too_high( def test_pbt_before_negative_overflow( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], before_value: int, use_build_cmd: bool, ): @@ -525,7 +525,7 @@ def test_pbt_before_negative_overflow( def test_pbt_before_positive_overflow( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], before_value: int, use_build_cmd: bool, ): @@ -573,7 +573,7 @@ def test_pbt_before_positive_overflow( def test_pbt_before_too_high( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], before_value: int, use_build_cmd: bool, ): @@ -600,7 +600,7 @@ def test_pbt_before_too_high( def test_duplicated_tx( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to send an identical transaction twice. @@ -658,7 +658,7 @@ def test_duplicated_tx( def test_wrong_network_magic( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to submit a TX with wrong network magic. @@ -728,7 +728,7 @@ def test_wrong_network_magic( def test_wrong_signing_key( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to send a transaction signed with wrong signing key. @@ -757,7 +757,7 @@ def test_wrong_tx_era( self, cluster: clusterlib.ClusterLib, cluster_wrong_tx_era: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to send a transaction using TX era > network (cluster) era. 
@@ -789,7 +789,7 @@ def test_wrong_tx_era( def test_send_funds_to_reward_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send funds from payment address to stake address. @@ -814,7 +814,7 @@ def test_send_funds_to_reward_address( def test_send_funds_to_utxo_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to send funds from payment address to UTxO address. @@ -841,7 +841,7 @@ def test_send_funds_to_utxo_address( def test_send_funds_to_invalid_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to non-existent address (property-based test). @@ -864,7 +864,7 @@ def test_send_funds_to_invalid_address( def test_build_send_funds_to_invalid_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to non-existent address (property-based test). @@ -892,7 +892,7 @@ def test_build_send_funds_to_invalid_address( def test_send_funds_to_invalid_length_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to address with invalid length. @@ -915,7 +915,7 @@ def test_send_funds_to_invalid_length_address( def test_build_send_funds_to_invalid_length_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to address with invalid length. 
@@ -945,7 +945,7 @@ def test_build_send_funds_to_invalid_length_address( def test_send_funds_to_invalid_chars_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to address with invalid characters. @@ -970,7 +970,7 @@ def test_send_funds_to_invalid_chars_address( def test_build_send_funds_to_invalid_chars_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from payment address to address with invalid characters. @@ -998,7 +998,7 @@ def test_build_send_funds_to_invalid_chars_address( def test_send_funds_from_invalid_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from invalid address (property-based test). @@ -1021,7 +1021,7 @@ def test_send_funds_from_invalid_address( def test_build_send_funds_from_invalid_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from non-existent address (property-based test). @@ -1049,7 +1049,7 @@ def test_build_send_funds_from_invalid_address( def test_send_funds_from_invalid_length_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from address with invalid length (property-based test). @@ -1072,7 +1072,7 @@ def test_send_funds_from_invalid_length_address( def test_build_send_funds_from_invalid_length_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from address with invalid length (property-based test). 
@@ -1102,7 +1102,7 @@ def test_build_send_funds_from_invalid_length_address( def test_send_funds_from_invalid_chars_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from address with invalid characters (property-based test). @@ -1127,7 +1127,7 @@ def test_send_funds_from_invalid_chars_address( def test_build_send_funds_from_invalid_chars_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds from address with invalid characters (property-based test). @@ -1156,7 +1156,7 @@ def test_build_send_funds_from_invalid_chars_address( def test_build_send_funds_invalid_change_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds using invalid change address (property-based test). @@ -1183,7 +1183,7 @@ def test_build_send_funds_invalid_change_address( def test_build_send_funds_invalid_chars_change_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds using change address with invalid characters (property-based test). @@ -1208,7 +1208,7 @@ def test_build_send_funds_invalid_chars_change_address( def test_build_send_funds_invalid_length_change_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], addr: str, ): """Try to send funds using change address with invalid length (property-based test). 
@@ -1231,7 +1231,7 @@ def test_build_send_funds_invalid_length_change_address( def test_nonexistent_utxo_ix( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to use nonexistent UTxO TxIx as an input. @@ -1265,7 +1265,7 @@ def test_nonexistent_utxo_ix( def test_nonexistent_utxo_hash( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], use_build_cmd: bool, ): """Try to use nonexistent UTxO hash as an input. @@ -1301,7 +1301,7 @@ def test_nonexistent_utxo_hash( def test_invalid_length_utxo_hash( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], utxo_hash: str, ): """Try to use invalid UTxO hash as an input (property-based test). @@ -1330,7 +1330,7 @@ def test_invalid_length_utxo_hash( def test_build_invalid_length_utxo_hash( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], utxo_hash: str, ): """Try to use invalid UTxO hash as an input (property-based test). @@ -1362,7 +1362,7 @@ def test_build_invalid_length_utxo_hash( def test_missing_fee( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a transaction with a missing `--fee` parameter. @@ -1421,7 +1421,7 @@ def test_missing_fee( def test_missing_ttl( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a Shelley era TX with a missing `--ttl` (`--invalid-hereafter`) parameter. 
@@ -1468,7 +1468,7 @@ def test_missing_ttl( def test_missing_tx_in( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a transaction with a missing `--tx-in` parameter. @@ -1512,7 +1512,7 @@ def test_missing_tx_in( def test_lower_bound_not_supported( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a Shelley era TX with an `--invalid-before` argument. @@ -1550,7 +1550,7 @@ def test_lower_bound_not_supported( def test_build_missing_tx_in( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a transaction with a missing `--tx-in` parameter. @@ -1594,7 +1594,7 @@ def test_build_missing_tx_in( def test_build_missing_change_address( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a transaction with a missing `--change-address` parameter. @@ -1637,7 +1637,7 @@ def test_build_missing_change_address( def test_build_multiple_change_addresses( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Try to build a transaction with multiple `--change-address` parameters. 
diff --git a/cardano_node_tests/tests/test_tx_unbalanced.py b/cardano_node_tests/tests/test_tx_unbalanced.py index 21a5cea7c..51658b2f2 100644 --- a/cardano_node_tests/tests/test_tx_unbalanced.py +++ b/cardano_node_tests/tests/test_tx_unbalanced.py @@ -1,7 +1,6 @@ """Tests for unbalanced transactions.""" import logging -import typing as tp import allure import hypothesis @@ -29,7 +28,7 @@ def _build_transfer_amount_bellow_minimum( self, cluster: clusterlib.ClusterLib, temp_template: str, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, amount: int, ): @@ -69,7 +68,7 @@ def payment_addrs( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create 2 new payment addresses.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -95,7 +94,7 @@ def payment_addrs( def pbt_highest_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ) -> clusterlib.UTXOData: """Get UTxO with highest amount of Lovelace. @@ -109,7 +108,7 @@ def pbt_highest_utxo( def test_negative_change( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Try to build a transaction with a negative change. 
@@ -173,7 +172,7 @@ def test_negative_change( def test_build_transfer_unavailable_funds( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, transfer_add: int, ): @@ -218,7 +217,7 @@ def test_build_transfer_unavailable_funds( def test_wrong_balance( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, change_amount: int, ): @@ -279,7 +278,7 @@ def test_wrong_balance( def test_out_of_bounds_amount( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, change_amount: int, ): @@ -324,7 +323,7 @@ def test_out_of_bounds_amount( def test_build_transfer_amount_bellow_minimum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, amount: int, ): @@ -355,7 +354,7 @@ def test_build_transfer_amount_bellow_minimum( def test_build_transfer_negative_amount( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, amount: int, ): @@ -388,7 +387,7 @@ def test_build_transfer_negative_amount( def test_transfer_amount_bellow_minimum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, amount: int, ): @@ -451,7 +450,7 @@ def test_transfer_amount_bellow_minimum( def test_transfer_negative_amount( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: 
clusterlib.UTXOData, amount: int, ): diff --git a/cardano_node_tests/tests/tests_conway/conway_common.py b/cardano_node_tests/tests/tests_conway/conway_common.py index 38c585bc8..b58e80554 100644 --- a/cardano_node_tests/tests/tests_conway/conway_common.py +++ b/cardano_node_tests/tests/tests_conway/conway_common.py @@ -18,11 +18,11 @@ @dataclasses.dataclass(frozen=True) class PParamPropRec: - proposals: tp.List[clusterlib_utils.UpdateProposal] + proposals: list[clusterlib_utils.UpdateProposal] action_txid: str action_ix: int - proposal_names: tp.Set[str] - future_pparams: tp.Dict[str, tp.Any] + proposal_names: set[str] + future_pparams: dict[str, tp.Any] def is_in_bootstrap( @@ -33,7 +33,7 @@ def is_in_bootstrap( return bool(pv == 9) -def get_committee_val(data: tp.Dict[str, tp.Any]) -> tp.Dict[str, tp.Any]: +def get_committee_val(data: dict[str, tp.Any]) -> dict[str, tp.Any]: """Get the committee value from the data. The key can be either correctly "committee", or with typo "commitee". @@ -42,7 +42,7 @@ def get_committee_val(data: tp.Dict[str, tp.Any]) -> tp.Dict[str, tp.Any]: return data.get("committee") or data.get("commitee") or {} -def possible_rem_issue(gov_state: tp.Dict[str, tp.Any], epoch: int) -> bool: +def possible_rem_issue(gov_state: dict[str, tp.Any], epoch: int) -> bool: """Check if the unexpected removed action situation can be result of known ledger issue. 
When the issue manifests, only single expired action gets removed and all other expired or @@ -50,10 +50,8 @@ def possible_rem_issue(gov_state: tp.Dict[str, tp.Any], epoch: int) -> bool: See https://github.com/IntersectMBO/cardano-ledger/issues/3979 """ - removed_actions: tp.List[tp.Dict[str, tp.Any]] = gov_state["nextRatifyState"][ - "expiredGovActions" - ] - proposals: tp.List[tp.Dict[str, tp.Any]] = gov_state["proposals"] + removed_actions: list[dict[str, tp.Any]] = gov_state["nextRatifyState"]["expiredGovActions"] + proposals: list[dict[str, tp.Any]] = gov_state["proposals"] if len(removed_actions) != 1 or len(proposals) == 1: return False @@ -91,13 +89,13 @@ def get_no_abstain_vote(idx: int) -> clusterlib.Votes: return clusterlib.Votes.ABSTAIN -def save_gov_state(gov_state: tp.Dict[str, tp.Any], name_template: str) -> None: +def save_gov_state(gov_state: dict[str, tp.Any], name_template: str) -> None: """Save governance state to a file.""" with open(f"{name_template}_gov_state.json", "w", encoding="utf-8") as out_fp: json.dump(gov_state, out_fp, indent=2) -def save_committee_state(committee_state: tp.Dict[str, tp.Any], name_template: str) -> None: +def save_committee_state(committee_state: dict[str, tp.Any], name_template: str) -> None: """Save CC state to a file.""" with open(f"{name_template}_committee_state.json", "w", encoding="utf-8") as out_fp: json.dump(committee_state, out_fp, indent=2) @@ -176,8 +174,8 @@ def submit_vote( cluster_obj: clusterlib.ClusterLib, name_template: str, payment_addr: clusterlib.AddressRecord, - votes: tp.List[governance_utils.VotesAllT], - keys: tp.List[clusterlib.FileType], + votes: list[governance_utils.VotesAllT], + keys: list[clusterlib.FileType], script_votes: clusterlib.OptionalScriptVotes = (), submit_method: str = "", use_build_cmd: bool = True, @@ -311,10 +309,10 @@ def cast_vote( drep_script_witnesses = [r.skey_file for r in drep_script_key_pairs] spo_keys = [r.skey_file for r in governance_data.pools_cold] if 
votes_spo else [] - votes_simple: tp.List[governance_utils.VotesAllT] = [*votes_cc, *votes_drep_keys, *votes_spo] + votes_simple: list[governance_utils.VotesAllT] = [*votes_cc, *votes_drep_keys, *votes_spo] keys_all = [*cc_keys, *drep_keys, *drep_script_witnesses, *spo_keys] - script_votes: tp.List[clusterlib.ScriptVote] = [] + script_votes: list[clusterlib.ScriptVote] = [] if votes_drep_scripts: drep_script_reg_certs = [r.registration_cert for r in governance_data.drep_scripts_reg] @@ -366,7 +364,7 @@ def cast_vote( def resign_ccs( cluster_obj: clusterlib.ClusterLib, name_template: str, - ccs_to_resign: tp.List[clusterlib.CCMember], + ccs_to_resign: list[clusterlib.CCMember], payment_addr: clusterlib.AddressRecord, ) -> clusterlib.TxRawOutput: """Resign multiple CC Members.""" @@ -416,7 +414,7 @@ def propose_change_constitution( constitution_hash: str, pool_user: clusterlib.PoolUser, constitution_script_hash: str = "", -) -> tp.Tuple[clusterlib.ActionConstitution, str, int]: +) -> tuple[clusterlib.ActionConstitution, str, int]: """Propose a constitution change.""" deposit_amt = cluster_obj.conway_genesis["govActionDeposit"] @@ -490,7 +488,7 @@ def propose_pparams_update( anchor_url: str, anchor_data_hash: str, pool_user: clusterlib.PoolUser, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], prev_action_rec: tp.Optional[governance_utils.PrevActionRec] = None, ) -> PParamPropRec: """Propose a pparams update.""" diff --git a/cardano_node_tests/tests/tests_conway/test_committee.py b/cardano_node_tests/tests/tests_conway/test_committee.py index 0a8a623c1..076a71f46 100644 --- a/cardano_node_tests/tests/tests_conway/test_committee.py +++ b/cardano_node_tests/tests/tests_conway/test_committee.py @@ -456,7 +456,7 @@ def _auth_hot_keys() -> None: ), "CC Member should not be recognized" [r.success() for r in (reqc.cli032, reqc.cip002, reqc.cip004)] - def _add_members() -> tp.Tuple[clusterlib.ActionUpdateCommittee, 
str, int]: + def _add_members() -> tuple[clusterlib.ActionUpdateCommittee, str, int]: """Add new CC members.""" anchor_url_add = "http://www.cc-add.com" anchor_data_hash_add = ( @@ -572,7 +572,7 @@ def _resign_member(res_member: clusterlib.CCMember) -> None: def _remove_member( rem_member: clusterlib.CCMember, prev_action_txid: str, prev_action_ix: int - ) -> tp.Tuple[clusterlib.ActionUpdateCommittee, str, int]: + ) -> tuple[clusterlib.ActionUpdateCommittee, str, int]: """Remove a CC member.""" anchor_url_rem = "http://www.cc-rem.com" anchor_data_hash_rem = ( @@ -677,9 +677,7 @@ def _resign_active(): tx_files=tx_files_res, ) - def _check_cc_member1_expired( - committee_state: tp.Dict[str, tp.Any], curr_epoch: int - ) -> None: + def _check_cc_member1_expired(committee_state: dict[str, tp.Any], curr_epoch: int) -> None: member_rec = committee_state["committee"][cc_member1_key] if curr_epoch <= cc_member1_expire: assert member_rec["status"] != "Expired", "CC Member is already expired" @@ -690,13 +688,13 @@ def _check_cc_member1_expired( elif curr_epoch > cc_member1_expire: assert member_rec["status"] == "Expired", "CC Member should be expired" - def _check_cc_member2_removed(gov_state: tp.Dict[str, tp.Any]): + def _check_cc_member2_removed(gov_state: dict[str, tp.Any]): cc_member_val = conway_common.get_committee_val(data=gov_state)["members"].get( cc_member2_key ) assert not cc_member_val, "Removed committee member still present" - def _check_add_state(gov_state: tp.Dict[str, tp.Any]): + def _check_add_state(gov_state: dict[str, tp.Any]): for i, _cc_member_key in enumerate((cc_member1_key, cc_member2_key, cc_member3_key)): cc_member_val = conway_common.get_committee_val(data=gov_state)["members"].get( _cc_member_key @@ -1094,7 +1092,7 @@ def _set_zero_committee_pparam() -> conway_common.PParamPropRec: proposals=update_proposals, ) - def _rem_committee() -> tp.Tuple[clusterlib.ActionUpdateCommittee, str, int]: + def _rem_committee() -> 
tuple[clusterlib.ActionUpdateCommittee, str, int]: """Remove all CC members.""" anchor_url_rem = "http://www.cc-rem-all.com" anchor_data_hash_rem = ( @@ -1169,7 +1167,7 @@ def _rem_committee() -> tp.Tuple[clusterlib.ActionUpdateCommittee, str, int]: def _check_rat_gov_state( name_template: str, action_txid: str, action_ix: int, epoch_no: int - ) -> tp.Dict[str, tp.Any]: + ) -> dict[str, tp.Any]: gov_state = cluster.g_conway_governance.query.gov_state() conway_common.save_gov_state( gov_state=gov_state, name_template=f"{name_template}_{epoch_no}" diff --git a/cardano_node_tests/tests/tests_conway/test_constitution.py b/cardano_node_tests/tests/tests_conway/test_constitution.py index 09624f543..0286225d0 100644 --- a/cardano_node_tests/tests/tests_conway/test_constitution.py +++ b/cardano_node_tests/tests/tests_conway/test_constitution.py @@ -81,7 +81,7 @@ def script_dreps_lg( cluster_lock_gov_script: governance_utils.GovClusterT, testfile_temp_dir: pl.Path, ) -> tp.Generator[ - tp.Tuple[tp.List[governance_utils.DRepScriptRegistration], tp.List[clusterlib.PoolUser]], + tuple[list[governance_utils.DRepScriptRegistration], list[clusterlib.PoolUser]], None, None, ]: @@ -299,8 +299,8 @@ def _retire_dreps() -> None: @pytest.fixture def governance_w_scripts_lg( cluster_lock_gov_script: governance_utils.GovClusterT, - script_dreps_lg: tp.Tuple[ - tp.List[governance_utils.DRepScriptRegistration], tp.List[clusterlib.PoolUser] + script_dreps_lg: tuple[ + list[governance_utils.DRepScriptRegistration], list[clusterlib.PoolUser] ], ) -> governance_utils.GovernanceRecords: """Create a governance records with script DReps.""" diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index f99c51545..5f66e7a7a 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -178,7 +178,7 @@ def get_custom_drep( @pytest.fixture def cluster_and_pool_and_rewards( 
cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: return delegation.cluster_and_pool( cluster_manager=cluster_manager, use_resources=[cluster_management.Resources.REWARDS] ) @@ -248,7 +248,7 @@ def custom_drep( @pytest.fixture def payment_addr_wpr( cluster_manager: cluster_management.ClusterManager, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ) -> clusterlib.AddressRecord: cluster, __ = cluster_and_pool_and_rewards test_id = common.get_test_id(cluster) @@ -261,7 +261,7 @@ def payment_addr_wpr( @pytest.fixture def pool_user_wpr( cluster_manager: cluster_management.ClusterManager, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], ) -> clusterlib.PoolUser: cluster, __ = cluster_and_pool_and_rewards test_id = common.get_test_id(cluster) @@ -274,7 +274,7 @@ def pool_user_wpr( @pytest.fixture def custom_drep_wpr( cluster_manager: cluster_management.ClusterManager, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], payment_addr_wpr: clusterlib.AddressRecord, ) -> governance_utils.DRepRegistration: cluster, __ = cluster_and_pool_and_rewards @@ -1270,7 +1270,7 @@ def _deregister(): @pytest.mark.smoke def test_dreps_and_spo_delegation( self, - cluster_and_pool_and_rewards: tp.Tuple[clusterlib.ClusterLib, str], + cluster_and_pool_and_rewards: tuple[clusterlib.ClusterLib, str], payment_addr_wpr: clusterlib.AddressRecord, pool_user_wpr: clusterlib.PoolUser, custom_drep_wpr: governance_utils.DRepRegistration, @@ -1403,7 +1403,7 @@ def test_cli_drep_status_consistency( def _get_drep_rec( drep_state: governance_utils.DRepStateT, - ) -> tp.Dict[str, tp.Dict[str, tp.Any]]: + ) -> dict[str, dict[str, tp.Any]]: return {drep[0]["keyHash"]: 
drep[1] for drep in drep_state} drep_states_all = _get_drep_rec(drep_state=cluster.g_conway_governance.query.drep_state()) @@ -1643,9 +1643,9 @@ def test_drep_inactivity( # noqa: C901 deposit_amt = cluster.g_query.get_address_deposit() # Saved DRep records - drep1_state: tp.Dict[str, DRepStateRecord] = {} - drep2_state: tp.Dict[str, DRepStateRecord] = {} - rat_records: tp.Dict[str, DRepRatRecord] = {} + drep1_state: dict[str, DRepStateRecord] = {} + drep2_state: dict[str, DRepStateRecord] = {} + rat_records: dict[str, DRepRatRecord] = {} # Register and delegate stake address def _delegate_addr( @@ -1769,7 +1769,7 @@ def _update_drep_activity( for i, d in enumerate(governance_data.dreps_reg, start=1) ] - votes: tp.List[governance_utils.VotesAllT] = [*votes_cc, *votes_drep] + votes: list[governance_utils.VotesAllT] = [*votes_cc, *votes_drep] vote_keys = [ *[r.hot_keys.hot_skey_file for r in governance_data.cc_key_members], *[r.key_pair.skey_file for r in governance_data.dreps_reg], @@ -1861,7 +1861,7 @@ def _dump_records() -> None: _state = {"drep1": drep1_state, "drep2": drep2_state, "rat_records": rat_records} pickle.dump(_state, out_data) - def _check_records() -> tp.List[blockers.GH]: + def _check_records() -> list[blockers.GH]: found_issues = [] assert drep1_state, "No DRep1 states" diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index d5476af9b..95c5957a7 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -104,10 +104,10 @@ class ClusterWithConstitutionRecord: cluster: clusterlib.ClusterLib constitution_script_file: pl.Path constitution_script_hash: str - default_constitution: tp.Dict[str, tp.Any] + default_constitution: dict[str, tp.Any] pool_user: clusterlib.PoolUser payment_addr: clusterlib.AddressRecord - collaterals: tp.List[clusterlib.UTXOData] + collaterals: list[clusterlib.UTXOData] 
@pytest.fixture @@ -217,7 +217,7 @@ def _enact_script_constitution(): def propose_param_changes( cluster_with_constitution: ClusterWithConstitutionRecord, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], ) -> str: """Build and submit update pparams action with specified proposals.""" cluster = cluster_with_constitution.cluster @@ -289,7 +289,7 @@ def propose_param_changes( def check_invalid_proposals( # noqa: C901 cluster_with_constitution: ClusterWithConstitutionRecord, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], ): """Check that the guardrails are enforced.""" action_txid = "" @@ -340,7 +340,7 @@ def check_invalid_proposals( # noqa: C901 def check_valid_proposals( cluster_with_constitution: ClusterWithConstitutionRecord, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], ): action_txid = propose_param_changes( cluster_with_constitution=cluster_with_constitution, proposals=proposals @@ -358,7 +358,7 @@ def _get_rational_str(value: float) -> str: def _get_param_min_value( cluster_with_constitution: ClusterWithConstitutionRecord, key: str -) -> tp.Union[float, int]: +) -> float | int: """Get the min value from the default constitution for a param.""" param_predicates = cluster_with_constitution.default_constitution[key] min_val_dicts = list(filter(lambda x: "minValue" in x, param_predicates["predicates"])) @@ -383,7 +383,7 @@ def _get_param_min_value( def _get_param_max_value( cluster_with_constitution: ClusterWithConstitutionRecord, key: str -) -> tp.Union[float, int]: +) -> float | int: """Get the max value from the default constitution for a param.""" param_predicates = cluster_with_constitution.default_constitution[key] max_value_dicts = list(filter(lambda x: "maxValue" in x, param_predicates["predicates"])) @@ -413,15 +413,15 @@ class GuardrailTestParam: param_key: str # key in the 
default constitution json file param_cli_arg: str # CLI argument for the parameter param_name: str # name of the protocol parameter - param_lower_limit: tp.Union[int, None] = None # optional lower limit of the parameter - param_upper_limit: tp.Union[int, None] = None # optional upper limit of the parameter + param_lower_limit: int | None = None # optional lower limit of the parameter + param_upper_limit: int | None = None # optional upper limit of the parameter def check_min_value_proposals( cluster_with_constitution: ClusterWithConstitutionRecord, param: GuardrailTestParam, - min_value: tp.Union[int, float], - dependent_proposals: tp.Union[tp.List[clusterlib_utils.UpdateProposal], tp.Tuple], + min_value: int | float, + dependent_proposals: list[clusterlib_utils.UpdateProposal] | tuple, ): """Check invalid proposals for min value predicate (must not be lower than).""" if min_value == 0: @@ -465,8 +465,8 @@ def check_min_value_proposals( def check_max_value_proposals( cluster_with_constitution: ClusterWithConstitutionRecord, param: GuardrailTestParam, - max_value: tp.Union[int, float], - dependent_proposals: tp.Union[tp.List[clusterlib_utils.UpdateProposal], tp.Tuple], + max_value: int | float, + dependent_proposals: list[clusterlib_utils.UpdateProposal] | tuple, type_upper_limit: int, ): """Check invalid proposals for max value predicate (must not exceed).""" @@ -492,7 +492,7 @@ def check_max_value_proposals( def perform_predicates_check_with_dependent_params( cluster_with_constitution: ClusterWithConstitutionRecord, param: GuardrailTestParam, - dependent_params: tp.List[GuardrailTestParam], + dependent_params: list[GuardrailTestParam], ): """ Check for predicates defined in the constitution with dependent parameters. 
@@ -555,7 +555,7 @@ def get_upper_limit_according_to_type(type: str) -> int: def perform_predicates_check( cluster_with_constitution: ClusterWithConstitutionRecord, param: GuardrailTestParam, - dependent_proposals: tp.Union[tp.List[clusterlib_utils.UpdateProposal], tp.Tuple] = (), + dependent_proposals: list[clusterlib_utils.UpdateProposal] | tuple = (), ): """Check for predicates defined in the constitution. diff --git a/cardano_node_tests/tests/tests_conway/test_info.py b/cardano_node_tests/tests/tests_conway/test_info.py index 3839f7ef7..e250ecc04 100644 --- a/cardano_node_tests/tests/tests_conway/test_info.py +++ b/cardano_node_tests/tests/tests_conway/test_info.py @@ -4,7 +4,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import pytest @@ -186,7 +185,7 @@ def test_info( ] [r.success() for r in (reqc.cli021, reqc.cip059)] - votes: tp.List[governance_utils.VotesAllT] = [*votes_cc, *votes_drep, *votes_spo] + votes: list[governance_utils.VotesAllT] = [*votes_cc, *votes_drep, *votes_spo] vote_keys = [ *[r.hot_keys.hot_skey_file for r in governance_data.cc_key_members], *[r.key_pair.skey_file for r in governance_data.dreps_reg], diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index 763256fcd..4c8a648db 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -5,7 +5,6 @@ import logging import pathlib as pl import random -import typing as tp import allure import pytest @@ -114,9 +113,9 @@ def _get_rational_str(value: float) -> str: def _check_w_denominator( - update_proposal: clusterlib_utils.UpdateProposal, pparam: tp.Union[float, dict] + update_proposal: clusterlib_utils.UpdateProposal, pparam: float | dict ) -> bool: - exp_val: tp.Union[float, dict, str] = pparam + exp_val: float | dict | str = pparam if isinstance(pparam, dict): exp_val = 
f"{pparam['numerator']}/{pparam['denominator']}" return bool(update_proposal.value == exp_val) @@ -649,7 +648,7 @@ def test_pparam_update( # noqa: C901 def _propose_pparams_update( name_template: str, - proposals: tp.List[clusterlib_utils.UpdateProposal], + proposals: list[clusterlib_utils.UpdateProposal], ) -> conway_common.PParamPropRec: anchor_url = f"http://www.pparam-action-{clusterlib.get_rand_str(4)}.com" anchor_data_hash = cluster.g_conway_governance.get_anchor_data_hash(text=anchor_url) @@ -669,7 +668,7 @@ def _propose_pparams_update( proposed_pparams_errors = [] def _check_proposed_pparams( - update_proposals: tp.List[clusterlib_utils.UpdateProposal], protocol_params: dict + update_proposals: list[clusterlib_utils.UpdateProposal], protocol_params: dict ) -> None: try: clusterlib_utils.check_updated_params( diff --git a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py index a756efe66..ceacb150c 100644 --- a/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py +++ b/cardano_node_tests/tests/tests_conway/test_treasury_withdrawals.py @@ -2,7 +2,6 @@ # pylint: disable=expression-not-assigned import logging -import typing as tp import allure import pytest @@ -272,7 +271,7 @@ def _cast_vote( ] ) - votes: tp.List[governance_utils.VotesAllT] = [*votes_cc, *votes_drep, *votes_spo] + votes: list[governance_utils.VotesAllT] = [*votes_cc, *votes_drep, *votes_spo] spo_keys = [r.skey_file for r in governance_data.pools_cold] if votes_spo else [] vote_keys = [ *[r.hot_keys.hot_skey_file for r in governance_data.cc_key_members], @@ -525,7 +524,7 @@ def test_expire_treasury_withdrawals( gov_state=action_gov_state, name_template=f"{temp_template}_action_{action_epoch}" ) - votes: tp.List[governance_utils.VotesAllT] = [] + votes: list[governance_utils.VotesAllT] = [] for action_ix in range(actions_num): prop_action = governance_utils.lookup_proposal( 
gov_state=action_gov_state, action_txid=action_txid, action_ix=action_ix diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index 5def7002a..600d08858 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -2,7 +2,6 @@ import logging import pathlib as pl -import typing as tp import allure import pytest @@ -49,7 +48,7 @@ def pool_user_lg( def payment_addrs_lg( cluster_manager: cluster_management.ClusterManager, cluster_lock_governance: governance_utils.GovClusterT, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" cluster, __ = cluster_lock_governance test_id = common.get_test_id(cluster) @@ -79,7 +78,7 @@ class TestUpdateBuiltIns: def test_update_in_pv9( self, cluster_lock_governance: governance_utils.GovClusterT, - payment_addrs_lg: tp.List[clusterlib.AddressRecord], + payment_addrs_lg: list[clusterlib.AddressRecord], pool_user_lg: clusterlib.PoolUser, ): """Test updating PlutusV2 cost model in PV9. 
diff --git a/cardano_node_tests/tests/tests_plutus/mint_build.py b/cardano_node_tests/tests/tests_plutus/mint_build.py index b6a9c840b..54ba6d284 100644 --- a/cardano_node_tests/tests/tests_plutus/mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/mint_build.py @@ -1,5 +1,4 @@ import logging -import typing as tp from cardano_clusterlib import clusterlib @@ -19,7 +18,7 @@ def _fund_issuer( amount: int, collateral_utxo_num: int = 1, submit_method: str = submit_utils.SubmitMethods.CLI, -) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund the token issuer.""" single_collateral_amount = minting_cost.collateral // collateral_utxo_num collateral_amounts = [single_collateral_amount for __ in range(collateral_utxo_num - 1)] diff --git a/cardano_node_tests/tests/tests_plutus/mint_raw.py b/cardano_node_tests/tests/tests_plutus/mint_raw.py index c4f976585..21d299e9b 100644 --- a/cardano_node_tests/tests/tests_plutus/mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus/mint_raw.py @@ -1,5 +1,4 @@ import logging -import typing as tp from cardano_clusterlib import clusterlib @@ -20,7 +19,7 @@ def _fund_issuer( amount: int, fee_txsize: int = FEE_MINT_TXSIZE, collateral_utxo_num: int = 1, -) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund the token issuer.""" single_collateral_amount = minting_cost.collateral // collateral_utxo_num collateral_amounts = [single_collateral_amount for __ in range(collateral_utxo_num - 1)] diff --git a/cardano_node_tests/tests/tests_plutus/spend_build.py b/cardano_node_tests/tests/tests_plutus/spend_build.py index 2b82085d1..2151f830e 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_build.py +++ 
b/cardano_node_tests/tests/tests_plutus/spend_build.py @@ -22,13 +22,13 @@ def _build_fund_script( dst_addr: clusterlib.AddressRecord, plutus_op: plutus_common.PlutusOp, tokens: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` tokens_collateral: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` embed_datum: bool = False, -) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund a Plutus script and create the locked UTxO and collateral UTxO. Uses `cardano-cli transaction build` command for building the transactions. @@ -133,8 +133,8 @@ def _build_spend_locked_txin( # noqa: C901 cluster_obj: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, dst_addr: clusterlib.AddressRecord, - script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], plutus_op: plutus_common.PlutusOp, amount: int, deposit_amount: int = 0, @@ -142,11 +142,11 @@ def _build_spend_locked_txin( # noqa: C901 tx_files: tp.Optional[clusterlib.TxFiles] = None, invalid_hereafter: tp.Optional[int] = None, invalid_before: tp.Optional[int] = None, - tokens: tp.Optional[tp.List[plutus_common.Token]] = None, + tokens: tp.Optional[list[plutus_common.Token]] = None, expect_failure: bool = False, script_valid: bool = True, submit_tx: bool = True, -) -> tp.Tuple[str, tp.Optional[clusterlib.TxRawOutput], list]: +) -> tuple[str, tp.Optional[clusterlib.TxRawOutput], list]: """Spend the locked UTxO. Uses `cardano-cli transaction build` command for building the transactions. 
diff --git a/cardano_node_tests/tests/tests_plutus/spend_raw.py b/cardano_node_tests/tests/tests_plutus/spend_raw.py index 94f4ec571..4d33f305c 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/spend_raw.py @@ -28,14 +28,14 @@ def _fund_script( fee_txsize: int = FEE_REDEEM_TXSIZE, deposit_amount: int = 0, tokens: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` tokens_collateral: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` collateral_fraction_offset: float = 1.0, embed_datum: bool = False, -) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund a Plutus script and create the locked UTxO and collateral UTxO.""" # pylint: disable=too-many-locals,too-many-arguments assert plutus_op.execution_cost # for mypy @@ -130,8 +130,8 @@ def _spend_locked_txin( # noqa: C901 temp_template: str, cluster_obj: clusterlib.ClusterLib, dst_addr: clusterlib.AddressRecord, - script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], plutus_op: plutus_common.PlutusOp, amount: int, fee_txsize: int = FEE_REDEEM_TXSIZE, @@ -139,11 +139,11 @@ def _spend_locked_txin( # noqa: C901 tx_files: tp.Optional[clusterlib.TxFiles] = None, invalid_hereafter: tp.Optional[int] = None, invalid_before: tp.Optional[int] = None, - tokens: tp.Optional[tp.List[plutus_common.Token]] = None, + tokens: tp.Optional[list[plutus_common.Token]] = None, expect_failure: bool = False, script_valid: bool = True, submit_tx: bool = True, -) -> tp.Tuple[str, clusterlib.TxRawOutput]: +) -> tuple[str, clusterlib.TxRawOutput]: """Spend the locked UTxO.""" 
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches assert plutus_op.execution_cost diff --git a/cardano_node_tests/tests/tests_plutus/test_delegation.py b/cardano_node_tests/tests/tests_plutus/test_delegation.py index c8d34be8c..9faf40ee4 100644 --- a/cardano_node_tests/tests/tests_plutus/test_delegation.py +++ b/cardano_node_tests/tests/tests_plutus/test_delegation.py @@ -41,7 +41,7 @@ @pytest.fixture def cluster_lock_42stake( cluster_manager: cluster_management.ClusterManager, -) -> tp.Tuple[clusterlib.ClusterLib, str]: +) -> tuple[clusterlib.ClusterLib, str]: """Make sure just one staking Plutus test run at a time. Plutus script always has the same address. When one script is used in multiple @@ -74,7 +74,7 @@ def cluster_lock_42stake( @pytest.fixture def pool_user( cluster_manager: cluster_management.ClusterManager, - cluster_lock_42stake: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_42stake: tuple[clusterlib.ClusterLib, str], ) -> delegation.PoolUserScript: """Create pool user.""" cluster, *__ = cluster_lock_42stake @@ -116,14 +116,14 @@ def pool_user( def register_delegate_stake_addr( cluster_obj: clusterlib.ClusterLib, temp_template: str, - txins: tp.List[clusterlib.UTXOData], - collaterals: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], + collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, pool_id: str, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[tp.List[clusterlib.UTXOData]], + reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], use_build_cmd: bool, -) -> tp.Tuple[clusterlib.TxRawOutput, tp.List[dict]]: +) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Submit registration certificate and delegate to pool.""" # Create stake address registration cert stake_addr_reg_cert_file = cluster_obj.g_stake_address.gen_stake_addr_registration_cert( @@ -216,13 +216,13 @@ def register_delegate_stake_addr( def register_stake_addr( cluster_obj: clusterlib.ClusterLib, 
temp_template: str, - txins: tp.List[clusterlib.UTXOData], - collaterals: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], + collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[tp.List[clusterlib.UTXOData]], + reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], use_build_cmd: bool, -) -> tp.Tuple[clusterlib.TxRawOutput, tp.List[dict]]: +) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Register a stake address.""" # Create stake address registration cert stake_addr_reg_cert_file = cluster_obj.g_stake_address.gen_stake_addr_registration_cert( @@ -298,14 +298,14 @@ def register_stake_addr( def delegate_stake_addr( cluster_obj: clusterlib.ClusterLib, temp_template: str, - txins: tp.List[clusterlib.UTXOData], - collaterals: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], + collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, pool_id: str, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[tp.List[clusterlib.UTXOData]], + reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], use_build_cmd: bool, -) -> tp.Tuple[clusterlib.TxRawOutput, tp.List[dict]]: +) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Delegate a stake address to a pool.""" # Create stake address delegation cert stake_addr_deleg_cert_file = cluster_obj.g_stake_address.gen_stake_addr_delegation_cert( @@ -383,13 +383,13 @@ def delegate_stake_addr( def deregister_stake_addr( cluster_obj: clusterlib.ClusterLib, temp_template: str, - txins: tp.List[clusterlib.UTXOData], - collaterals: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], + collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[tp.List[clusterlib.UTXOData]], + reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], use_build_cmd: bool, -) -> 
tp.Tuple[clusterlib.TxRawOutput, tp.List[dict]]: +) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Deregister stake address.""" src_payment_balance = cluster_obj.g_query.get_address_balance(pool_user.payment.address) reward_balance = cluster_obj.g_query.get_stake_addr_info( @@ -503,7 +503,7 @@ class TestRegisterAddr: @pytest.mark.dbsync def test_register_deregister( self, - cluster_lock_42stake: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_42stake: tuple[clusterlib.ClusterLib, str], pool_user: delegation.PoolUserScript, plutus_version: str, use_build_cmd: bool, @@ -667,7 +667,7 @@ class TestDelegateAddr: @pytest.mark.dbsync def test_delegate_deregister( # noqa: C901 self, - cluster_lock_42stake: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_42stake: tuple[clusterlib.ClusterLib, str], pool_user: delegation.PoolUserScript, plutus_version: str, use_build_cmd: bool, @@ -878,7 +878,7 @@ def test_delegate_deregister( # noqa: C901 @pytest.mark.dbsync def test_register_delegate_deregister( self, - cluster_lock_42stake: tp.Tuple[clusterlib.ClusterLib, str], + cluster_lock_42stake: tuple[clusterlib.ClusterLib, str], pool_user: delegation.PoolUserScript, plutus_version: str, use_build_cmd: bool, diff --git a/cardano_node_tests/tests/tests_plutus/test_lobster.py b/cardano_node_tests/tests/tests_plutus/test_lobster.py index c906b5e1e..5edcaf499 100644 --- a/cardano_node_tests/tests/tests_plutus/test_lobster.py +++ b/cardano_node_tests/tests/tests_plutus/test_lobster.py @@ -39,7 +39,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -65,7 +65,7 @@ def _fund_issuer( issuer_addr: clusterlib.AddressRecord, amount: int, collateral_amount: int, -) -> tp.Tuple[tp.List[clusterlib.UTXOData], 
tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund the token issuer.""" tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], @@ -111,11 +111,11 @@ def _mint_lobster_nft( cluster_obj: clusterlib.ClusterLib, temp_template: str, issuer_addr: clusterlib.AddressRecord, - mint_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + mint_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], nft_amount: int, lovelace_amount: int, -) -> tp.Tuple[str, tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[str, list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Mint the LobsterNFT token.""" lobster_policyid = cluster_obj.g_transaction.get_policyid(NFT_MINT_PLUTUS) asset_name = b"LobsterNFT".hex() @@ -190,11 +190,11 @@ def _deploy_lobster_nft( temp_template: str, payment_addr: clusterlib.AddressRecord, issuer_addr: clusterlib.AddressRecord, - token_utxos: tp.List[clusterlib.UTXOData], + token_utxos: list[clusterlib.UTXOData], lobster_nft_token: str, nft_amount: int, lovelace_amount: int, -) -> tp.Tuple[str, tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput]: +) -> tuple[str, list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Deploy the LobsterNFT token to script address.""" script_address = cluster_obj.g_address.gen_payment_addr( addr_name=f"{temp_template}_deploy_nft", payment_script_file=LOBSTER_PLUTUS @@ -255,7 +255,7 @@ class TestLobsterChallenge: @pytest.mark.dbsync @pytest.mark.testnets def test_lobster_name( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Test the Lobster Challenge. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index 28cbb7fdc..8c4e6b87c 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -5,7 +5,6 @@ import logging import pathlib as pl import shutil -import typing as tp import allure import pytest @@ -38,7 +37,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -65,10 +64,8 @@ def past_horizon_funds( self, cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - ) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput - ]: + payment_addrs: list[clusterlib.AddressRecord], + ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Create UTxOs for `test_ttl_horizon`.""" with cluster_manager.cache_fixture() as fixture_cache: if fixture_cache.value: @@ -107,7 +104,7 @@ def past_horizon_funds( def test_minting_one_token( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -248,7 +245,7 @@ def test_minting_one_token( def test_minting_missing_txout( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -366,7 +363,7 @@ def test_minting_missing_txout( def test_time_range_minting( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: 
list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -527,7 +524,7 @@ def test_time_range_minting( def test_two_scripts_minting( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -793,7 +790,7 @@ def test_two_scripts_minting( def test_minting_context_equivalence( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], submit_method: str, ): """Test context equivalence while minting a token. @@ -994,7 +991,7 @@ def test_minting_context_equivalence( def test_witness_redeemer( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], key: str, plutus_version: str, submit_method: str, @@ -1153,9 +1150,9 @@ def test_witness_redeemer( def test_ttl_horizon( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - past_horizon_funds: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], clusterlib.TxRawOutput + payment_addrs: list[clusterlib.AddressRecord], + past_horizon_funds: tuple[ + list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput ], plutus_version: str, ttl_offset: int, @@ -1273,7 +1270,7 @@ class TestCollateralOutput: def test_duplicated_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -1431,7 +1428,7 @@ def payment_addrs( skip_bootstrap: None, # noqa: ARG002 cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, - ) -> tp.List[clusterlib.AddressRecord]: + ) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = 
clusterlib_utils.create_payment_addr_records( @@ -1452,7 +1449,7 @@ def payment_addrs( def run_scenario( self, cluster_obj: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_v_record: plutus_common.PlutusScriptData, success_expected: bool, ): @@ -1544,7 +1541,7 @@ def test_plutus_success( self, skip_bootstrap: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], script: plutus_common.PlutusScriptData, ): """Test scenarios that are supposed to succeed.""" @@ -1567,7 +1564,7 @@ def test_plutus_fail( self, skip_bootstrap: None, # noqa: ARG002 cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], script: plutus_common.PlutusScriptData, ): """Test scenarios that are supposed to fail.""" diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py index e8b235726..02308cd2d 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py @@ -4,7 +4,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -30,8 +29,8 @@ pytest.mark.plutus, ] -FundTupleT = tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], tp.List[clusterlib.AddressRecord] +FundTupleT = tuple[ + list[clusterlib.UTXOData], list[clusterlib.UTXOData], list[clusterlib.AddressRecord] ] @@ -39,7 +38,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -65,7 
+64,7 @@ class TestBuildMintingNegative: def fund_issuer_long_asset_name( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ) -> FundTupleT: """Fund the token issuer and create the collateral UTxO.""" temp_template = common.get_test_id(cluster) @@ -101,7 +100,7 @@ def fund_issuer_long_asset_name( def test_witness_redeemer_missing_signer( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, submit_method: str, ): @@ -200,7 +199,7 @@ def test_witness_redeemer_missing_signer( def test_redeemer_with_simple_minting_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test minting a token passing a redeemer for a simple minting script. @@ -380,7 +379,7 @@ def test_asset_name_too_long( def test_time_range_missing_tx_validity( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a time constraints Plutus script and no TX validity. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py index 2471f9f85..657079510 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py @@ -3,7 +3,6 @@ import dataclasses import datetime import logging -import typing as tp import allure import hypothesis @@ -31,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -61,12 +60,10 @@ def pparams(self, cluster: clusterlib.ClusterLib) -> dict: def fund_execution_units_above_limit( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pparams: dict, request: SubRequest, - ) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], plutus_common.PlutusOp - ]: + ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], plutus_common.PlutusOp]: plutus_version = request.param temp_template = common.get_test_id(cluster) @@ -109,7 +106,7 @@ def fund_execution_units_above_limit( def test_witness_redeemer_missing_signer( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a Plutus script with invalid signers. @@ -206,7 +203,7 @@ def test_witness_redeemer_missing_signer( def test_low_budget( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token when budget is too low. 
@@ -304,7 +301,7 @@ def test_low_budget( def test_low_fee( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token when fee is set too low. @@ -406,9 +403,9 @@ def test_low_fee( def test_execution_units_above_limit( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_execution_units_above_limit: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], plutus_common.PlutusOp + payment_addrs: list[clusterlib.AddressRecord], + fund_execution_units_above_limit: tuple[ + list[clusterlib.UTXOData], list[clusterlib.UTXOData], plutus_common.PlutusOp ], pparams: dict, data: st.DataObject, @@ -524,7 +521,7 @@ def test_execution_units_above_limit( def test_time_range_missing_tx_validity( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a time constraints Plutus script and no TX validity. @@ -630,7 +627,7 @@ class TestNegativeCollateral: def test_minting_with_invalid_collaterals( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a Plutus script with invalid collaterals. @@ -731,7 +728,7 @@ def test_minting_with_invalid_collaterals( def test_minting_with_insufficient_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a Plutus script with insufficient collateral. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py index 1090315cc..d9c6a4a95 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py @@ -5,7 +5,6 @@ import logging import pathlib as pl import shutil -import typing as tp import allure import pytest @@ -34,7 +33,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -110,7 +109,7 @@ class TestMinting: def test_minting_two_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting two tokens with a single Plutus script. @@ -257,7 +256,7 @@ def test_minting_two_tokens( def test_witness_redeemer( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], key: str, plutus_version: str, ): @@ -390,7 +389,7 @@ def test_witness_redeemer( def test_time_range_minting( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a time constraints Plutus script. @@ -518,7 +517,7 @@ def test_time_range_minting( def test_two_scripts_minting( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting two tokens with two different Plutus scripts. 
@@ -741,7 +740,7 @@ def test_two_scripts_minting( def test_minting_policy_executed_once1( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test that minting policy is executed only once even when the same policy is used twice. @@ -899,7 +898,7 @@ def test_minting_policy_executed_once1( def test_minting_policy_executed_once2( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test that minting policy is executed only once even when the same policy is used twice. @@ -1058,7 +1057,7 @@ def test_minting_policy_executed_once2( @pytest.mark.testnets @pytest.mark.dbsync def test_minting_context_equivalence( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Test context equivalence while minting a token. @@ -1214,7 +1213,7 @@ def test_minting_context_equivalence( def test_ttl_horizon( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ttl_offset: int, plutus_version: str, ): @@ -1359,7 +1358,7 @@ class TestCollateralOutput: def test_duplicated_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a Plutus script while using the same collateral input twice. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_build.py index 274894ddd..8901501f3 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_build.py @@ -33,7 +33,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -56,7 +56,7 @@ def payment_addrs( def pool_users( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create new pool users.""" test_id = common.get_test_id(cluster) created_users = clusterlib_utils.create_pool_users( @@ -87,7 +87,7 @@ class TestBuildLocking: def test_txout_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with a Plutus script and spending the locked UTxO. @@ -156,7 +156,7 @@ def test_txout_locking( def test_context_equivalence( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Test context equivalence while spending a locked UTxO. 
@@ -272,7 +272,7 @@ def test_context_equivalence( def test_guessing_game( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], variant: str, plutus_version: str, embed_datum: bool, @@ -385,7 +385,7 @@ def test_guessing_game( def test_two_scripts_spending( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking two Tx outputs with two different Plutus scripts in single Tx. @@ -646,7 +646,7 @@ def test_two_scripts_spending( def test_always_fails( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with a Plutus script and spending the locked UTxO. @@ -704,7 +704,7 @@ def test_always_fails( def test_script_invalid( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test failing script together with the `--script-invalid` argument - collateral is taken. @@ -783,7 +783,7 @@ def test_script_invalid( def test_txout_token_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with a Plutus script and spending the locked UTxO. @@ -862,7 +862,7 @@ def test_txout_token_locking( def test_partial_spending( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending part of funds (Lovelace and native tokens) on a locked UTxO. 
@@ -987,7 +987,7 @@ def test_partial_spending( def test_collateral_is_txin( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while using single UTxO for both collateral and Tx input. diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py index c19bec9ed..b45ae84cd 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py @@ -1,7 +1,6 @@ """Compatibility tests for spending with Plutus using `transaction build`.""" import logging -import typing as tp import allure import pytest @@ -27,7 +26,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -60,7 +59,7 @@ class TestCompatibility: def test_plutusv2_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test spending a UTxO locked with PlutusV2 script using old Tx era. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py index 8b72a49bf..ef6b0e2b7 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py @@ -1,7 +1,6 @@ """Compatibility tests for spending with Plutus using `transaction build-raw`.""" import logging -import typing as tp import allure import pytest @@ -26,7 +25,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -59,7 +58,7 @@ class TestCompatibility: def test_plutusv2_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test spending a UTxO locked with PlutusV2 script using old Tx era. @@ -113,7 +112,7 @@ def test_plutusv2_old_tx_era( def test_plutusv1_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test spending a UTxO locked with PlutusV1 script using old Tx era. 
diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py index 1d742cd06..f7ad255c4 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py @@ -3,7 +3,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -34,7 +33,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -63,7 +62,7 @@ class TestDatum: def test_datum_on_key_credential_address( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test creating UTxO with datum on address with key credentials (non-script address). @@ -112,7 +111,7 @@ def test_datum_on_key_credential_address( def test_embed_datum_without_pparams( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test 'build --tx-out-datum-embed' without providing protocol params file.""" @@ -174,7 +173,7 @@ class TestNegativeDatum: def pbt_script_addresses( self, cluster: clusterlib.ClusterLib, - ) -> tp.Dict[str, str]: + ) -> dict[str, str]: """Get Plutus script addresses. Meant for property-based tests, so this expensive operation gets executed only once. 
@@ -199,7 +198,7 @@ def pbt_script_addresses( def test_no_datum_txout( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], address_type: str, plutus_version: str, ): @@ -300,7 +299,7 @@ def test_no_datum_txout( def test_lock_tx_invalid_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], datum_value: str, plutus_version: str, ): @@ -340,7 +339,7 @@ def test_lock_tx_invalid_datum( def test_unlock_tx_wrong_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output and try to spend it with a wrong datum. @@ -401,7 +400,7 @@ def test_unlock_tx_wrong_datum( def test_unlock_non_script_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Try to spend a non-script UTxO with datum as if it was script locked UTxO. 
@@ -494,8 +493,8 @@ def test_unlock_non_script_utxo( def test_too_big( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - pbt_script_addresses: tp.Dict[str, str], + payment_addrs: list[clusterlib.AddressRecord], + pbt_script_addresses: dict[str, str], datum_value: bytes, plutus_version: str, ): diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py index 71fa300a6..4d6dd0b37 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py @@ -3,7 +3,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -31,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -60,7 +59,7 @@ class TestDatum: def test_datum_on_key_credential_address( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test creating UTxO with datum on address with key credentials (non-script address).""" temp_template = common.get_test_id(cluster) @@ -101,7 +100,7 @@ class TestNegativeDatum: def pbt_highest_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ) -> clusterlib.UTXOData: """Get UTxO with highest amount of Lovelace. @@ -113,7 +112,7 @@ def pbt_highest_utxo( def pbt_script_addresses( self, cluster: clusterlib.ClusterLib, - ) -> tp.Dict[str, str]: + ) -> dict[str, str]: """Get Plutus script addresses. Meant for property-based tests, so this expensive operation gets executed only once. 
@@ -138,7 +137,7 @@ def pbt_script_addresses( def test_no_datum_txout( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], address_type: str, plutus_version: str, ): @@ -222,7 +221,7 @@ def test_no_datum_txout( def test_lock_tx_invalid_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], datum_value: str, plutus_version: str, ): @@ -266,7 +265,7 @@ def test_lock_tx_invalid_datum( def test_unlock_tx_wrong_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output and try to spend it with a wrong datum. @@ -327,7 +326,7 @@ def test_unlock_tx_wrong_datum( def test_unlock_non_script_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Try to spend a non-script UTxO with datum as if it was script locked UTxO. 
@@ -419,9 +418,9 @@ def test_unlock_non_script_utxo( def test_too_big( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, - pbt_script_addresses: tp.Dict[str, str], + pbt_script_addresses: dict[str, str], datum_value: bytes, plutus_version: str, ): diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py index bbd81db5a..a7985401f 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py @@ -4,7 +4,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -34,7 +33,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -64,7 +63,7 @@ class TestNegative: def test_wrong_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while using wrong Plutus script. @@ -120,7 +119,7 @@ def test_wrong_script( def test_no_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while passing no Plutus script. 
@@ -174,7 +173,7 @@ def test_no_script( def test_collateral_w_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while collateral contains native tokens. @@ -250,7 +249,7 @@ def test_collateral_w_tokens( def test_same_collateral_txin( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while using the same UTxO as collateral. @@ -324,7 +323,7 @@ def test_same_collateral_txin( def test_invalid_guessing_game( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], variant: str, plutus_version: str, ): @@ -394,7 +393,7 @@ def test_invalid_guessing_game( def test_two_scripts_spending_one_fail( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking two Tx outputs with two different Plutus scripts in single Tx, one fails. @@ -552,8 +551,8 @@ class TestNegativeRedeemer: def fund_script_guessing_game_v1( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - ) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData]]: + payment_addrs: list[clusterlib.AddressRecord], + ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]]: """Fund a PlutusV1 script and create the locked UTxO and collateral UTxO. Uses `cardano-cli transaction build` command for building the transactions. 
@@ -580,8 +579,8 @@ def fund_script_guessing_game_v1( def fund_script_guessing_game_v2( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - ) -> tp.Tuple[tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData]]: + payment_addrs: list[clusterlib.AddressRecord], + ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]]: """Fund a PlutusV2 script and create the locked UTxO and collateral UTxO. Uses `cardano-cli transaction build` command for building the transactions. @@ -608,8 +607,8 @@ def _int_out_of_range( self, cluster: clusterlib.ClusterLib, temp_template: str, - script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], payment_addr: clusterlib.AddressRecord, dst_addr: clusterlib.AddressRecord, redeemer_value: int, @@ -666,13 +665,9 @@ def _int_out_of_range( def test_wrong_value_inside_range( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: int, ): @@ -733,13 +728,9 @@ def test_wrong_value_inside_range( def test_wrong_value_above_range( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: 
list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: int, ): @@ -775,13 +766,9 @@ def test_wrong_value_above_range( def test_wrong_value_bellow_range( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: int, ): @@ -816,13 +803,9 @@ def test_wrong_value_bellow_range( def test_wrong_type( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: bytes, ): @@ -874,13 +857,9 @@ def test_wrong_type( def test_too_big( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: 
list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: bytes, ): @@ -935,13 +914,9 @@ def test_too_big( def test_json_schema_typed_int_bytes_declared( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: bytes, ): @@ -996,13 +971,9 @@ def test_json_schema_typed_int_bytes_declared( def test_json_schema_untyped_int_bytes_declared( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: bytes, ): @@ -1057,13 +1028,9 @@ def test_json_schema_untyped_int_bytes_declared( def test_json_schema_typed_bytes_int_declared( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: 
tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: int, ): @@ -1119,13 +1086,9 @@ def test_json_schema_typed_bytes_int_declared( def test_json_schema_untyped_bytes_int_declared( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: int, ): @@ -1181,13 +1144,9 @@ def test_json_schema_untyped_bytes_int_declared( def test_invalid_json( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_value: str, ): @@ -1239,13 +1198,9 @@ def test_invalid_json( def test_json_schema_typed_invalid_type( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], 
tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_type: str, ): @@ -1302,13 +1257,9 @@ def test_json_schema_typed_invalid_type( def test_json_schema_untyped_invalid_type( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_guessing_game_v1: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], - fund_script_guessing_game_v2: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_guessing_game_v1: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], + fund_script_guessing_game_v2: tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData]], plutus_version: str, redeemer_type: str, ): diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py index c4d2cf309..036e4aacb 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py @@ -29,8 +29,8 @@ ] -FundTupleT = tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], tp.List[clusterlib.AddressRecord] +FundTupleT = tuple[ + list[clusterlib.UTXOData], list[clusterlib.UTXOData], list[clusterlib.AddressRecord] ] @@ -38,7 +38,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = 
clusterlib_utils.create_payment_addr_records( @@ -68,11 +68,9 @@ def pparams(self, cluster: clusterlib.ClusterLib) -> dict: def fund_execution_units_above_limit( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], plutus_common.PlutusOp - ]: + ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], plutus_common.PlutusOp]: plutus_version = request.param temp_template = common.get_test_id(cluster) @@ -110,7 +108,7 @@ def fund_execution_units_above_limit( def test_invalid_guessing_game( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], variant: str, plutus_version: str, ): @@ -178,7 +176,7 @@ def test_invalid_guessing_game( def test_wrong_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while using wrong Plutus script. @@ -236,7 +234,7 @@ def test_wrong_script( def test_no_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while passing no Plutus script. @@ -289,7 +287,7 @@ def test_no_script( def test_collateral_w_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while collateral contains native tokens. 
@@ -356,7 +354,7 @@ def test_collateral_w_tokens( def test_same_collateral_txin( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending the locked UTxO while using the same UTxO as collateral. @@ -410,7 +408,7 @@ def test_same_collateral_txin( def test_collateral_percent( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Try to spend locked UTxO while collateral is less than required. @@ -468,7 +466,7 @@ def test_collateral_percent( def test_two_scripts_spending_one_fail( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking two Tx outputs with two different Plutus scripts in single Tx, one fails. @@ -640,9 +638,9 @@ def test_two_scripts_spending_one_fail( def test_execution_units_above_limit( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_execution_units_above_limit: tp.Tuple[ - tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData], plutus_common.PlutusOp + payment_addrs: list[clusterlib.AddressRecord], + fund_execution_units_above_limit: tuple[ + list[clusterlib.UTXOData], list[clusterlib.UTXOData], plutus_common.PlutusOp ], pparams: dict, data: st.DataObject, @@ -798,8 +796,8 @@ def _failed_tx_build( self, cluster_obj: clusterlib.ClusterLib, temp_template: str, - script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], redeemer_content: str, dst_addr: clusterlib.AddressRecord, cost_per_unit: plutus_common.ExecutionCost, @@ -849,8 +847,8 @@ def _int_out_of_range( self, cluster_obj: clusterlib.ClusterLib, temp_template: str, - 
script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], redeemer_value: int, dst_addr: clusterlib.AddressRecord, cost_per_unit: plutus_common.ExecutionCost, diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py index 2cb665647..afe8e85fc 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py @@ -37,7 +37,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -60,7 +60,7 @@ def payment_addrs( def pool_users( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: """Create new pool users.""" test_id = common.get_test_id(cluster) created_users = clusterlib_utils.create_pool_users( @@ -139,7 +139,7 @@ class TestLocking: def test_txout_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with a Plutus script and spending the locked UTxO. @@ -198,7 +198,7 @@ def test_txout_locking( def test_context_equivalence( self, cluster: clusterlib.ClusterLib, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], ): """Test context equivalence while spending a locked UTxO. 
@@ -311,7 +311,7 @@ def test_context_equivalence( def test_guessing_game( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], embed_datum: bool, variant: str, plutus_version: str, @@ -412,7 +412,7 @@ def test_guessing_game( def test_two_scripts_spending( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking two Tx outputs with two different Plutus scripts in single Tx. @@ -641,7 +641,7 @@ def test_two_scripts_spending( def test_always_fails( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with a Plutus script and spending the locked UTxO. @@ -701,7 +701,7 @@ def test_always_fails( def test_script_invalid( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test failing script together with the `--script-invalid` argument - collateral is taken. @@ -768,7 +768,7 @@ def test_script_invalid( def test_txout_token_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test locking a Tx output with native tokens and spending the locked UTxO. @@ -831,7 +831,7 @@ def test_txout_token_locking( def test_partial_spending( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spending part of funds (Lovelace and native tokens) on a locked UTxO. 
@@ -923,7 +923,7 @@ def test_partial_spending( def test_collaterals( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], scenario: str, plutus_version: str, ): @@ -951,7 +951,7 @@ def test_collaterals( if scenario == "max": collateral_num = max_collateral_ins - exp_errors: tp.Tuple[str, ...] = () + exp_errors: tuple[str, ...] = () collateral_fraction_offset = 250_000.0 elif scenario == "max+1": collateral_num = max_collateral_ins + 1 diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py index 438aa153c..528012b5d 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py @@ -19,9 +19,9 @@ def _fund_issuer( amount: int, reference_script: tp.Optional[pl.Path] = None, inline_datum: tp.Optional[pl.Path] = None, -) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], - tp.List[clusterlib.UTXOData], +) -> tuple[ + list[clusterlib.UTXOData], + list[clusterlib.UTXOData], tp.Optional[clusterlib.UTXOData], clusterlib.TxRawOutput, ]: diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py index 3e709314b..e21e138f7 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py @@ -24,9 +24,9 @@ def _fund_issuer( collateral_utxo_num: int = 1, reference_script: tp.Optional[pl.Path] = None, datum_file: tp.Optional[pl.Path] = None, -) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], - tp.List[clusterlib.UTXOData], +) -> tuple[ + list[clusterlib.UTXOData], + list[clusterlib.UTXOData], tp.Optional[clusterlib.UTXOData], clusterlib.TxRawOutput, ]: diff --git a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py index 14cd57238..3e21423a8 100644 --- 
a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py @@ -52,11 +52,11 @@ def _build_fund_script( use_inline_datum: bool = True, collateral_amount: tp.Optional[int] = None, tokens_collateral: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` -) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], - tp.List[clusterlib.UTXOData], +) -> tuple[ + list[clusterlib.UTXOData], + list[clusterlib.UTXOData], tp.Optional[clusterlib.UTXOData], clusterlib.TxRawOutput, ]: @@ -184,7 +184,7 @@ def _build_reference_txin( amount: int, payment_addr: clusterlib.AddressRecord, dst_addr: tp.Optional[clusterlib.AddressRecord] = None, -) -> tp.List[clusterlib.UTXOData]: +) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. Uses `cardano-cli transaction build` command for building the transaction. diff --git a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py index 7de85df3a..e129b0ea1 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py @@ -48,11 +48,11 @@ def _fund_script( use_inline_datum: bool = False, collateral_amount: tp.Optional[int] = None, tokens_collateral: tp.Optional[ - tp.List[plutus_common.Token] + list[plutus_common.Token] ] = None, # tokens must already be in `payment_addr` -) -> tp.Tuple[ - tp.List[clusterlib.UTXOData], - tp.List[clusterlib.UTXOData], +) -> tuple[ + list[clusterlib.UTXOData], + list[clusterlib.UTXOData], tp.Optional[clusterlib.UTXOData], clusterlib.TxRawOutput, ]: @@ -170,7 +170,7 @@ def _build_reference_txin( amount: int, payment_addr: clusterlib.AddressRecord, dst_addr: tp.Optional[clusterlib.AddressRecord] = None, -) -> tp.List[clusterlib.UTXOData]: +) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. 
Uses `cardano-cli transaction build-raw` command for building the transaction. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py index 4ea8035a5..4be1a63d4 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py @@ -30,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -56,7 +56,7 @@ def _build_reference_txin( payment_addr: clusterlib.AddressRecord, dst_addr: tp.Optional[clusterlib.AddressRecord] = None, inline_datum: tp.Optional[pl.Path] = None, -) -> tp.List[clusterlib.UTXOData]: +) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. Uses `cardano-cli transaction build` command for building the transaction. @@ -109,7 +109,7 @@ class TestBuildMinting: def test_minting_ref_one_token( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with reference Plutus script. @@ -254,7 +254,7 @@ def test_minting_ref_one_token( def test_minting_ref_missing_txout( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with reference Plutus script without providing TxOut for the token. 
@@ -366,7 +366,7 @@ def test_minting_ref_missing_txout( def test_reference_inputs_visibility( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], valid_redeemer: bool, ): """ @@ -513,7 +513,7 @@ def test_reference_inputs_visibility( def test_reference_scripts_visibility( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], valid_redeemer: bool, ): """Test visibility of reference inputs by a plutus script. @@ -644,7 +644,7 @@ def test_reference_scripts_visibility( def test_inline_datum_visibility( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], scenario: str, ): """ diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py index 9c6fc2a16..780fbc052 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py @@ -1,7 +1,6 @@ """Negative tests for minting with Plutus V2 using `transaction build`.""" import logging -import typing as tp import allure import pytest @@ -27,7 +26,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -61,7 +60,7 @@ class TestNegativeCollateralOutput: def test_minting_with_unbalanced_total_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], with_return_collateral: bool, plutus_version: str, ): diff --git 
a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py index db090d7ad..da8e43dea 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py @@ -1,7 +1,6 @@ """Negative tests for minting with Plutus V2 using `transaction build-raw`.""" import logging -import typing as tp import allure import pytest @@ -26,7 +25,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -55,7 +54,7 @@ class TestNegativeCollateralOutput: def test_minting_with_limited_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test minting a token with a Plutus script with limited collateral amount. 
@@ -173,7 +172,7 @@ def test_minting_with_limited_collateral( def test_minting_with_unbalanced_total_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], with_return_collateral: bool, plutus_version: str, ): diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py index 4a277844a..222fe999b 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py @@ -30,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -56,7 +56,7 @@ def _build_reference_txin( payment_addr: clusterlib.AddressRecord, dst_addr: tp.Optional[clusterlib.AddressRecord] = None, datum_file: tp.Optional[pl.Path] = None, -) -> tp.List[clusterlib.UTXOData]: +) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. Uses `cardano-cli transaction build-raw` command for building the transaction. @@ -102,7 +102,7 @@ class TestMinting: def test_minting_two_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_reference_script: bool, plutus_version: str, ): @@ -239,7 +239,7 @@ def test_minting_two_tokens( def test_datum_hash_visibility( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], scenario: str, ): """Test visibility of datum hash on reference inputs by the plutus script. 
@@ -392,7 +392,7 @@ def test_datum_hash_visibility( def test_missing_builtin( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test builtins added to PlutusV2 from PlutusV3. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py index bd0c290cb..6f05faf83 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py @@ -28,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -52,7 +52,7 @@ def _fund_issuer_mint_token( self, cluster_obj: clusterlib.ClusterLib, temp_template: str, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], script_file: pl.Path, redeemer_file: pl.Path, ): @@ -139,7 +139,7 @@ def _fund_issuer_mint_token( def test_use_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], algorithm: str, plutus_version: str, ): @@ -216,7 +216,7 @@ def test_use_secp_builtin_functions( def test_negative_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], test_vector: str, algorithm: str, plutus_version: str, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py index 580ffd52f..56e943085 100644 --- 
a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py @@ -28,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -52,7 +52,7 @@ def _fund_issuer_mint_token( self, cluster_obj: clusterlib.ClusterLib, temp_template: str, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], script_file: pl.Path, redeemer_file: pl.Path, ): @@ -142,7 +142,7 @@ def _fund_issuer_mint_token( def test_use_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], algorithm: str, plutus_version: str, ): @@ -194,7 +194,7 @@ def test_use_secp_builtin_functions( def test_negative_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], test_vector: str, algorithm: str, plutus_version: str, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py index 790ea851d..1f06700e8 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py @@ -1,7 +1,6 @@ """Tests for spending with Plutus V2 using `transaction build`.""" import logging -import typing as tp import allure import pytest @@ -30,7 +29,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = 
common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -63,7 +62,7 @@ class TestBuildLocking: def test_txout_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_inline_datum: bool, use_reference_script: bool, ): @@ -218,7 +217,7 @@ def test_txout_locking( def test_min_required_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_inline_datum: bool, use_token: bool, use_reference_script: bool, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py index dcaffce2d..8c6931a57 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py @@ -31,7 +31,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -59,8 +59,8 @@ def _build_spend_locked_txin( cluster: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, dst_addr: clusterlib.AddressRecord, - script_utxos: tp.List[clusterlib.UTXOData], - collateral_utxos: tp.List[clusterlib.UTXOData], + script_utxos: list[clusterlib.UTXOData], + collateral_utxos: list[clusterlib.UTXOData], plutus_op: plutus_common.PlutusOp, total_collateral_amount: tp.Optional[int] = None, return_collateral_txouts: clusterlib.OptionalTxOuts = (), @@ -136,7 +136,7 @@ def _build_spend_locked_txin( def test_with_total_return_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: 
list[clusterlib.AddressRecord], use_return_collateral: bool, use_total_collateral: bool, ): @@ -253,7 +253,7 @@ def test_with_total_return_collateral( def test_collateral_with_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_return_collateral: bool, ): """Test failing script using collaterals with tokens. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py index b9befa582..e639aefe0 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py @@ -1,7 +1,6 @@ """Tests for collateral while spending with Plutus V2 using `transaction build-raw`.""" import logging -import typing as tp import allure import pytest @@ -29,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -68,7 +67,7 @@ class TestCollateralOutput: def test_with_total_return_collateral( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_return_collateral: bool, use_total_collateral: bool, ): @@ -214,7 +213,7 @@ def test_with_total_return_collateral( def test_collateral_with_tokens( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test failing script using collaterals with tokens. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py index 7d4b84d9f..c9b90b5b6 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py @@ -28,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -60,7 +60,7 @@ class TestCompatibility: def test_inline_datum_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an inline datum using old Tx era. @@ -107,7 +107,7 @@ def test_inline_datum_old_tx_era( def test_reference_script_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a reference script using old Tx era.""" __: tp.Any # mypy workaround @@ -155,7 +155,7 @@ def test_reference_script_old_tx_era( def test_ro_reference_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test building Tx with read-only reference input using old Tx era. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py index a8fa52bf3..232009a13 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py @@ -27,7 +27,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -59,7 +59,7 @@ class TestCompatibility: def test_inline_datum_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an inline datum using old Tx era. @@ -118,7 +118,7 @@ def test_inline_datum_old_tx_era( def test_reference_script_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a reference script using old Tx era.""" __: tp.Any # mypy workaround @@ -176,7 +176,7 @@ def test_reference_script_old_tx_era( def test_ro_reference_old_tx_era( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test building Tx with read-only reference input using old Tx era. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py index a90548726..1a482358c 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py @@ -32,7 +32,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -190,7 +190,7 @@ def pbt_script_address( def test_lock_tx_invalid_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], datum_value: str, ): """Test locking a Tx output with an invalid datum. @@ -230,7 +230,7 @@ def test_lock_tx_invalid_datum( def test_lock_tx_v1_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an inline datum and a v1 script. @@ -301,7 +301,7 @@ def test_lock_tx_v1_script( def test_lock_tx_big_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_script_address: str, datum_content: str, ): @@ -353,7 +353,7 @@ def test_lock_tx_big_datum( @pytest.mark.testnets @pytest.mark.dbsync def test_lock_tx_datum_as_witness( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Test unlock a Tx output with a datum as witness. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py index 3199ce5cb..49b9a40cb 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py @@ -31,7 +31,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -57,7 +57,7 @@ class TestNegativeInlineDatum: def pbt_highest_utxo( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ) -> clusterlib.UTXOData: """Get UTxO with highest amount of Lovelace. @@ -90,7 +90,7 @@ def pbt_script_address( def test_lock_tx_invalid_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], datum_value: str, ): """Test locking a Tx output with an invalid datum. @@ -142,7 +142,7 @@ def test_lock_tx_invalid_datum( def test_lock_tx_v1_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an inline datum and a v1 script. 
@@ -231,7 +231,7 @@ def test_lock_tx_v1_script( def test_lock_tx_big_datum( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], pbt_highest_utxo: clusterlib.UTXOData, pbt_script_address: str, datum_content: str, @@ -286,7 +286,7 @@ def test_lock_tx_big_datum( @pytest.mark.testnets @pytest.mark.dbsync def test_lock_tx_datum_as_witness( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Test unlock a Tx output with a datum as witness. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py index 0a2afdc2e..add13ae37 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py @@ -2,7 +2,6 @@ import binascii import logging -import typing as tp import allure import pytest @@ -29,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -62,7 +61,7 @@ class TestLockingV2: def test_txout_locking( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_inline_datum: bool, use_reference_script: bool, ): @@ -187,7 +186,7 @@ def test_txout_locking( def test_datum_bytes_in_dbsync( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test that datum bytes in db-sync corresponds to original datum. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py index b4938a2fe..9bc925fb2 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py @@ -31,7 +31,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -61,7 +61,7 @@ class TestReadonlyReferenceInputs: def test_use_reference_input( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], reference_input_scenario: str, ): """Test use a reference input when unlock some funds. @@ -169,7 +169,7 @@ def test_use_reference_input( def test_same_input_as_reference_input( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test use a reference input that is also a regular input of the same transaction. @@ -274,7 +274,7 @@ def test_use_same_reference_input_multiple_times( self, cluster: clusterlib.ClusterLib, cluster_manager: cluster_management.ClusterManager, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test 2 transactions using the same reference input in the same block. @@ -351,7 +351,7 @@ def test_use_same_reference_input_multiple_times( def test_reference_input_non_plutus( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test using a read-only reference input in non-Plutus transaction. 
@@ -422,7 +422,7 @@ class TestNegativeReadonlyReferenceInputs: def test_reference_spent_output( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test use a reference input that was already spent. @@ -534,7 +534,7 @@ def test_reference_spent_output( def test_v1_script_with_reference_input( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test use a reference input with a v1 Plutus script. @@ -619,7 +619,7 @@ def test_v1_script_with_reference_input( def test_reference_input_without_spend_anything( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test using a read-only reference input without spending any UTxO. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py index 323ef8650..cc80c8d7c 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py @@ -30,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -60,7 +60,7 @@ class TestReadonlyReferenceInputs: def test_use_reference_input( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], reference_input_scenario: str, ): """Test use a reference input when unlock some funds. 
@@ -159,7 +159,7 @@ def test_use_reference_input( def test_same_input_as_reference_input( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test use a reference input that is also a regular input of the same transaction. @@ -262,7 +262,7 @@ def test_same_input_as_reference_input( def test_reference_input_non_plutus( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test using a read-only reference input in non-Plutus transaction. @@ -327,7 +327,7 @@ class TestNegativeReadonlyReferenceInputs: def test_reference_spent_output( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test use a reference input that was already spent. @@ -432,7 +432,7 @@ def test_reference_spent_output( @pytest.mark.testnets @pytest.mark.dbsync def test_v1_script_with_reference_input( - self, cluster: clusterlib.ClusterLib, payment_addrs: tp.List[clusterlib.AddressRecord] + self, cluster: clusterlib.ClusterLib, payment_addrs: list[clusterlib.AddressRecord] ): """Test use a reference input with a v1 Plutus script. @@ -526,7 +526,7 @@ def test_v1_script_with_reference_input( def test_reference_input_without_spend_anything( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test using a read-only reference input without spending any UTxO. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py index f2f373380..7ba0b9ce1 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py @@ -28,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -59,7 +59,7 @@ class TestReferenceScripts: def test_reference_multiple_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_same_script: bool, ): """Test locking two Tx output with a V2 reference script and spending it. @@ -236,7 +236,7 @@ def test_reference_multiple_script( def test_reference_same_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking two Tx output with the same V2 reference script and spending it. @@ -383,7 +383,7 @@ def test_reference_same_script( def test_mix_reference_attached_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an attached V2 script and one using reference V2 script. 
@@ -551,7 +551,7 @@ def test_mix_reference_attached_script( def test_spend_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, address_type: str, ): @@ -620,7 +620,7 @@ def test_spend_reference_script( def test_spend_regular_utxo_and_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spend an UTxO and use a reference a script on the same transaction. @@ -716,7 +716,7 @@ class TestNegativeReferenceScripts: def test_not_a_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an invalid reference script. @@ -751,7 +751,7 @@ def test_not_a_script( def test_two_scripts_one_fail( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking two Tx with different Plutus reference scripts in single Tx, one fails. @@ -900,7 +900,7 @@ def test_two_scripts_one_fail( def test_lock_tx_v1_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a Plutus V1 reference script. @@ -976,7 +976,7 @@ def test_lock_tx_v1_reference_script( def test_v1_attached_v2_reference( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an attached V1 script and one using reference V2 script. 
@@ -1124,7 +1124,7 @@ def test_v1_attached_v2_reference( def test_lock_byron_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a Plutus V2 reference script on Byron address. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py index b81d8910d..0c8d51398 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py @@ -28,7 +28,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -59,7 +59,7 @@ class TestReferenceScripts: def test_reference_multiple_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], use_same_script: bool, ): """Test locking two Tx output with a V2 reference script and spending it. @@ -220,7 +220,7 @@ def test_reference_multiple_script( def test_reference_same_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking two Tx output with the same V2 reference script and spending it. @@ -363,7 +363,7 @@ def test_reference_same_script( def test_mix_reference_attached_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an attached V2 script and one using reference V2 script. 
@@ -513,7 +513,7 @@ def test_mix_reference_attached_script( def test_spend_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, address_type: str, ): @@ -573,7 +573,7 @@ def test_spend_reference_script( def test_spend_regular_utxo_and_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], plutus_version: str, ): """Test spend an UTxO and use a reference a script on the same transaction. @@ -649,7 +649,7 @@ def test_spend_regular_utxo_and_reference_script( def test_reference_script_byron_address( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test creating reference script UTxO on Byron address. @@ -687,7 +687,7 @@ class TestNegativeReferenceScripts: def test_not_a_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an invalid reference script. @@ -731,7 +731,7 @@ def test_not_a_script( def test_two_scripts_one_fail( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking two Tx with different Plutus reference scripts in single Tx, one fails. @@ -890,7 +890,7 @@ def test_two_scripts_one_fail( def test_lock_tx_v1_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a Plutus V1 reference script. 
@@ -984,7 +984,7 @@ def test_lock_tx_v1_reference_script( def test_v1_attached_v2_reference( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with an attached V1 script and one using reference V2 script. @@ -1136,7 +1136,7 @@ def test_v1_attached_v2_reference( def test_lock_byron_reference_script( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], ): """Test locking a Tx output with a Plutus V2 reference script on Byron address. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py index 7c8f93fd0..92008dddc 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py @@ -3,7 +3,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -31,7 +30,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -55,9 +54,9 @@ class TestSECP256k1: def build_fund_script_secp( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[str, tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData]]: + ) -> tuple[str, list[clusterlib.UTXOData], list[clusterlib.UTXOData]]: """Fund a Plutus script and create the necessary Tx outputs.""" algorithm = request.param temp_template = common.get_test_id(cluster) @@ -138,10 +137,8 @@ def build_fund_script_secp( 
def test_use_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - build_fund_script_secp: tp.Tuple[ - str, tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + payment_addrs: list[clusterlib.AddressRecord], + build_fund_script_secp: tuple[str, list[clusterlib.UTXOData], list[clusterlib.UTXOData]], ): """Test that it is possible to spend a locked UTxO by a script that uses a SECP function. @@ -236,11 +233,9 @@ def test_use_secp_builtin_functions( def test_overspending_execution_budget( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], number_of_iterations: int, - build_fund_script_secp: tp.Tuple[ - str, tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData] - ], + build_fund_script_secp: tuple[str, list[clusterlib.UTXOData], list[clusterlib.UTXOData]], ): """Try to build a transaction with a plutus script that overspend the execution budget. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py index 663317295..8204cf6a0 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py @@ -1,7 +1,6 @@ """SECP256k1 tests for spending with Plutus V2 using `transaction build-raw`.""" import logging -import typing as tp import allure import pytest @@ -27,7 +26,7 @@ def payment_addrs( cluster_manager: cluster_management.ClusterManager, cluster: clusterlib.ClusterLib, -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment addresses.""" test_id = common.get_test_id(cluster) addrs = clusterlib_utils.create_payment_addr_records( @@ -51,9 +50,9 @@ class TestSECP256k1: def fund_script_secp( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], + payment_addrs: list[clusterlib.AddressRecord], request: SubRequest, - ) -> tp.Tuple[str, tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData]]: + ) -> tuple[str, list[clusterlib.UTXOData], list[clusterlib.UTXOData]]: """Fund a Plutus script and create the necessary Tx outputs.""" algorithm = request.param temp_template = common.get_test_id(cluster) @@ -122,8 +121,8 @@ def fund_script_secp( def test_use_secp_builtin_functions( self, cluster: clusterlib.ClusterLib, - payment_addrs: tp.List[clusterlib.AddressRecord], - fund_script_secp: tp.Tuple[str, tp.List[clusterlib.UTXOData], tp.List[clusterlib.UTXOData]], + payment_addrs: list[clusterlib.AddressRecord], + fund_script_secp: tuple[str, list[clusterlib.UTXOData], list[clusterlib.UTXOData]], ): """Test that it is possible to spend a locked UTxO by a script that uses a SECP function. 
diff --git a/cardano_node_tests/tests/tx_common.py b/cardano_node_tests/tests/tx_common.py index abe7b8cb7..4fba6881e 100644 --- a/cardano_node_tests/tests/tx_common.py +++ b/cardano_node_tests/tests/tx_common.py @@ -1,6 +1,5 @@ import logging import pathlib as pl -import typing as tp from cardano_clusterlib import clusterlib @@ -64,8 +63,8 @@ def get_raw_tx_values( def get_txins_txouts( - txins: tp.List[clusterlib.UTXOData], txouts: tp.List[clusterlib.TxOut] -) -> tp.Tuple[tp.List[str], tp.List[str]]: + txins: list[clusterlib.UTXOData], txouts: list[clusterlib.TxOut] +) -> tuple[list[str], list[str]]: txins_combined = [f"{x.utxo_hash}#{x.utxo_ix}" for x in txins] txouts_combined = [f"{x.address}+{x.amount}" for x in txouts] return txins_combined, txouts_combined diff --git a/cardano_node_tests/utils/blockers.py b/cardano_node_tests/utils/blockers.py index fc90d00d7..cb1f0b485 100644 --- a/cardano_node_tests/utils/blockers.py +++ b/cardano_node_tests/utils/blockers.py @@ -166,7 +166,7 @@ def __repr__(self) -> str: def finish_test(issues: tp.Iterable[GH]) -> None: """Fail or Xfail test with references to multiple GitHub issues.""" - def _get_outcome(issue: GH) -> tp.Tuple[bool, str, str]: + def _get_outcome(issue: GH) -> tuple[bool, str, str]: blocked = issue.is_blocked() py_outcome = "XFAIL" if blocked else "FAIL" reason = f"{py_outcome}: {issue.gh_issue}: {issue.message}" diff --git a/cardano_node_tests/utils/cluster_nodes.py b/cardano_node_tests/utils/cluster_nodes.py index 0a56cbf05..6456cb836 100644 --- a/cardano_node_tests/utils/cluster_nodes.py +++ b/cardano_node_tests/utils/cluster_nodes.py @@ -54,7 +54,7 @@ class ClusterType: LOCAL: tp.Final[str] = "local" TESTNET: tp.Final[str] = "testnet" - test_addr_records: tp.ClassVar[tp.Tuple[str, ...]] = ( + test_addr_records: tp.ClassVar[tuple[str, ...]] = ( "user1", "user2", "user3", @@ -62,7 +62,7 @@ class ClusterType: "user5", ) - NODES: tp.ClassVar[tp.Set[str]] = set() + NODES: tp.ClassVar[set[str]] = set() def 
__init__(self) -> None: self.type = "unknown" @@ -85,7 +85,7 @@ def get_cluster_obj(self, command_era: str = "") -> clusterlib.ClusterLib: def create_addrs_data( self, cluster_obj: clusterlib.ClusterLib, destination_dir: clusterlib.FileType = "." - ) -> tp.Dict[str, tp.Dict[str, tp.Any]]: + ) -> dict[str, dict[str, tp.Any]]: """Create addresses and their keys for usage in tests.""" msg = f"Not implemented for cluster type '{self.type}'." raise NotImplementedError(msg) @@ -94,7 +94,7 @@ def create_addrs_data( class LocalCluster(ClusterType): """Local cluster type (full cardano mode).""" - NODES: tp.ClassVar[tp.Set[str]] = { + NODES: tp.ClassVar[set[str]] = { "bft1", *(f"pool{i}" for i in range(1, configuration.NUM_POOLS + 1)), } @@ -128,7 +128,7 @@ def get_cluster_obj(self, command_era: str = "") -> clusterlib.ClusterLib: def create_addrs_data( self, cluster_obj: clusterlib.ClusterLib, destination_dir: clusterlib.FileType = "." - ) -> tp.Dict[str, tp.Dict[str, tp.Any]]: + ) -> dict[str, dict[str, tp.Any]]: """Create addresses and their keys for usage in tests.""" destination_dir = pl.Path(destination_dir).expanduser() destination_dir.mkdir(parents=True, exist_ok=True) @@ -136,7 +136,7 @@ def create_addrs_data( instance_num = cluster_env.instance_num # Create new addresses - new_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {} + new_addrs_data: dict[str, dict[str, tp.Any]] = {} for addr_name in self.test_addr_records: addr_name_instance = f"{addr_name}_ci{instance_num}" payment = cluster_obj.g_address.gen_payment_addr_and_keys( @@ -148,7 +148,7 @@ def create_addrs_data( } # Create records for existing addresses - faucet_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {"faucet": {"payment": None}} + faucet_addrs_data: dict[str, dict[str, tp.Any]] = {"faucet": {"payment": None}} byron_dir = cluster_env.state_dir / "byron" shelley_dir = cluster_env.state_dir / "shelley" @@ -188,20 +188,20 @@ def create_addrs_data( class TestnetCluster(ClusterType): """Testnet 
cluster type (full cardano mode).""" - TESTNETS: tp.ClassVar[tp.Dict[int, dict]] = { + TESTNETS: tp.ClassVar[dict[int, dict]] = { 1506203091: {"type": Testnets.mainnet, "shelley_start": "2020-07-29T21:44:51Z"}, 1654041600: {"type": Testnets.preprod, "byron_epochs": 4}, 1666656000: {"type": Testnets.preview, "byron_epochs": 0}, } - NODES: tp.ClassVar[tp.Set[str]] = {"relay1"} + NODES: tp.ClassVar[set[str]] = {"relay1"} def __init__(self) -> None: super().__init__() self.type = ClusterType.TESTNET - self.cluster_scripts: tp.Union[ - cluster_scripts.ScriptsTypes, cluster_scripts.TestnetScripts - ] = cluster_scripts.TestnetScripts() + self.cluster_scripts: cluster_scripts.ScriptsTypes | cluster_scripts.TestnetScripts = ( + cluster_scripts.TestnetScripts() + ) # cached values self._testnet_type = "" @@ -243,7 +243,7 @@ def create_addrs_data( self, cluster_obj: clusterlib.ClusterLib, destination_dir: clusterlib.FileType = ".", - ) -> tp.Dict[str, tp.Dict[str, tp.Any]]: + ) -> dict[str, dict[str, tp.Any]]: """Create addresses and their keys for usage in tests.""" # Store record of the original faucet address shelley_dir = get_cluster_env().state_dir / "shelley" @@ -252,12 +252,12 @@ def create_addrs_data( vkey_file=shelley_dir / "faucet.vkey", skey_file=shelley_dir / "faucet.skey", ) - faucet_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = { + faucet_addrs_data: dict[str, dict[str, tp.Any]] = { self.test_addr_records[1]: {"payment": faucet_rec} } # Create new addresses - new_addrs_data: tp.Dict[str, tp.Dict[str, tp.Any]] = {} + new_addrs_data: dict[str, dict[str, tp.Any]] = {} for addr_name in self.test_addr_records[1:]: payment = cluster_obj.g_address.gen_payment_addr_and_keys( name=addr_name, @@ -369,7 +369,7 @@ def reload_supervisor_config( time.sleep(delay) -def start_cluster(cmd: str, args: tp.List[str]) -> clusterlib.ClusterLib: +def start_cluster(cmd: str, args: list[str]) -> clusterlib.ClusterLib: """Start cluster.""" args_str = " ".join(args) args_str = f" 
{args_str}" if args_str else "" @@ -403,7 +403,7 @@ def restart_all_nodes( def services_action( - service_names: tp.List[str], action: str, instance_num: tp.Optional[int] = None + service_names: list[str], action: str, instance_num: tp.Optional[int] = None ) -> None: """Perform action on services on the running cluster.""" LOGGER.info(f"Performing '{action}' action on services {service_names}.") @@ -424,20 +424,20 @@ def services_action( ) from exc -def start_nodes(node_names: tp.List[str], instance_num: tp.Optional[int] = None) -> None: +def start_nodes(node_names: list[str], instance_num: tp.Optional[int] = None) -> None: """Start list of Cardano nodes of the running cluster.""" service_names = [f"nodes:{n}" for n in node_names] services_action(service_names=service_names, action="start", instance_num=instance_num) -def stop_nodes(node_names: tp.List[str], instance_num: tp.Optional[int] = None) -> None: +def stop_nodes(node_names: list[str], instance_num: tp.Optional[int] = None) -> None: """Stop list of Cardano nodes of the running cluster.""" service_names = [f"nodes:{n}" for n in node_names] services_action(service_names=service_names, action="stop", instance_num=instance_num) def restart_nodes( - node_names: tp.List[str], + node_names: list[str], instance_num: tp.Optional[int] = None, delay: int = configuration.TX_SUBMISSION_DELAY, ) -> None: @@ -451,8 +451,8 @@ def restart_nodes( def services_status( - service_names: tp.Optional[tp.List[str]] = None, instance_num: tp.Optional[int] = None -) -> tp.List[ServiceStatus]: + service_names: tp.Optional[list[str]] = None, instance_num: tp.Optional[int] = None +) -> list[ServiceStatus]: """Return status info for list of services running on the running cluster (all by default).""" if instance_num is None: instance_num = get_cluster_env().instance_num diff --git a/cardano_node_tests/utils/cluster_scripts.py b/cardano_node_tests/utils/cluster_scripts.py index 57ac0ee3b..e2dcc47a3 100644 --- 
a/cardano_node_tests/utils/cluster_scripts.py +++ b/cardano_node_tests/utils/cluster_scripts.py @@ -27,7 +27,7 @@ class InstanceFiles: start_script: pl.Path stop_script: pl.Path - start_script_args: tp.List[str] + start_script_args: list[str] dir: pl.Path @@ -68,7 +68,7 @@ class InstancePorts: pool3: int ekg_pool3: int prometheus_pool3: int - node_ports: tp.Tuple[NodePorts, ...] + node_ports: tuple[NodePorts, ...] class ScriptsTypes: @@ -283,7 +283,7 @@ def _gen_legacy_topology(self, addr: str, ports: tp.Iterable[int]) -> dict: topology = {"Producers": producers} return topology - def _gen_p2p_topology(self, addr: str, ports: tp.List[int], fixed_ports: tp.List[int]) -> dict: + def _gen_p2p_topology(self, addr: str, ports: list[int], fixed_ports: list[int]) -> dict: """Generate p2p topology for given ports.""" # Select fixed ports and several randomly selected ports sample_ports = random.sample(ports, 3) if len(ports) > 3 else ports @@ -510,7 +510,7 @@ def gen_split_topology_files( class TestnetScripts(ScriptsTypes): """Scripts for starting a node on testnet.""" - TESTNET_GLOBS: tp.ClassVar[tp.Tuple[str, ...]] = ( + TESTNET_GLOBS: tp.ClassVar[tuple[str, ...]] = ( "config*.json", "genesis-*.json", "topology-*.json", @@ -589,7 +589,7 @@ def copy_scripts_files(self, destdir: ttypes.FileType) -> StartupFiles: ) def _reconfigure_testnet( - self, indir: pl.Path, destdir: pl.Path, instance_num: int, globs: tp.List[str] + self, indir: pl.Path, destdir: pl.Path, instance_num: int, globs: list[str] ) -> None: """Reconfigure cluster scripts and config files.""" instance_ports = self.get_instance_ports(instance_num=instance_num) @@ -639,7 +639,7 @@ def _reconfigure_submit_api_config(self, infile: pl.Path, outfile: pl.Path) -> N with open(outfile, "w", encoding="utf-8") as out_fp: out_fp.write("".join(new_content)) - def _reconfigure_bootstrap(self, indir: pl.Path, destdir: pl.Path, globs: tp.List[str]) -> None: + def _reconfigure_bootstrap(self, indir: pl.Path, destdir: 
pl.Path, globs: list[str]) -> None: """Copy and reconfigure config files from bootstrap dir.""" _infiles = [list(indir.glob(g)) for g in globs] infiles = list(itertools.chain.from_iterable(_infiles)) diff --git a/cardano_node_tests/utils/clusterlib_utils.py b/cardano_node_tests/utils/clusterlib_utils.py index efe3204ea..c51400cb4 100644 --- a/cardano_node_tests/utils/clusterlib_utils.py +++ b/cardano_node_tests/utils/clusterlib_utils.py @@ -34,7 +34,7 @@ class UpdateProposal: class TokenRecord: token: str amount: int - issuers_addrs: tp.List[clusterlib.AddressRecord] + issuers_addrs: list[clusterlib.AddressRecord] token_mint_addr: clusterlib.AddressRecord script: pl.Path @@ -65,7 +65,7 @@ def build_and_submit_tx( fee_buffer: tp.Optional[int] = None, raw_fee: tp.Optional[int] = None, required_signers: cl_types.OptionalFiles = (), - required_signer_hashes: tp.Optional[tp.List[str]] = None, + required_signer_hashes: tp.Optional[list[str]] = None, withdrawals: clusterlib.OptionalTxOuts = (), script_withdrawals: clusterlib.OptionalScriptWithdrawals = (), script_votes: clusterlib.OptionalScriptVotes = (), @@ -291,7 +291,7 @@ def create_payment_addr_records( cluster_obj: clusterlib.ClusterLib, stake_vkey_file: tp.Optional[cl_types.FileType] = None, destination_dir: cl_types.FileType = ".", -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new payment address(es).""" addrs = [ cluster_obj.g_address.gen_payment_addr_and_keys( @@ -310,7 +310,7 @@ def create_stake_addr_records( *names: str, cluster_obj: clusterlib.ClusterLib, destination_dir: cl_types.FileType = ".", -) -> tp.List[clusterlib.AddressRecord]: +) -> list[clusterlib.AddressRecord]: """Create new stake address(es).""" addrs = [ cluster_obj.g_stake_address.gen_stake_addr_and_keys( @@ -328,7 +328,7 @@ def create_pool_users( name_template: str, no_of_addr: int = 1, destination_dir: cl_types.FileType = ".", -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: 
"""Create PoolUsers.""" pool_users = [] for i in range(no_of_addr): @@ -452,7 +452,7 @@ def check_pool_data( # noqa: C901 return "\n\n".join(errors_list) -def check_updated_params(update_proposals: tp.List[UpdateProposal], protocol_params: dict) -> None: +def check_updated_params(update_proposals: list[UpdateProposal], protocol_params: dict) -> None: """Compare update proposals with actual protocol parameters.""" failures = [] for u in update_proposals: @@ -477,8 +477,8 @@ def check_updated_params(update_proposals: tp.List[UpdateProposal], protocol_par def get_pparams_update_args( - update_proposals: tp.List[UpdateProposal], -) -> tp.List[str]: + update_proposals: list[UpdateProposal], +) -> list[str]: """Get cli arguments for pparams update action.""" if not update_proposals: return [] @@ -491,7 +491,7 @@ def get_pparams_update_args( def update_params( cluster_obj: clusterlib.ClusterLib, src_addr_record: clusterlib.AddressRecord, - update_proposals: tp.List[UpdateProposal], + update_proposals: list[UpdateProposal], ) -> None: """Update params using update proposal.""" if not update_proposals: @@ -512,7 +512,7 @@ def update_params( def update_params_build( cluster_obj: clusterlib.ClusterLib, src_addr_record: clusterlib.AddressRecord, - update_proposals: tp.List[UpdateProposal], + update_proposals: list[UpdateProposal], ) -> None: """Update params using update proposal. 
@@ -558,7 +558,7 @@ def update_params_build( def mint_or_burn_witness( cluster_obj: clusterlib.ClusterLib, - new_tokens: tp.List[TokenRecord], + new_tokens: list[TokenRecord], temp_template: str, invalid_hereafter: tp.Optional[int] = None, invalid_before: tp.Optional[int] = None, @@ -683,7 +683,7 @@ def mint_or_burn_witness( def mint_or_burn_sign( cluster_obj: clusterlib.ClusterLib, - new_tokens: tp.List[TokenRecord], + new_tokens: list[TokenRecord], temp_template: str, submit_method: str = submit_utils.SubmitMethods.CLI, use_build_cmd: bool = False, @@ -860,7 +860,7 @@ def new_tokens( token_mint_addr: clusterlib.AddressRecord, issuer_addr: clusterlib.AddressRecord, amount: int, -) -> tp.List[TokenRecord]: +) -> list[TokenRecord]: """Mint new token, sign using skeys.""" # create simple script keyhash = cluster_obj.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) @@ -949,7 +949,7 @@ def get_delegation_state( def get_blocks_before( cluster_obj: clusterlib.ClusterLib, -) -> tp.Dict[str, int]: +) -> dict[str, int]: """Get `blocksBefore` section of ledger state with bech32 encoded pool ids.""" ledger_state_cmd = _get_ledger_state_cmd(cluster_obj=cluster_obj) @@ -1201,7 +1201,7 @@ def cli_has(command: str) -> bool: def check_txins_spent( - cluster_obj: clusterlib.ClusterLib, txins: tp.List[clusterlib.UTXOData], wait_blocks: int = 2 + cluster_obj: clusterlib.ClusterLib, txins: list[clusterlib.UTXOData], wait_blocks: int = 2 ) -> None: """Check that txins were spent.""" if wait_blocks > 0: @@ -1221,7 +1221,7 @@ def create_reference_utxo( dst_addr: clusterlib.AddressRecord, script_file: pl.Path, amount: int, -) -> tp.Tuple[clusterlib.UTXOData, clusterlib.TxRawOutput]: +) -> tuple[clusterlib.UTXOData, clusterlib.TxRawOutput]: """Create a reference script UTxO.""" # pylint: disable=too-many-arguments tx_files = clusterlib.TxFiles( @@ -1255,9 +1255,7 @@ def create_reference_utxo( return reference_utxo, tx_raw_output -def get_utxo_ix_offset( - utxos: 
tp.List[clusterlib.UTXOData], txouts: tp.List[clusterlib.TxOut] -) -> int: +def get_utxo_ix_offset(utxos: list[clusterlib.UTXOData], txouts: list[clusterlib.TxOut]) -> int: """Get offset of index of the first user-defined txout. Change txout created by `transaction build` used to be UTxO with index 0, now it is the last @@ -1336,9 +1334,9 @@ def get_plutus_b64(script_file: cl_types.FileType) -> str: return script_base64 -def get_snapshot_rec(ledger_snapshot: dict) -> tp.Dict[str, tp.Union[int, list]]: +def get_snapshot_rec(ledger_snapshot: dict) -> dict[str, int | list]: """Get uniform record for ledger state snapshot.""" - hashes: tp.Dict[str, tp.Union[int, list]] = {} + hashes: dict[str, int | list] = {} for r in ledger_snapshot: r_hash_rec = r[0] @@ -1360,9 +1358,9 @@ def get_snapshot_rec(ledger_snapshot: dict) -> tp.Dict[str, tp.Union[int, list]] return hashes -def get_snapshot_delegations(ledger_snapshot: dict) -> tp.Dict[str, tp.List[str]]: +def get_snapshot_delegations(ledger_snapshot: dict) -> dict[str, list[str]]: """Get delegations data from ledger state snapshot.""" - delegations: tp.Dict[str, tp.List[str]] = {} + delegations: dict[str, list[str]] = {} for r in ledger_snapshot: r_hash_rec = r[0] @@ -1388,8 +1386,8 @@ def create_collaterals( cluster: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, temp_template: str, - tx_outs: tp.List[clusterlib.TxOut], -) -> tp.List[clusterlib.UTXOData]: + tx_outs: list[clusterlib.TxOut], +) -> list[clusterlib.UTXOData]: """Create collateral UTxOs as required.""" tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], diff --git a/cardano_node_tests/utils/configuration.py b/cardano_node_tests/utils/configuration.py index 3d453d2ad..eab6dca1f 100644 --- a/cardano_node_tests/utils/configuration.py +++ b/cardano_node_tests/utils/configuration.py @@ -2,7 +2,6 @@ import os import pathlib as pl -import typing as tp def _check_cardano_node_socket_path() -> None: @@ -56,12 +55,12 @@ def 
_check_cardano_node_socket_path() -> None: os.environ["CARDANO_NODE_SOCKET_PATH"] = str(STARTUP_CARDANO_NODE_SOCKET_PATH) # resolve SCHEDULING_LOG -SCHEDULING_LOG: tp.Union[str, pl.Path] = os.environ.get("SCHEDULING_LOG") or "" +SCHEDULING_LOG: str | pl.Path = os.environ.get("SCHEDULING_LOG") or "" if SCHEDULING_LOG: SCHEDULING_LOG = pl.Path(SCHEDULING_LOG).expanduser().resolve() # resolve BLOCK_PRODUCTION_DB -BLOCK_PRODUCTION_DB: tp.Union[str, pl.Path] = os.environ.get("BLOCK_PRODUCTION_DB") or "" +BLOCK_PRODUCTION_DB: str | pl.Path = os.environ.get("BLOCK_PRODUCTION_DB") or "" if BLOCK_PRODUCTION_DB: BLOCK_PRODUCTION_DB = pl.Path(BLOCK_PRODUCTION_DB).expanduser().resolve() diff --git a/cardano_node_tests/utils/dbsync_check_tx.py b/cardano_node_tests/utils/dbsync_check_tx.py index c2e7c2db6..33254e830 100644 --- a/cardano_node_tests/utils/dbsync_check_tx.py +++ b/cardano_node_tests/utils/dbsync_check_tx.py @@ -6,7 +6,6 @@ import json import logging import pathlib as pl -import typing as tp from cardano_clusterlib import clusterlib @@ -17,7 +16,7 @@ LOGGER = logging.getLogger(__name__) -def _sum_mint_txouts(txouts: clusterlib.OptionalTxOuts) -> tp.List[clusterlib.TxOut]: +def _sum_mint_txouts(txouts: clusterlib.OptionalTxOuts) -> list[clusterlib.TxOut]: """Calculate minting amount sum for records with the same token. Remove address information - minting tokens doesn't include address, only amount and asset ID, @@ -25,7 +24,7 @@ def _sum_mint_txouts(txouts: clusterlib.OptionalTxOuts) -> tp.List[clusterlib.Tx Remove also datum hash, which is not available as well. MA output is handled in Tx output checks. 
""" - mint_txouts: tp.Dict[str, clusterlib.TxOut] = {} + mint_txouts: dict[str, clusterlib.TxOut] = {} for mt in txouts: if mt.coin in mint_txouts: @@ -41,8 +40,8 @@ def _sum_mint_txouts(txouts: clusterlib.OptionalTxOuts) -> tp.List[clusterlib.Tx def _get_scripts_hashes( cluster_obj: clusterlib.ClusterLib, - records: tp.Union[clusterlib.OptionalScriptTxIn, clusterlib.OptionalMint], -) -> tp.Dict[str, tp.Union[clusterlib.OptionalScriptTxIn, clusterlib.OptionalMint]]: + records: clusterlib.OptionalScriptTxIn | clusterlib.OptionalMint, +) -> dict[str, clusterlib.OptionalScriptTxIn | clusterlib.OptionalMint]: """Create a hash table of Tx Plutus data indexed by script hash.""" hashes_db: dict = {} @@ -68,8 +67,8 @@ def _get_script_data_hash(cluster_obj: clusterlib.ClusterLib, script_data: dict) def _db_redeemer_hashes( - records: tp.List[dbsync_types.RedeemerRecord], -) -> tp.Dict[str, tp.List[dbsync_types.RedeemerRecord]]: + records: list[dbsync_types.RedeemerRecord], +) -> dict[str, list[dbsync_types.RedeemerRecord]]: """Create a hash table of redeemers indexed by script hash.""" hashes_db: dict = {} @@ -85,7 +84,7 @@ def _db_redeemer_hashes( def _compare_redeemer_value( - tx_rec: tp.Union[clusterlib.ScriptTxIn, clusterlib.Mint], db_redeemer: dict + tx_rec: clusterlib.ScriptTxIn | clusterlib.Mint, db_redeemer: dict ) -> bool: """Compare the value of the tx redeemer with the value stored on dbsync.""" if not (tx_rec.redeemer_file or tx_rec.redeemer_value): @@ -108,8 +107,8 @@ def _compare_redeemer_value( def _compare_redeemers( - tx_data: tp.Dict[str, tp.Union[clusterlib.OptionalScriptTxIn, clusterlib.OptionalMint]], - db_data: tp.Dict[str, tp.List[dbsync_types.RedeemerRecord]], + tx_data: dict[str, clusterlib.OptionalScriptTxIn | clusterlib.OptionalMint], + db_data: dict[str, list[dbsync_types.RedeemerRecord]], purpose: str, ) -> None: """Compare redeemers data available in Tx data with data in db-sync.""" @@ -186,7 +185,7 @@ def _txout_has_inline_datum(txout: 
clusterlib.TxOut) -> bool: def utxodata2txout( - utxodata: tp.Union[dbsync_types.UTxORecord, clusterlib.UTXOData], + utxodata: dbsync_types.UTxORecord | clusterlib.UTXOData, ) -> clusterlib.TxOut: """Convert `UTxORecord` or `UTxOData` to `clusterlib.TxOut`.""" return clusterlib.TxOut( @@ -249,7 +248,7 @@ def check_tx_ins( response: dbsync_types.TxRecord, ) -> None: """Check that the Tx inputs match the data from db-sync.""" - combined_txins: tp.List[clusterlib.UTXOData] = [ + combined_txins: list[clusterlib.UTXOData] = [ *tx_raw_output.txins, *[p.txins[0] for p in tx_raw_output.script_txins if p.txins], ] diff --git a/cardano_node_tests/utils/dbsync_conn.py b/cardano_node_tests/utils/dbsync_conn.py index d73addc11..6e4c40364 100644 --- a/cardano_node_tests/utils/dbsync_conn.py +++ b/cardano_node_tests/utils/dbsync_conn.py @@ -15,7 +15,7 @@ class DBSyncCache: """Cache connection to db-sync database for each cluster instance.""" - conns: tp.ClassVar[tp.Dict[int, tp.Optional[psycopg2.extensions.connection]]] = {0: None} + conns: tp.ClassVar[dict[int, tp.Optional[psycopg2.extensions.connection]]] = {0: None} def _conn(instance_num: int) -> psycopg2.extensions.connection: diff --git a/cardano_node_tests/utils/dbsync_queries.py b/cardano_node_tests/utils/dbsync_queries.py index 264db5da1..4794d6f71 100644 --- a/cardano_node_tests/utils/dbsync_queries.py +++ b/cardano_node_tests/utils/dbsync_queries.py @@ -1144,7 +1144,7 @@ def query_blocks( yield BlockDBRow(*result) -def query_table_names() -> tp.List[str]: +def query_table_names() -> list[str]: """Query table names in db-sync.""" query = ( "SELECT tablename " @@ -1154,7 +1154,7 @@ def query_table_names() -> tp.List[str]: ) with execute(query=query) as cur: - results: tp.List[tp.Tuple[str]] = cur.fetchall() + results: list[tuple[str]] = cur.fetchall() table_names = [r[0] for r in results] return table_names @@ -1168,14 +1168,14 @@ def query_datum(datum_hash: str) -> tp.Generator[DatumDBRow, None, None]: yield 
DatumDBRow(*result) -def query_cost_model(model_id: int = -1, epoch_no: int = -1) -> tp.Dict[str, tp.Dict[str, tp.Any]]: +def query_cost_model(model_id: int = -1, epoch_no: int = -1) -> dict[str, dict[str, tp.Any]]: """Query cost model record in db-sync. If `model_id` is specified, query the cost model that corresponds to the given id. If `epoch_no` is specified, query the cost model used in the given epoch. Otherwise query the latest cost model. """ - query_var: tp.Union[int, str] + query_var: int | str if model_id != -1: subquery = "WHERE cm.id = %s " @@ -1194,7 +1194,7 @@ def query_cost_model(model_id: int = -1, epoch_no: int = -1) -> tp.Dict[str, tp. with execute(query=query, vars=(query_var,)) as cur: results = cur.fetchone() - cost_model: tp.Dict[str, tp.Dict[str, tp.Any]] = results[1] if results else {} + cost_model: dict[str, dict[str, tp.Any]] = results[1] if results else {} return cost_model diff --git a/cardano_node_tests/utils/dbsync_types.py b/cardano_node_tests/utils/dbsync_types.py index ab0e57549..64a0de73f 100644 --- a/cardano_node_tests/utils/dbsync_types.py +++ b/cardano_node_tests/utils/dbsync_types.py @@ -47,7 +47,7 @@ class RewardEpochRecord: @dataclasses.dataclass(frozen=True, order=True) class RewardRecord: address: str - rewards: tp.List[RewardEpochRecord] + rewards: list[RewardEpochRecord] reward_sum: int def __bool__(self) -> bool: @@ -64,7 +64,7 @@ class UTxORecord: decoded_coin: str = "" datum_hash: str = "" inline_datum_hash: str = "" - inline_datum: tp.Optional[tp.Union[str, dict]] = None + inline_datum: str | dict | None = None reference_script: tp.Optional[dict] = None reference_script_hash: str = "" @@ -83,7 +83,7 @@ class PaymentAddrRecord: payment_address: str stake_address: tp.Optional[str] amount_sum: int - utxos: tp.List[GetUTxORecord] + utxos: list[GetUTxORecord] def __bool__(self) -> bool: return self.amount_sum > 0 @@ -106,8 +106,8 @@ class PoolDataRecord: registered_tx_id: int metadata_url: str metadata_hash: str - owners: 
tp.List[str] - relays: tp.List[tp.Dict[str, tp.Dict[str, tp.Any]]] + owners: list[str] + relays: list[dict[str, dict[str, tp.Any]]] retire_cert_index: tp.Optional[int] retire_announced_tx_id: tp.Optional[int] retiring_epoch: tp.Optional[int] @@ -144,23 +144,23 @@ class TxRecord: invalid_before: tp.Optional[int] invalid_hereafter: tp.Optional[int] treasury_donation: int - txins: tp.List[UTxORecord] - txouts: tp.List[UTxORecord] - mint: tp.List[UTxORecord] - collaterals: tp.List[UTxORecord] - collateral_outputs: tp.List[clusterlib.UTXOData] - reference_inputs: tp.List[UTxORecord] - scripts: tp.List[ScriptRecord] - redeemers: tp.List[RedeemerRecord] - metadata: tp.List[MetadataRecord] - reserve: tp.List[ADAStashRecord] - treasury: tp.List[ADAStashRecord] - pot_transfers: tp.List[PotTransferRecord] - stake_registration: tp.List[str] - stake_deregistration: tp.List[str] - stake_delegation: tp.List[DelegationRecord] - withdrawals: tp.List[clusterlib.TxOut] - extra_key_witness: tp.List[str] + txins: list[UTxORecord] + txouts: list[UTxORecord] + mint: list[UTxORecord] + collaterals: list[UTxORecord] + collateral_outputs: list[clusterlib.UTXOData] + reference_inputs: list[UTxORecord] + scripts: list[ScriptRecord] + redeemers: list[RedeemerRecord] + metadata: list[MetadataRecord] + reserve: list[ADAStashRecord] + treasury: list[ADAStashRecord] + pot_transfers: list[PotTransferRecord] + stake_registration: list[str] + stake_deregistration: list[str] + stake_delegation: list[DelegationRecord] + withdrawals: list[clusterlib.TxOut] + extra_key_witness: list[str] def _convert_metadata(self) -> dict: """Convert list of `MetadataRecord`s to metadata dictionary.""" @@ -170,9 +170,9 @@ def _convert_metadata(self) -> dict: @dataclasses.dataclass(frozen=True, order=True) class TxPrelimRecord: - utxo_out: tp.List[UTxORecord] - ma_utxo_out: tp.List[UTxORecord] - mint_utxo_out: tp.List[UTxORecord] + utxo_out: list[UTxORecord] + ma_utxo_out: list[UTxORecord] + mint_utxo_out: 
list[UTxORecord] last_row: dbsync_queries.TxDBRow @@ -221,8 +221,8 @@ class OffChainVoteDataRecord: language: str comment: tp.Optional[str] is_valid: tp.Optional[bool] - authors: tp.List[tp.Dict[str, tp.Any]] - references: tp.List[tp.Dict[str, tp.Any]] - gov_action_data: tp.Dict[str, tp.Any] - external_updates: tp.List[tp.Dict[str, tp.Any]] - voting_anchor: tp.Dict[str, tp.Any] + authors: list[dict[str, tp.Any]] + references: list[dict[str, tp.Any]] + gov_action_data: dict[str, tp.Any] + external_updates: list[dict[str, tp.Any]] + voting_anchor: dict[str, tp.Any] diff --git a/cardano_node_tests/utils/dbsync_utils.py b/cardano_node_tests/utils/dbsync_utils.py index b34e1970b..7d9d8e377 100644 --- a/cardano_node_tests/utils/dbsync_utils.py +++ b/cardano_node_tests/utils/dbsync_utils.py @@ -175,11 +175,11 @@ def get_pool_data(pool_id_bech32: str) -> tp.Optional[dbsync_types.PoolDataRecor def get_prelim_tx_record(txhash: str) -> dbsync_types.TxPrelimRecord: """Get first batch of transaction data from db-sync.""" - utxo_out: tp.List[dbsync_types.UTxORecord] = [] + utxo_out: list[dbsync_types.UTxORecord] = [] seen_tx_out_ids = set() - ma_utxo_out: tp.List[dbsync_types.UTxORecord] = [] + ma_utxo_out: list[dbsync_types.UTxORecord] = [] seen_ma_tx_out_ids = set() - mint_utxo_out: tp.List[dbsync_types.UTxORecord] = [] + mint_utxo_out: list[dbsync_types.UTxORecord] = [] seen_ma_tx_mint_ids = set() tx_id = -1 @@ -258,9 +258,9 @@ def get_prelim_tx_record(txhash: str) -> dbsync_types.TxPrelimRecord: return txdata -def get_txins(txhash: str) -> tp.List[dbsync_types.UTxORecord]: +def get_txins(txhash: str) -> list[dbsync_types.UTxORecord]: """Get txins of a transaction from db-sync.""" - txins: tp.List[dbsync_types.UTxORecord] = [] + txins: list[dbsync_types.UTxORecord] = [] seen_txins_out_ids = set() seen_txins_ma_ids = set() @@ -754,7 +754,7 @@ def check_pool_off_chain_fetch_error( def check_plutus_cost( - redeemer_record: dbsync_types.RedeemerRecord, cost_record: tp.Dict[str, 
tp.Any] + redeemer_record: dbsync_types.RedeemerRecord, cost_record: dict[str, tp.Any] ) -> None: """Compare cost of Plutus script with data from db-sync.""" errors = [] @@ -776,8 +776,8 @@ def check_plutus_cost( def check_plutus_costs( - redeemer_records: tp.List[dbsync_types.RedeemerRecord], - cost_records: tp.List[tp.Dict[str, tp.Any]], + redeemer_records: list[dbsync_types.RedeemerRecord], + cost_records: list[dict[str, tp.Any]], ) -> None: """Compare cost of multiple Plutus scripts with data from db-sync.""" # Sort records first by total cost, second by hash @@ -864,7 +864,7 @@ def _get_float_pparam(pparam: tp.Any) -> tp.Optional[float]: return float(pparam) -def map_params_to_db_convention(pparams: dict) -> tp.Dict[str, tp.Any]: +def map_params_to_db_convention(pparams: dict) -> dict[str, tp.Any]: # Get the prices of memory and steps prices = pparams.get("executionUnitPrices", {}) price_mem = _get_float_pparam(prices.get("priceMemory")) @@ -932,7 +932,7 @@ def map_params_to_db_convention(pparams: dict) -> tp.Dict[str, tp.Any]: def _check_param_proposal( - param_proposal_db: tp.Union[dbsync_queries.ParamProposalDBRow, dbsync_queries.EpochParamDBRow], + param_proposal_db: dbsync_queries.ParamProposalDBRow | dbsync_queries.EpochParamDBRow, params_map: dict, ) -> list: """Check parameter proposal against db-sync.""" @@ -1035,7 +1035,7 @@ def check_conway_gov_action_proposal_description( def get_gov_action_proposals( txhash: str = "", type: str = "" -) -> tp.List[dbsync_queries.GovActionProposalDBRow]: +) -> list[dbsync_queries.GovActionProposalDBRow]: """Get government action proposal from db-sync.""" gov_action_proposals = list(dbsync_queries.query_gov_action_proposal(txhash=txhash, type=type)) return gov_action_proposals @@ -1139,7 +1139,7 @@ def get_drep(drep_hash: str, drep_deposit: int) -> tp.Optional[dbsync_types.Drep def check_drep_registration( - drep: governance_utils.DRepRegistration, drep_state: tp.List[tp.List[tp.Dict[str, tp.Any]]] + drep: 
governance_utils.DRepRegistration, drep_state: list[list[dict[str, tp.Any]]] ) -> tp.Optional[dbsync_types.DrepRegistrationRecord]: """Check drep registration in db-sync.""" if not configuration.HAS_DBSYNC: @@ -1246,7 +1246,7 @@ def check_committee_info(gov_state: dict, txid: str, action_ix: int = 0) -> None ) -def check_treasury_withdrawal(stake_address: str, transfer_amts: tp.List[int], txhash: str) -> None: +def check_treasury_withdrawal(stake_address: str, transfer_amts: list[int], txhash: str) -> None: """Check treasury_withdrawal in db-sync.""" if not configuration.HAS_DBSYNC: return @@ -1277,7 +1277,7 @@ def check_treasury_withdrawal(stake_address: str, transfer_amts: tp.List[int], t ), "Wrong relation between enacted and ratified epochs in db-sync" -def check_reward_rest(stake_address: str, transfer_amts: tp.List[int], type: str = "") -> None: +def check_reward_rest(stake_address: str, transfer_amts: list[int], type: str = "") -> None: """Check reward_rest in db-sync.""" if not configuration.HAS_DBSYNC: return @@ -1388,7 +1388,7 @@ def get_action_data(data_hash: str) -> tp.Optional[dbsync_types.OffChainVoteData "rationale": vote.gov_act_rationale, } if vote.ref_id: - reference: tp.Dict[str, tp.Union[str, tp.Optional[tp.Dict[str, str]]]] + reference: dict[str, str | tp.Optional[dict[str, str]]] reference = {"label": vote.ref_label, "uri": vote.ref_uri} if vote.ref_hash_digest and vote.ref_hash_alg: reference["referenceHash"] = { @@ -1430,7 +1430,7 @@ def get_action_data(data_hash: str) -> tp.Optional[dbsync_types.OffChainVoteData def check_action_data( # noqa: C901 - json_anchor_file: tp.Dict[str, tp.Any], + json_anchor_file: dict[str, tp.Any], anchor_data_hash: str, ) -> None: """Compare anchor json file with off chain action's data from db-sync.""" diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index 4e5767022..8fecb470d 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -14,11 
+14,11 @@ def fund_from_faucet( - *dst_addrs: tp.Union[clusterlib.AddressRecord, clusterlib.PoolUser], + *dst_addrs: clusterlib.AddressRecord | clusterlib.PoolUser, cluster_obj: clusterlib.ClusterLib, faucet_data: tp.Optional[dict] = None, - all_faucets: tp.Optional[tp.Dict[str, dict]] = None, - amount: tp.Union[None, int, tp.List[int]] = None, + all_faucets: tp.Optional[dict[str, dict]] = None, + amount: None | int | list[int] = None, tx_name: tp.Optional[str] = None, destination_dir: clusterlib.FileType = ".", force: bool = False, @@ -32,7 +32,7 @@ def fund_from_faucet( raise AssertionError(msg) # Get payment AddressRecord out of PoolUser - dst_addr_records: tp.List[clusterlib.AddressRecord] = [ + dst_addr_records: list[clusterlib.AddressRecord] = [ (r.payment if hasattr(r, "payment") else r) for r in dst_addrs # type: ignore ] @@ -75,7 +75,7 @@ def return_funds_to_faucet( *src_addrs: clusterlib.AddressRecord, cluster_obj: clusterlib.ClusterLib, faucet_addr: str, - amount: tp.Union[int, tp.List[int]] = -1, + amount: int | list[int] = -1, tx_name: tp.Optional[str] = None, destination_dir: cl_types.FileType = ".", ) -> None: diff --git a/cardano_node_tests/utils/gh_issue.py b/cardano_node_tests/utils/gh_issue.py index 5427a6309..f68e2447e 100644 --- a/cardano_node_tests/utils/gh_issue.py +++ b/cardano_node_tests/utils/gh_issue.py @@ -13,7 +13,7 @@ class GHIssue: TOKEN: tp.ClassVar[tp.Optional[str]] = None - issue_cache: tp.ClassVar[tp.Dict[str, str]] = {} + issue_cache: tp.ClassVar[dict[str, str]] = {} _github_instance: tp.ClassVar[tp.Optional[github.Github]] = None _github_instance_error: tp.ClassVar[bool] = False diff --git a/cardano_node_tests/utils/governance_setup.py b/cardano_node_tests/utils/governance_setup.py index 87e5fe6ac..f20693f04 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -18,7 +18,7 @@ GOV_DATA_STORE = "governance_data.pickle" -def _get_committee_val(data: tp.Dict[str, tp.Any]) -> 
tp.Dict[str, tp.Any]: +def _get_committee_val(data: dict[str, tp.Any]) -> dict[str, tp.Any]: return data.get("committee") or data.get("commitee") or {} @@ -99,7 +99,7 @@ def create_vote_stake( cluster_obj: clusterlib.ClusterLib, no_of_addr: int, destination_dir: clusterlib.FileType = ".", -) -> tp.List[clusterlib.PoolUser]: +) -> list[clusterlib.PoolUser]: pool_users = clusterlib_utils.create_pool_users( cluster_obj=cluster_obj, name_template=name_template, @@ -119,7 +119,7 @@ def create_vote_stake( return pool_users -def load_committee(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_utils.CCKeyMember]: +def load_committee(cluster_obj: clusterlib.ClusterLib) -> list[governance_utils.CCKeyMember]: genesis_cc_members = cluster_obj.conway_genesis.get("committee", {}).get("members") or {} if not genesis_cc_members: return [] @@ -154,7 +154,7 @@ def load_committee(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_uti return cc_members -def load_dreps(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_utils.DRepRegistration]: +def load_dreps(cluster_obj: clusterlib.ClusterLib) -> list[governance_utils.DRepRegistration]: """Load DReps from the state directory.""" data_dir = cluster_obj.state_dir / GOV_DATA_DIR deposit_amt = cluster_obj.conway_genesis["dRepDeposit"] @@ -180,7 +180,7 @@ def load_dreps(cluster_obj: clusterlib.ClusterLib) -> tp.List[governance_utils.D return dreps -def load_drep_users(cluster_obj: clusterlib.ClusterLib) -> tp.List[clusterlib.PoolUser]: +def load_drep_users(cluster_obj: clusterlib.ClusterLib) -> list[clusterlib.PoolUser]: """Load DReps users from the state directory.""" data_dir = cluster_obj.state_dir / GOV_DATA_DIR @@ -293,10 +293,10 @@ def _setup_gov() -> tp.Optional[governance_utils.GovernanceRecords]: def save_default_governance( - dreps_reg: tp.List[governance_utils.DRepRegistration], - drep_delegators: tp.List[clusterlib.PoolUser], - cc_members: tp.List[governance_utils.CCKeyMember], - pools_cold: 
tp.List[clusterlib.ColdKeyPair], + dreps_reg: list[governance_utils.DRepRegistration], + drep_delegators: list[clusterlib.PoolUser], + cc_members: list[governance_utils.CCKeyMember], + pools_cold: list[clusterlib.ColdKeyPair], ) -> governance_utils.GovernanceRecords: """Save governance data to a pickle, so it can be reused. @@ -323,7 +323,7 @@ def save_default_governance( def refresh_cc_keys( cluster_obj: clusterlib.ClusterLib, - cc_members: tp.List[governance_utils.CCKeyMember], + cc_members: list[governance_utils.CCKeyMember], governance_data: governance_utils.GovernanceRecords, ) -> governance_utils.GovernanceRecords: """Refresh hot certs for original CC members.""" @@ -382,7 +382,7 @@ def refresh_cc_keys( def auth_cc_members( cluster_obj: clusterlib.ClusterLib, - cc_members: tp.List[governance_utils.CCKeyMember], + cc_members: list[governance_utils.CCKeyMember], name_template: str, payment_addr: clusterlib.AddressRecord, ) -> None: diff --git a/cardano_node_tests/utils/governance_utils.py b/cardano_node_tests/utils/governance_utils.py index ffad02da7..f64ea4925 100644 --- a/cardano_node_tests/utils/governance_utils.py +++ b/cardano_node_tests/utils/governance_utils.py @@ -16,23 +16,19 @@ LOGGER = logging.getLogger(__name__) -ActionsAllT = tp.Union[ # pylint: disable=invalid-name - clusterlib.ActionConstitution, - clusterlib.ActionHardfork, - clusterlib.ActionInfo, - clusterlib.ActionNoConfidence, - clusterlib.ActionPParamsUpdate, - clusterlib.ActionTreasuryWithdrawal, - clusterlib.ActionUpdateCommittee, -] +ActionsAllT = ( + clusterlib.ActionConstitution + | clusterlib.ActionHardfork + | clusterlib.ActionInfo + | clusterlib.ActionNoConfidence + | clusterlib.ActionPParamsUpdate + | clusterlib.ActionTreasuryWithdrawal + | clusterlib.ActionUpdateCommittee +) -VotesAllT = tp.Union[ # pylint: disable=invalid-name - clusterlib.VoteCC, - clusterlib.VoteDrep, - clusterlib.VoteSPO, -] +VotesAllT = clusterlib.VoteCC | clusterlib.VoteDrep | clusterlib.VoteSPO -DRepStateT 
= tp.List[tp.List[tp.Dict[str, tp.Any]]] +DRepStateT = list[list[dict[str, tp.Any]]] class ScriptTypes(enum.Enum): @@ -58,14 +54,14 @@ class DRepScriptRegRecord: @dataclasses.dataclass(frozen=True, order=True) class DRepScriptRegInputs: registration_cert: clusterlib.ComplexCert - key_pairs: tp.List[clusterlib.KeyPair] + key_pairs: list[clusterlib.KeyPair] script_type: ScriptTypes @dataclasses.dataclass(frozen=True, order=True) class DRepScriptRegistration: registration_cert: clusterlib.ComplexCert - key_pairs: tp.List[clusterlib.KeyPair] + key_pairs: list[clusterlib.KeyPair] script_hash: str script_type: ScriptTypes deposit: int @@ -89,15 +85,15 @@ class CCKeyMember: @dataclasses.dataclass(frozen=True, order=True) class GovernanceRecords: - dreps_reg: tp.List[DRepRegistration] - drep_delegators: tp.List[clusterlib.PoolUser] - cc_key_members: tp.List[CCKeyMember] - pools_cold: tp.List[clusterlib.ColdKeyPair] - drep_scripts_reg: tp.List[DRepScriptRegistration] = dataclasses.field(default_factory=list) - drep_scripts_delegators: tp.List[clusterlib.PoolUser] = dataclasses.field(default_factory=list) + dreps_reg: list[DRepRegistration] + drep_delegators: list[clusterlib.PoolUser] + cc_key_members: list[CCKeyMember] + pools_cold: list[clusterlib.ColdKeyPair] + drep_scripts_reg: list[DRepScriptRegistration] = dataclasses.field(default_factory=list) + drep_scripts_delegators: list[clusterlib.PoolUser] = dataclasses.field(default_factory=list) -GovClusterT = tp.Tuple[clusterlib.ClusterLib, GovernanceRecords] +GovClusterT = tuple[clusterlib.ClusterLib, GovernanceRecords] @dataclasses.dataclass(frozen=True, order=True) @@ -127,9 +123,9 @@ class StakeDelegation: @dataclasses.dataclass(frozen=True, order=True) class VotedVotes: - cc: tp.List[clusterlib.VoteCC] # pylint: disable=invalid-name - drep: tp.List[clusterlib.VoteDrep] - spo: tp.List[clusterlib.VoteSPO] + cc: list[clusterlib.VoteCC] # pylint: disable=invalid-name + drep: list[clusterlib.VoteDrep] + spo: 
list[clusterlib.VoteSPO] class PrevGovActionIds(enum.Enum): @@ -182,9 +178,7 @@ def check_drep_delegation(deleg_state: dict, drep_id: str, stake_addr_hash: str) assert stake_addr_val.get("drep") == expected_drep -def check_drep_stake_distribution( - distrib_state: tp.List[list], drep_id: str, min_amount: int -) -> None: +def check_drep_stake_distribution(distrib_state: list[list], drep_id: str, min_amount: int) -> None: cred_name = get_drep_cred_name(drep_id=drep_id) expected_drep = f"drep-{cred_name}" @@ -202,7 +196,7 @@ def check_drep_stake_distribution( def get_prev_action( action_type: PrevGovActionIds, - gov_state: tp.Dict[str, tp.Any], + gov_state: dict[str, tp.Any], ) -> PrevActionRec: prev_action_rec = ( gov_state["nextRatifyState"]["nextEnactState"]["prevGovActionIds"][action_type.value] or {} @@ -214,9 +208,9 @@ def get_prev_action( def _lookup_action( - actions: tp.List[tp.Dict[str, tp.Any]], action_txid: str, action_ix: int = 0 -) -> tp.Dict[str, tp.Any]: - prop: tp.Dict[str, tp.Any] = {} + actions: list[dict[str, tp.Any]], action_txid: str, action_ix: int = 0 +) -> dict[str, tp.Any]: + prop: dict[str, tp.Any] = {} for _a in actions: _p_action_id = _a["actionId"] if _p_action_id["txId"] == action_txid and _p_action_id["govActionIx"] == action_ix: @@ -226,30 +220,26 @@ def _lookup_action( def lookup_proposal( - gov_state: tp.Dict[str, tp.Any], action_txid: str, action_ix: int = 0 -) -> tp.Dict[str, tp.Any]: - proposals: tp.List[tp.Dict[str, tp.Any]] = gov_state["proposals"] + gov_state: dict[str, tp.Any], action_txid: str, action_ix: int = 0 +) -> dict[str, tp.Any]: + proposals: list[dict[str, tp.Any]] = gov_state["proposals"] return _lookup_action(actions=proposals, action_txid=action_txid, action_ix=action_ix) def lookup_ratified_actions( - gov_state: tp.Dict[str, tp.Any], action_txid: str, action_ix: int = 0 -) -> tp.Dict[str, tp.Any]: - ratified_actions: tp.List[tp.Dict[str, tp.Any]] = gov_state["nextRatifyState"][ - "enactedGovActions" - ] + 
gov_state: dict[str, tp.Any], action_txid: str, action_ix: int = 0 +) -> dict[str, tp.Any]: + ratified_actions: list[dict[str, tp.Any]] = gov_state["nextRatifyState"]["enactedGovActions"] return _lookup_action(actions=ratified_actions, action_txid=action_txid, action_ix=action_ix) def lookup_expired_actions( - gov_state: tp.Dict[str, tp.Any], + gov_state: dict[str, tp.Any], action_txid: str, action_ix: int = 0, -) -> tp.Dict[str, tp.Any]: - removed_actions: tp.List[tp.Dict[str, tp.Any]] = gov_state["nextRatifyState"][ - "expiredGovActions" - ] - raction: tp.Dict[str, tp.Any] = {} +) -> dict[str, tp.Any]: + removed_actions: list[dict[str, tp.Any]] = gov_state["nextRatifyState"]["expiredGovActions"] + raction: dict[str, tp.Any] = {} for _r in removed_actions: if _r["txId"] == action_txid and _r["govActionIx"] == action_ix: raction = _r @@ -376,7 +366,7 @@ def check_action_view( # noqa: C901 prev_action_txid = getattr(action_data, "prev_action_txid", None) prev_action_ix = getattr(action_data, "prev_action_ix", None) - gov_action: tp.Dict[str, tp.Any] + gov_action: dict[str, tp.Any] if isinstance(action_data, clusterlib.ActionTreasuryWithdrawal): if not recv_addr_vkey_hash: @@ -637,7 +627,7 @@ def is_drep_active( return bool(drep_state[0][1].get("expiry", 0) > epoch) -def is_cc_active(cc_member_state: tp.Dict[str, tp.Any]) -> bool: +def is_cc_active(cc_member_state: dict[str, tp.Any]) -> bool: """Check if CC member is active.""" if not cc_member_state: return False @@ -654,9 +644,9 @@ def create_dreps( num: int, cluster_obj: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], destination_dir: clusterlib.FileType = ".", -) -> tp.Tuple[tp.List[DRepRegistration], tp.List[clusterlib.PoolUser]]: +) -> tuple[list[DRepRegistration], list[clusterlib.PoolUser]]: """Create DReps with keys.""" no_of_addrs = len(pool_users) @@ -732,12 +722,12 @@ def create_dreps( def 
create_script_dreps( name_template: str, - script_inputs: tp.List[DRepScriptRegInputs], + script_inputs: list[DRepScriptRegInputs], cluster_obj: clusterlib.ClusterLib, payment_addr: clusterlib.AddressRecord, - pool_users: tp.List[clusterlib.PoolUser], + pool_users: list[clusterlib.PoolUser], destination_dir: clusterlib.FileType = ".", -) -> tp.Tuple[tp.List[DRepScriptRegistration], tp.List[clusterlib.PoolUser]]: +) -> tuple[list[DRepScriptRegistration], list[clusterlib.PoolUser]]: """Create DReps with scripts.""" no_of_addrs = len(pool_users) no_of_scripts = len(script_inputs) diff --git a/cardano_node_tests/utils/helpers.py b/cardano_node_tests/utils/helpers.py index 03f75401d..11b00f36a 100644 --- a/cardano_node_tests/utils/helpers.py +++ b/cardano_node_tests/utils/helpers.py @@ -98,13 +98,13 @@ def environ(env: dict) -> tp.Iterator[None]: def run_command( - command: tp.Union[str, list], + command: str | list, workdir: ttypes.FileType = "", ignore_fail: bool = False, shell: bool = False, ) -> bytes: """Run command.""" - cmd: tp.Union[str, list] + cmd: str | list if isinstance(command, str): cmd = command if shell else command.split() cmd_str = command @@ -152,7 +152,7 @@ def get_rand_str(length: int = 8) -> str: # TODO: unify with the implementation in clusterlib -def prepend_flag(flag: str, contents: tp.Iterable) -> tp.List[str]: +def prepend_flag(flag: str, contents: tp.Iterable) -> list[str]: """Prepend flag to every item of the sequence. Args: @@ -160,7 +160,7 @@ def prepend_flag(flag: str, contents: tp.Iterable) -> tp.List[str]: contents: A list (iterable) of content to be prepended. Returns: - List[str]: A list of flag followed by content, see below. + list[str]: A list of flag followed by content, see below. 
>>> prepend_flag("--foo", [1, 2, 3]) ['--foo', '1', '--foo', '2', '--foo', '3'] @@ -306,8 +306,8 @@ def flatten( def validate_dict_values( - dict1: tp.Dict[str, tp.Any], dict2: tp.Dict[str, tp.Any], keys: tp.Iterable[str] -) -> tp.List[str]: + dict1: dict[str, tp.Any], dict2: dict[str, tp.Any], keys: tp.Iterable[str] +) -> list[str]: """Compare values for specified keys between two dictionaries and return discrepancies. Args: diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index d4bfe101c..2359f3b99 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -94,7 +94,7 @@ def get_framework_log_path() -> pl.Path: return temptools.get_pytest_worker_tmp() / "framework.log" -def _look_back_found(buffer: tp.List[str]) -> bool: +def _look_back_found(buffer: list[str]) -> bool: """Look back to the buffer to see if there is an expected message. If the expected message is found, the error can be ignored. @@ -114,9 +114,7 @@ def _look_back_found(buffer: tp.List[str]) -> bool: return any(re.search(look_back_re, line) for line in buffer[:-1]) -def _get_rotated_logs( - logfile: pl.Path, seek: int = 0, timestamp: float = 0.0 -) -> tp.List[RotableLog]: +def _get_rotated_logs(logfile: pl.Path, seek: int = 0, timestamp: float = 0.0) -> list[RotableLog]: """Return list of versions of the log file (list of `RotableLog`). 
When the seek offset was recorded for a log file and the log file was rotated, @@ -149,9 +147,9 @@ def _get_ignore_rules_lock_file(instance_num: int) -> pl.Path: def _get_ignore_rules( cluster_env: cluster_nodes.ClusterEnv, timestamp: float -) -> tp.List[tp.Tuple[str, str]]: +) -> list[tuple[str, str]]: """Get rules (file glob and regex) for ignored errors.""" - rules: tp.List[tp.Tuple[str, str]] = [] + rules: list[tuple[str, str]] = [] lock_file = _get_ignore_rules_lock_file(instance_num=cluster_env.instance_num) with locking.FileLockIfXdist(lock_file): @@ -183,7 +181,7 @@ def _read_seek(offset_file: pl.Path) -> int: def _get_ignore_regex( - ignore_rules: tp.List[tp.Tuple[str, str]], regexes: tp.List[str], logfile: pl.Path + ignore_rules: list[tuple[str, str]], regexes: list[str], logfile: pl.Path ) -> str: """Combine together regex for the given log file using file specific and global ignore rules.""" regex_set = set(regexes) @@ -196,11 +194,11 @@ def _get_ignore_regex( def _search_log_lines( logfile: pl.Path, - rotated_logs: tp.List[RotableLog], + rotated_logs: list[RotableLog], errors_re: re.Pattern, errors_ignored_re: tp.Optional[re.Pattern] = None, errors_look_back_re: tp.Optional[re.Pattern] = None, -) -> tp.List[tp.Tuple[pl.Path, str]]: +) -> list[tuple[pl.Path, str]]: """Search for errors in the log file and, if needed, in the corresponding rotated logs.""" errors = [] last_line_pos = -1 @@ -280,7 +278,7 @@ def find_msgs_in_logs( seek_offset: int, timestamp: float, only_first: bool = False, -) -> tp.List[str]: +) -> list[str]: """Find messages in log.""" regex_comp = re.compile(regex) lines_found = [] @@ -300,11 +298,11 @@ def find_msgs_in_logs( def check_msgs_presence_in_logs( - regex_pairs: tp.List[tp.Tuple[str, str]], - seek_offsets: tp.Dict[str, int], + regex_pairs: list[tuple[str, str]], + seek_offsets: dict[str, int], state_dir: pl.Path, timestamp: float, -) -> tp.List[str]: +) -> list[str]: """Check if the expected messages are present in logs.""" 
errors = [] for files_glob, regex in regex_pairs: @@ -337,7 +335,7 @@ def check_msgs_presence_in_logs( @contextlib.contextmanager -def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> tp.Iterator[None]: +def expect_errors(regex_pairs: list[tuple[str, str]], worker_id: str) -> tp.Iterator[None]: """Make sure the expected errors are present in logs. Context manager. @@ -374,7 +372,7 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> t @contextlib.contextmanager -def expect_messages(regex_pairs: tp.List[tp.Tuple[str, str]]) -> tp.Iterator[None]: +def expect_messages(regex_pairs: list[tuple[str, str]]) -> tp.Iterator[None]: """Make sure the expected messages are present in logs. Context manager. @@ -405,7 +403,7 @@ def expect_messages(regex_pairs: tp.List[tp.Tuple[str, str]]) -> tp.Iterator[Non raise AssertionError(errors_joined) from None -def search_cluster_logs() -> tp.List[tp.Tuple[pl.Path, str]]: +def search_cluster_logs() -> list[tuple[pl.Path, str]]: """Search cluster logs for errors.""" cluster_env = cluster_nodes.get_cluster_env() lock_file = temptools.get_basetemp() / f"search_cluster_{cluster_env.instance_num}.lock" @@ -448,7 +446,7 @@ def search_cluster_logs() -> tp.List[tp.Tuple[pl.Path, str]]: return errors -def search_framework_log() -> tp.List[tp.Tuple[pl.Path, str]]: +def search_framework_log() -> list[tuple[pl.Path, str]]: """Search framework log for errors.""" # It is not necessary to lock the `framework.log` file because there is one log file per worker. # Each worker is checking only its own log file. 
@@ -476,7 +474,7 @@ def search_framework_log() -> tp.List[tp.Tuple[pl.Path, str]]: return errors -def search_supervisord_logs() -> tp.List[tp.Tuple[pl.Path, str]]: +def search_supervisord_logs() -> list[tuple[pl.Path, str]]: """Search cluster logs for errors.""" cluster_env = cluster_nodes.get_cluster_env() lock_file = temptools.get_basetemp() / f"search_supervisord_{cluster_env.instance_num}.lock" diff --git a/cardano_node_tests/utils/poll_utils.py b/cardano_node_tests/utils/poll_utils.py index 8a8b862eb..b1e3a87bd 100644 --- a/cardano_node_tests/utils/poll_utils.py +++ b/cardano_node_tests/utils/poll_utils.py @@ -4,7 +4,6 @@ import json import logging import pathlib as pl -import typing as tp from cardano_clusterlib import clusterlib @@ -20,7 +19,7 @@ class PollFiles: def create_poll( - cluster_obj: clusterlib.ClusterLib, question: str, answers: tp.List[str], name_template: str + cluster_obj: clusterlib.ClusterLib, question: str, answers: list[str], name_template: str ) -> PollFiles: """Create a poll and return the poll and metadata files.""" poll_file = f"{name_template}_poll.json" @@ -85,7 +84,7 @@ def answer_poll( def verify_poll( cluster_obj: clusterlib.ClusterLib, poll_file: pl.Path, tx_signed: pl.Path -) -> tp.Tuple[str, ...]: +) -> tuple[str, ...]: """Verify an answer to the poll.""" cli_out = cluster_obj.cli( [ diff --git a/cardano_node_tests/utils/requirements.py b/cardano_node_tests/utils/requirements.py index 852f17d00..dfe0c93d0 100644 --- a/cardano_node_tests/utils/requirements.py +++ b/cardano_node_tests/utils/requirements.py @@ -119,9 +119,9 @@ def collect_executed_req(base_dir: pl.Path) -> dict: return collected -def merge_reqs(*reqs: tp.Dict[str, dict]) -> dict: +def merge_reqs(*reqs: dict[str, dict]) -> dict: """Merge requirements.""" - merged: tp.Dict[str, dict] = {} + merged: dict[str, dict] = {} for report in reqs: for gname, greqs in report.items(): merged_group = merged.get(gname) or {} @@ -140,14 +140,14 @@ def get_mapped_req(mapping: 
pl.Path, executed_req: dict) -> dict: # noqa: C901 with open(mapping, encoding="utf-8") as in_fp: requirements_mapping = json.load(in_fp) - errors: tp.Dict[str, tp.Set[str]] = {} + errors: dict[str, set[str]] = {} for group, reqs in requirements_mapping.items(): reqs_set = set(reqs.keys()) executed_group = executed_req.get(group) or {} if not executed_group: executed_req[group] = executed_group - group_errors: tp.Set[str] = set() + group_errors: set[str] = set() for req_id, dependencies in reqs.items(): deps_in_reqs = reqs_set.intersection(dependencies) if deps_in_reqs: diff --git a/cardano_node_tests/utils/submit_api.py b/cardano_node_tests/utils/submit_api.py index 794ae896e..5cb61a890 100644 --- a/cardano_node_tests/utils/submit_api.py +++ b/cardano_node_tests/utils/submit_api.py @@ -8,7 +8,6 @@ import random import shutil import time -import typing as tp import requests from cardano_clusterlib import clusterlib @@ -105,7 +104,7 @@ def submit_tx_bare(tx_file: clusterlib.FileType) -> SubmitApiOut: def submit_tx( cluster_obj: clusterlib.ClusterLib, tx_file: clusterlib.FileType, - txins: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], wait_blocks: int = 2, ) -> None: """Submit a transaction, resubmit if the transaction didn't make it to the chain. diff --git a/cardano_node_tests/utils/submit_utils.py b/cardano_node_tests/utils/submit_utils.py index c96b9a19f..4ee7301df 100644 --- a/cardano_node_tests/utils/submit_utils.py +++ b/cardano_node_tests/utils/submit_utils.py @@ -39,7 +39,7 @@ def submit_tx( submit_method: str, cluster_obj: clusterlib.ClusterLib, tx_file: clusterlib.FileType, - txins: tp.List[clusterlib.UTXOData], + txins: list[clusterlib.UTXOData], wait_blocks: int = 2, ) -> None: """Submit a transaction using the selected method. 
diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index a281c5e1c..d6e2a5773 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -114,7 +114,7 @@ def retire_drep( def return_funds_to_faucet( cluster_obj: clusterlib.ClusterLib, - src_addrs: tp.List[clusterlib.AddressRecord], + src_addrs: list[clusterlib.AddressRecord], faucet_address: str, tx_name: str, ) -> None: @@ -186,14 +186,14 @@ def find_cert_files(location: pl.Path) -> tp.Generator[pl.Path, None, None]: return location.glob("**/*_drep_reg.cert") -def group_addr_files(file_paths: tp.Generator[pl.Path, None, None]) -> tp.List[tp.List[pl.Path]]: +def group_addr_files(file_paths: tp.Generator[pl.Path, None, None]) -> list[list[pl.Path]]: """Group payment address files with corresponding stake address files. These need to be processed together - funds are transferred from payment address after the stake address was deregistered. 
""" - curr_group: tp.List[pl.Path] = [] - path_groups: tp.List[tp.List[pl.Path]] = [curr_group] + curr_group: list[pl.Path] = [] + path_groups: list[list[pl.Path]] = [curr_group] prev_basename = "" # reverse-sort the list so stake address files are processes before payment address files @@ -218,7 +218,7 @@ def cleanup_addresses( files_found = group_addr_files(find_addr_files(location)) stake_deposit_amt = cluster_obj.g_query.get_address_deposit() - def _run(files: tp.List[pl.Path]) -> None: + def _run(files: list[pl.Path]) -> None: for fpath in files: # Add random sleep for < 1s to prevent # "Network.Socket.connect: : resource exhausted" diff --git a/cardano_node_tests/utils/tx_view.py b/cardano_node_tests/utils/tx_view.py index 58fc874f0..c4191772d 100644 --- a/cardano_node_tests/utils/tx_view.py +++ b/cardano_node_tests/utils/tx_view.py @@ -71,7 +71,7 @@ def load_raw(tx_view: str) -> dict: return tx_loaded -def _load_assets(assets: tp.Dict[str, tp.Dict[str, int]]) -> tp.List[tp.Tuple[int, str]]: +def _load_assets(assets: dict[str, dict[str, int]]) -> list[tuple[int, str]]: loaded_data = [] for policy_key_rec, policy_rec in assets.items(): @@ -92,7 +92,7 @@ def _load_assets(assets: tp.Dict[str, tp.Dict[str, int]]) -> tp.List[tp.Tuple[in return loaded_data -def _load_coins_data(coins_data: tp.Union[dict, str]) -> tp.List[tp.Tuple[int, str]]: +def _load_coins_data(coins_data: dict | str) -> list[tuple[int, str]]: # `coins_data` for Mary+ Tx era has Lovelace amount and policies info, # for older Tx eras it's just Lovelace amount try: @@ -116,7 +116,7 @@ def _check_collateral_inputs(tx_raw_output: clusterlib.TxRawOutput, tx_loaded: d """Check collateral inputs of tx_view.""" view_collateral = set(tx_loaded.get("collateral inputs") or []) - all_collateral_locations: tp.List[tp.Any] = [ + all_collateral_locations: list[tp.Any] = [ *(tx_raw_output.script_txins or ()), *(tx_raw_output.script_withdrawals or ()), *(tx_raw_output.complex_certs or ()), @@ -150,7 +150,7 @@ def 
_check_reference_inputs(tx_raw_output: clusterlib.TxRawOutput, tx_loaded: di s.reference_txin for s in reference_txin_locations if getattr(s, "reference_txin", None) ] - reference_txins_combined: tp.List[tp.Any] = [ + reference_txins_combined: list[tp.Any] = [ *(tx_raw_output.readonly_reference_txins or []), *reference_txins, ] @@ -223,15 +223,15 @@ def _check_return_collateral(tx_raw_output: clusterlib.TxRawOutput, tx_loaded: d ), "Return collateral address mismatch" -def load_tx_view(cluster_obj: clusterlib.ClusterLib, tx_body_file: pl.Path) -> tp.Dict[str, tp.Any]: +def load_tx_view(cluster_obj: clusterlib.ClusterLib, tx_body_file: pl.Path) -> dict[str, tp.Any]: tx_view_raw = cluster_obj.g_transaction.view_tx(tx_body_file=tx_body_file) - tx_loaded: tp.Dict[str, tp.Any] = load_raw(tx_view=tx_view_raw) + tx_loaded: dict[str, tp.Any] = load_raw(tx_view=tx_view_raw) return tx_loaded def check_tx_view( # noqa: C901 cluster_obj: clusterlib.ClusterLib, tx_raw_output: clusterlib.TxRawOutput -) -> tp.Dict[str, tp.Any]: +) -> dict[str, tp.Any]: """Check output of the `transaction view` command.""" # pylint: disable=too-many-branches,too-many-locals,too-many-statements @@ -252,7 +252,7 @@ def check_tx_view( # noqa: C901 # check outputs tx_loaded_outputs = tx_loaded.get("outputs") or [] - loaded_txouts: tp.Set[tp.Tuple[str, int, str]] = set() + loaded_txouts: set[tuple[str, int, str]] = set() for txout in tx_loaded_outputs: address = txout["address"] for amount in _load_coins_data(txout["amount"]): diff --git a/cardano_node_tests/utils/types.py b/cardano_node_tests/utils/types.py index aa02f669b..156423c3c 100644 --- a/cardano_node_tests/utils/types.py +++ b/cardano_node_tests/utils/types.py @@ -1,7 +1,6 @@ import pathlib as pl -import typing as tp -FileType = tp.Union[str, pl.Path] -FileTypeList = tp.Union[tp.List[FileType], tp.List[str], tp.List[pl.Path]] +FileType = str | pl.Path +FileTypeList = list[FileType] | list[str] | list[pl.Path] # list of `FileType`s, empty 
list, or empty tuple -OptionalFiles = tp.Union[FileTypeList, tp.Tuple[()]] +OptionalFiles = FileTypeList | tuple[()] diff --git a/cardano_node_tests/utils/versions.py b/cardano_node_tests/utils/versions.py index 4c0025e23..b3c5c9341 100644 --- a/cardano_node_tests/utils/versions.py +++ b/cardano_node_tests/utils/versions.py @@ -23,7 +23,7 @@ class Versions: DEFAULT_TX_ERA: tp.Final[int] = 9 LAST_KNOWN_ERA: tp.Final[int] = 9 - MAP: tp.ClassVar[tp.Dict[int, str]] = { + MAP: tp.ClassVar[dict[int, str]] = { 1: "byron", 2: "shelley", 3: "allegra", From 355737b42478b247b79cdb4cbb7b9bac8a430ba2 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Thu, 28 Nov 2024 17:02:21 +0100 Subject: [PATCH 165/168] refactor: replace `Optional[type]` with `type | None` --- .../cluster_management/cache.py | 2 +- .../cluster_management/cluster_getter.py | 2 +- .../cluster_management/manager.py | 10 +- cardano_node_tests/tests/delegation.py | 6 +- cardano_node_tests/tests/kes.py | 4 +- cardano_node_tests/tests/plutus_common.py | 17 +- .../tests/test_chain_transactions.py | 3 +- cardano_node_tests/tests/test_pools.py | 11 +- cardano_node_tests/tests/test_reconnect.py | 2 +- cardano_node_tests/tests/test_rollback.py | 3 +- cardano_node_tests/tests/test_scripts.py | 7 +- .../tests/test_staking_rewards.py | 4 +- .../tests/test_tx_many_utxos.py | 3 +- cardano_node_tests/tests/test_tx_negative.py | 8 +- .../tests/tests_conway/conway_common.py | 8 +- .../tests/tests_conway/test_drep.py | 6 +- .../tests/tests_plutus/spend_build.py | 20 +- .../tests/tests_plutus/spend_raw.py | 18 +- .../tests/tests_plutus/test_delegation.py | 8 +- .../tests/tests_plutus/test_lobster.py | 3 +- .../tests/tests_plutus/test_spend_build.py | 12 +- .../tests/tests_plutus/test_spend_raw.py | 12 +- .../tests/tests_plutus_v2/mint_build.py | 7 +- .../tests/tests_plutus_v2/mint_raw.py | 7 +- .../tests/tests_plutus_v2/spend_build.py | 12 +- .../tests/tests_plutus_v2/spend_raw.py | 12 +- 
.../tests/tests_plutus_v2/test_mint_build.py | 5 +- .../tests/tests_plutus_v2/test_mint_raw.py | 5 +- .../test_spend_collateral_build.py | 3 +- cardano_node_tests/utils/artifacts.py | 7 +- cardano_node_tests/utils/cluster_nodes.py | 20 +- cardano_node_tests/utils/cluster_scripts.py | 2 +- cardano_node_tests/utils/clusterlib_utils.py | 40 +-- cardano_node_tests/utils/dbsync_conn.py | 4 +- cardano_node_tests/utils/dbsync_queries.py | 304 +++++++++--------- cardano_node_tests/utils/dbsync_types.py | 24 +- cardano_node_tests/utils/dbsync_utils.py | 40 +-- cardano_node_tests/utils/faucet.py | 11 +- cardano_node_tests/utils/gh_issue.py | 10 +- cardano_node_tests/utils/governance_setup.py | 2 +- cardano_node_tests/utils/helpers.py | 4 +- cardano_node_tests/utils/logfiles.py | 4 +- cardano_node_tests/utils/temptools.py | 6 +- 43 files changed, 334 insertions(+), 364 deletions(-) diff --git a/cardano_node_tests/cluster_management/cache.py b/cardano_node_tests/cluster_management/cache.py index 1df62e1db..754a125e6 100644 --- a/cardano_node_tests/cluster_management/cache.py +++ b/cardano_node_tests/cluster_management/cache.py @@ -12,7 +12,7 @@ class ClusterManagerCache: """ # single `ClusterLib` instance can be used in multiple tests executed on the same worker - cluster_obj: tp.Optional[clusterlib.ClusterLib] = None + cluster_obj: clusterlib.ClusterLib | None = None # data for initialized cluster instance test_data: dict = dataclasses.field(default_factory=dict) addrs_data: dict = dataclasses.field(default_factory=dict) diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index 5fe872e38..140a86419 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -184,7 +184,7 @@ def _respin(self, start_cmd: str = "", stop_cmd: str = "") -> bool: # noqa: C90 f"stop_cmd='{startup_files.stop_script}'" ) - excp: tp.Optional[Exception] = 
None + excp: Exception | None = None for i in range(2): if i > 0: self.log( diff --git a/cardano_node_tests/cluster_management/manager.py b/cardano_node_tests/cluster_management/manager.py index e1dbc86ee..bf65ce4a6 100644 --- a/cardano_node_tests/cluster_management/manager.py +++ b/cardano_node_tests/cluster_management/manager.py @@ -290,7 +290,7 @@ def on_test_stop(self) -> None: def _get_resources_by_glob( self, glob: str, - from_set: tp.Optional[tp.Iterable[str]] = None, + from_set: tp.Iterable[str] | None = None, ) -> list[str]: if from_set is not None and isinstance(from_set, str): msg = "`from_set` cannot be a string" @@ -305,8 +305,8 @@ def _get_resources_by_glob( def get_locked_resources( self, - from_set: tp.Optional[tp.Iterable[str]] = None, - worker_id: tp.Optional[str] = None, + from_set: tp.Iterable[str] | None = None, + worker_id: str | None = None, ) -> list[str]: """Get resources locked by worker. @@ -317,8 +317,8 @@ def get_locked_resources( def get_used_resources( self, - from_set: tp.Optional[tp.Iterable[str]] = None, - worker_id: tp.Optional[str] = None, + from_set: tp.Iterable[str] | None = None, + worker_id: str | None = None, ) -> list[str]: """Get resources used by worker. 
diff --git a/cardano_node_tests/tests/delegation.py b/cardano_node_tests/tests/delegation.py index fb250b739..efcacf090 100644 --- a/cardano_node_tests/tests/delegation.py +++ b/cardano_node_tests/tests/delegation.py @@ -107,7 +107,7 @@ def cluster_and_pool( def db_check_delegation( pool_user: clusterlib.PoolUser | PoolUserScript, - db_record: tp.Optional[dbsync_types.TxRecord], + db_record: dbsync_types.TxRecord | None, deleg_epoch: int, pool_id: str, check_registration: bool = True, @@ -128,9 +128,9 @@ def delegate_stake_addr( cluster_obj: clusterlib.ClusterLib, addrs_data: dict, temp_template: str, - pool_user: tp.Optional[clusterlib.PoolUser] = None, + pool_user: clusterlib.PoolUser | None = None, pool_id: str = "", - cold_vkey: tp.Optional[pl.Path] = None, + cold_vkey: pl.Path | None = None, amount: int = 100_000_000, use_build_cmd: bool = False, ) -> DelegationOut: diff --git a/cardano_node_tests/tests/kes.py b/cardano_node_tests/tests/kes.py index 697fa3d3a..0b7007dec 100644 --- a/cardano_node_tests/tests/kes.py +++ b/cardano_node_tests/tests/kes.py @@ -28,8 +28,8 @@ def check_kes_period_info_result( # noqa: C901 kes_output: dict[str, tp.Any], expected_scenario: str, check_id: str, - expected_start_kes: tp.Optional[int] = None, - pool_num: tp.Optional[int] = None, + expected_start_kes: int | None = None, + pool_num: int | None = None, ) -> list[str]: """Check output `kes-period-info` command. 
diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 756672dbc..3276c015c 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -1,7 +1,6 @@ import dataclasses import itertools import pathlib as pl -import typing as tp import pytest from cardano_clusterlib import clusterlib @@ -469,13 +468,13 @@ class PlutusScriptData: @dataclasses.dataclass(frozen=True, order=True) class PlutusOp: script_file: clusterlib.FileType - datum_file: tp.Optional[pl.Path] = None - datum_cbor_file: tp.Optional[pl.Path] = None - datum_value: tp.Optional[str] = None - redeemer_file: tp.Optional[pl.Path] = None - redeemer_cbor_file: tp.Optional[pl.Path] = None - redeemer_value: tp.Optional[str] = None - execution_cost: tp.Optional[ExecutionCost] = None + datum_file: pl.Path | None = None + datum_cbor_file: pl.Path | None = None + datum_value: str | None = None + redeemer_file: pl.Path | None = None + redeemer_cbor_file: pl.Path | None = None + redeemer_value: str | None = None + execution_cost: ExecutionCost | None = None @dataclasses.dataclass(frozen=True, order=True) @@ -734,7 +733,7 @@ def create_script_context_w_blockers( cluster_obj: clusterlib.ClusterLib, plutus_version: int, redeemer_file: pl.Path, - tx_file: tp.Optional[pl.Path] = None, + tx_file: pl.Path | None = None, ) -> None: """Run the `create-script-context` command (available in plutus-apps). 
diff --git a/cardano_node_tests/tests/test_chain_transactions.py b/cardano_node_tests/tests/test_chain_transactions.py index fbe257576..774fb9459 100644 --- a/cardano_node_tests/tests/test_chain_transactions.py +++ b/cardano_node_tests/tests/test_chain_transactions.py @@ -3,7 +3,6 @@ import logging import pathlib as pl import time -import typing as tp import allure import pytest @@ -50,7 +49,7 @@ def _gen_signed_tx( out_addr: clusterlib.AddressRecord, tx_name: str, fee: int, - invalid_hereafter: tp.Optional[int] = None, + invalid_hereafter: int | None = None, ) -> tuple[clusterlib.UTXOData, clusterlib.TxRawOutput, pl.Path]: """Generate Tx and return Tx output in a format that can be used as input for next Tx.""" send_amount = txin.amount - fee diff --git a/cardano_node_tests/tests/test_pools.py b/cardano_node_tests/tests/test_pools.py index 9fd021659..98120879e 100644 --- a/cardano_node_tests/tests/test_pools.py +++ b/cardano_node_tests/tests/test_pools.py @@ -12,7 +12,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import hypothesis @@ -126,8 +125,8 @@ def _register_stake_pool_w_build( vrf_vkey_file: clusterlib.FileType, cold_key_pair: clusterlib.ColdKeyPair, tx_name: str, - reward_account_vkey_file: tp.Optional[clusterlib.FileType] = None, - deposit: tp.Optional[int] = None, + reward_account_vkey_file: clusterlib.FileType | None = None, + deposit: int | None = None, destination_dir: clusterlib.FileType = ".", ) -> tuple[pl.Path, clusterlib.TxRawOutput]: """Register a stake pool using a `transaction build` command. @@ -335,7 +334,7 @@ def _create_register_pool( temp_dir: pl.Path, pool_owners: list[clusterlib.PoolUser], pool_data: clusterlib.PoolData, - request: tp.Optional[FixtureRequest] = None, + request: FixtureRequest | None = None, use_build_cmd: bool = False, ) -> clusterlib.PoolCreationOutput: """Create and register a stake pool. 
@@ -402,7 +401,7 @@ def _create_register_pool_delegate_stake_tx( temp_template: str, temp_dir: pl.Path, pool_data: clusterlib.PoolData, - request: tp.Optional[FixtureRequest] = None, + request: FixtureRequest | None = None, use_build_cmd: bool = False, ) -> clusterlib.PoolCreationOutput: """Create and register a stake pool, delegate stake address - all in single TX. @@ -532,7 +531,7 @@ def _create_register_pool_tx_delegate_stake_tx( temp_template: str, temp_dir: pl.Path, pool_data: clusterlib.PoolData, - request: tp.Optional[FixtureRequest] = None, + request: FixtureRequest | None = None, use_build_cmd: bool = False, ) -> clusterlib.PoolCreationOutput: """Create and register a stake pool - first TX; delegate stake address - second TX. diff --git a/cardano_node_tests/tests/test_reconnect.py b/cardano_node_tests/tests/test_reconnect.py index a08281bfc..87503d788 100644 --- a/cardano_node_tests/tests/test_reconnect.py +++ b/cardano_node_tests/tests/test_reconnect.py @@ -72,7 +72,7 @@ def node_query_utxo( cluster_obj: clusterlib.ClusterLib, node: str, address: str = "", - tx_raw_output: tp.Optional[clusterlib.TxRawOutput] = None, + tx_raw_output: clusterlib.TxRawOutput | None = None, ) -> list[clusterlib.UTXOData]: """Query UTxO on given node.""" orig_socket = os.environ.get("CARDANO_NODE_SOCKET_PATH") diff --git a/cardano_node_tests/tests/test_rollback.py b/cardano_node_tests/tests/test_rollback.py index 407be2565..bcba32b97 100644 --- a/cardano_node_tests/tests/test_rollback.py +++ b/cardano_node_tests/tests/test_rollback.py @@ -9,7 +9,6 @@ import pathlib as pl import shutil import time -import typing as tp import allure import pytest @@ -137,7 +136,7 @@ def node_query_utxo( cluster_obj: clusterlib.ClusterLib, node: str, address: str = "", - tx_raw_output: tp.Optional[clusterlib.TxRawOutput] = None, + tx_raw_output: clusterlib.TxRawOutput | None = None, ) -> list[clusterlib.UTXOData]: """Query UTxO on given node.""" orig_socket = 
os.environ.get("CARDANO_NODE_SOCKET_PATH") diff --git a/cardano_node_tests/tests/test_scripts.py b/cardano_node_tests/tests/test_scripts.py index 242857c09..7e21a567e 100644 --- a/cardano_node_tests/tests/test_scripts.py +++ b/cardano_node_tests/tests/test_scripts.py @@ -11,7 +11,6 @@ import pathlib as pl import random import re -import typing as tp import allure import hypothesis @@ -48,9 +47,9 @@ def multisig_tx( dst_address: str, amount: int, payment_skey_files: list[pl.Path], - multisig_script: tp.Optional[pl.Path] = None, - invalid_hereafter: tp.Optional[int] = None, - invalid_before: tp.Optional[int] = None, + multisig_script: pl.Path | None = None, + invalid_hereafter: int | None = None, + invalid_before: int | None = None, use_build_cmd: bool = False, submit_method: str = submit_utils.SubmitMethods.CLI, ) -> clusterlib.TxRawOutput: diff --git a/cardano_node_tests/tests/test_staking_rewards.py b/cardano_node_tests/tests/test_staking_rewards.py index f65ee3cfc..44e358787 100644 --- a/cardano_node_tests/tests/test_staking_rewards.py +++ b/cardano_node_tests/tests/test_staking_rewards.py @@ -890,11 +890,11 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: ) ) - mir_tx_raw_reserves: tp.Optional[clusterlib.TxRawOutput] = None + mir_tx_raw_reserves: clusterlib.TxRawOutput | None = None if mir_reward and this_epoch == init_epoch + 2: mir_tx_raw_reserves = _mir_tx("reserves") - mir_tx_raw_treasury: tp.Optional[clusterlib.TxRawOutput] = None + mir_tx_raw_treasury: clusterlib.TxRawOutput | None = None if mir_reward and this_epoch == init_epoch + 3: assert reward_per_epoch > mir_reward mir_tx_raw_treasury = _mir_tx("treasury") diff --git a/cardano_node_tests/tests/test_tx_many_utxos.py b/cardano_node_tests/tests/test_tx_many_utxos.py index 5ed4ecbd5..dadf14bb1 100644 --- a/cardano_node_tests/tests/test_tx_many_utxos.py +++ b/cardano_node_tests/tests/test_tx_many_utxos.py @@ -4,7 +4,6 @@ import logging import random import time -import typing as tp import allure 
import pytest @@ -105,7 +104,7 @@ def many_utxos( amount = less_than_1_ada + 1_000_000 # Repeat transaction when "BadInputsUTxO" error happens - excp: tp.Optional[clusterlib.CLIError] = None + excp: clusterlib.CLIError | None = None for r in range(2): if r > 0: cluster.wait_for_new_block(2) diff --git a/cardano_node_tests/tests/test_tx_negative.py b/cardano_node_tests/tests/test_tx_negative.py index 27f7aa418..6084a7cfc 100644 --- a/cardano_node_tests/tests/test_tx_negative.py +++ b/cardano_node_tests/tests/test_tx_negative.py @@ -221,10 +221,10 @@ def _submit_wrong_validity( cluster_obj: clusterlib.ClusterLib, pool_users: list[clusterlib.PoolUser], temp_template: str, - invalid_before: tp.Optional[int] = None, - invalid_hereafter: tp.Optional[int] = None, + invalid_before: int | None = None, + invalid_hereafter: int | None = None, use_build_cmd=False, - ) -> tuple[tp.Optional[int], str, tp.Optional[clusterlib.TxRawOutput]]: + ) -> tuple[int | None, str, clusterlib.TxRawOutput | None]: """Try to build and submit a transaction with wrong validity interval.""" src_address = pool_users[0].payment.address dst_address = pool_users[1].payment.address @@ -288,7 +288,7 @@ def _submit_wrong_validity( def _get_validity_range( self, cluster_obj: clusterlib.ClusterLib, tx_body_file: pl.Path - ) -> tuple[tp.Optional[int], tp.Optional[int]]: + ) -> tuple[int | None, int | None]: """Get validity range from a transaction body.""" tx_loaded = tx_view.load_tx_view(cluster_obj=cluster_obj, tx_body_file=tx_body_file) diff --git a/cardano_node_tests/tests/tests_conway/conway_common.py b/cardano_node_tests/tests/tests_conway/conway_common.py index b58e80554..0ca296f22 100644 --- a/cardano_node_tests/tests/tests_conway/conway_common.py +++ b/cardano_node_tests/tests/tests_conway/conway_common.py @@ -217,9 +217,9 @@ def cast_vote( payment_addr: clusterlib.AddressRecord, action_txid: str, action_ix: int, - approve_cc: tp.Optional[bool] = None, - approve_drep: tp.Optional[bool] = None, - 
approve_spo: tp.Optional[bool] = None, + approve_cc: bool | None = None, + approve_drep: bool | None = None, + approve_spo: bool | None = None, cc_skip_votes: bool = False, drep_skip_votes: bool = False, spo_skip_votes: bool = False, @@ -489,7 +489,7 @@ def propose_pparams_update( anchor_data_hash: str, pool_user: clusterlib.PoolUser, proposals: list[clusterlib_utils.UpdateProposal], - prev_action_rec: tp.Optional[governance_utils.PrevActionRec] = None, + prev_action_rec: governance_utils.PrevActionRec | None = None, ) -> PParamPropRec: """Propose a pparams update.""" deposit_amt = cluster_obj.conway_genesis["govActionDeposit"] diff --git a/cardano_node_tests/tests/tests_conway/test_drep.py b/cardano_node_tests/tests/tests_conway/test_drep.py index 5f66e7a7a..6a0348b16 100644 --- a/cardano_node_tests/tests/tests_conway/test_drep.py +++ b/cardano_node_tests/tests/tests_conway/test_drep.py @@ -62,7 +62,7 @@ def get_payment_addr( cluster_manager: cluster_management.ClusterManager, cluster_obj: clusterlib.ClusterLib, caching_key: str = "", - amount: tp.Optional[int] = None, + amount: int | None = None, ) -> clusterlib.AddressRecord: """Create new payment address.""" @@ -1822,8 +1822,8 @@ def _check_enactment( def _save_drep_states( id: str, - drep1: tp.Optional[governance_utils.DRepRegistration], - drep2: tp.Optional[governance_utils.DRepRegistration], + drep1: governance_utils.DRepRegistration | None, + drep2: governance_utils.DRepRegistration | None, ) -> None: curr_epoch = cluster.g_query.get_epoch() if drep1 is not None: diff --git a/cardano_node_tests/tests/tests_plutus/spend_build.py b/cardano_node_tests/tests/tests_plutus/spend_build.py index 2151f830e..7d1ee24b2 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/spend_build.py @@ -1,6 +1,5 @@ import dataclasses import logging -import typing as tp import pytest from cardano_clusterlib import clusterlib @@ -21,12 +20,9 @@ def _build_fund_script( 
payment_addr: clusterlib.AddressRecord, dst_addr: clusterlib.AddressRecord, plutus_op: plutus_common.PlutusOp, - tokens: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` - tokens_collateral: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` + tokens: list[plutus_common.Token] | None = None, # tokens must already be in `payment_addr` + tokens_collateral: list[plutus_common.Token] + | None = None, # tokens must already be in `payment_addr` embed_datum: bool = False, ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: """Fund a Plutus script and create the locked UTxO and collateral UTxO. @@ -139,14 +135,14 @@ def _build_spend_locked_txin( # noqa: C901 amount: int, deposit_amount: int = 0, txins: clusterlib.OptionalUTXOData = (), - tx_files: tp.Optional[clusterlib.TxFiles] = None, - invalid_hereafter: tp.Optional[int] = None, - invalid_before: tp.Optional[int] = None, - tokens: tp.Optional[list[plutus_common.Token]] = None, + tx_files: clusterlib.TxFiles | None = None, + invalid_hereafter: int | None = None, + invalid_before: int | None = None, + tokens: list[plutus_common.Token] | None = None, expect_failure: bool = False, script_valid: bool = True, submit_tx: bool = True, -) -> tuple[str, tp.Optional[clusterlib.TxRawOutput], list]: +) -> tuple[str, clusterlib.TxRawOutput | None, list]: """Spend the locked UTxO. Uses `cardano-cli transaction build` command for building the transactions. 
diff --git a/cardano_node_tests/tests/tests_plutus/spend_raw.py b/cardano_node_tests/tests/tests_plutus/spend_raw.py index 4d33f305c..471c5d585 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/spend_raw.py @@ -1,6 +1,5 @@ import dataclasses import logging -import typing as tp import pytest from cardano_clusterlib import clusterlib @@ -27,12 +26,9 @@ def _fund_script( amount: int, fee_txsize: int = FEE_REDEEM_TXSIZE, deposit_amount: int = 0, - tokens: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` - tokens_collateral: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` + tokens: list[plutus_common.Token] | None = None, # tokens must already be in `payment_addr` + tokens_collateral: list[plutus_common.Token] + | None = None, # tokens must already be in `payment_addr` collateral_fraction_offset: float = 1.0, embed_datum: bool = False, ) -> tuple[list[clusterlib.UTXOData], list[clusterlib.UTXOData], clusterlib.TxRawOutput]: @@ -136,10 +132,10 @@ def _spend_locked_txin( # noqa: C901 amount: int, fee_txsize: int = FEE_REDEEM_TXSIZE, txins: clusterlib.OptionalUTXOData = (), - tx_files: tp.Optional[clusterlib.TxFiles] = None, - invalid_hereafter: tp.Optional[int] = None, - invalid_before: tp.Optional[int] = None, - tokens: tp.Optional[list[plutus_common.Token]] = None, + tx_files: clusterlib.TxFiles | None = None, + invalid_hereafter: int | None = None, + invalid_before: int | None = None, + tokens: list[plutus_common.Token] | None = None, expect_failure: bool = False, script_valid: bool = True, submit_tx: bool = True, diff --git a/cardano_node_tests/tests/tests_plutus/test_delegation.py b/cardano_node_tests/tests/tests_plutus/test_delegation.py index 9faf40ee4..bc9bcc8d1 100644 --- a/cardano_node_tests/tests/tests_plutus/test_delegation.py +++ b/cardano_node_tests/tests/tests_plutus/test_delegation.py @@ -121,7 +121,7 @@ 
def register_delegate_stake_addr( pool_user: delegation.PoolUserScript, pool_id: str, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], + reference_script_utxos: list[clusterlib.UTXOData] | None, use_build_cmd: bool, ) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Submit registration certificate and delegate to pool.""" @@ -220,7 +220,7 @@ def register_stake_addr( collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], + reference_script_utxos: list[clusterlib.UTXOData] | None, use_build_cmd: bool, ) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Register a stake address.""" @@ -303,7 +303,7 @@ def delegate_stake_addr( pool_user: delegation.PoolUserScript, pool_id: str, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], + reference_script_utxos: list[clusterlib.UTXOData] | None, use_build_cmd: bool, ) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Delegate a stake address to a pool.""" @@ -387,7 +387,7 @@ def deregister_stake_addr( collaterals: list[clusterlib.UTXOData], pool_user: delegation.PoolUserScript, redeemer_file: pl.Path, - reference_script_utxos: tp.Optional[list[clusterlib.UTXOData]], + reference_script_utxos: list[clusterlib.UTXOData] | None, use_build_cmd: bool, ) -> tuple[clusterlib.TxRawOutput, list[dict]]: """Deregister stake address.""" diff --git a/cardano_node_tests/tests/tests_plutus/test_lobster.py b/cardano_node_tests/tests/tests_plutus/test_lobster.py index 5edcaf499..99a69d82d 100644 --- a/cardano_node_tests/tests/tests_plutus/test_lobster.py +++ b/cardano_node_tests/tests/tests_plutus/test_lobster.py @@ -6,7 +6,6 @@ import logging import pathlib as pl import random -import typing as tp import allure import pytest @@ -347,7 +346,7 @@ def test_lobster_name( vote_utxos = token_utxos_step3 vote_counter = 0 - utxo_counter_token: 
tp.Optional[clusterlib.UTXOData] = None + utxo_counter_token: clusterlib.UTXOData | None = None for vote_num, vote_val in enumerate(votes, start=1): # normal votes if vote_num <= votes_num: diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_build.py index 8901501f3..9b4e7af6b 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_build.py @@ -294,12 +294,12 @@ def test_guessing_game( __: tp.Any # mypy workaround temp_template = common.get_test_id(cluster) - datum_file: tp.Optional[pl.Path] = None - datum_cbor_file: tp.Optional[pl.Path] = None - datum_value: tp.Optional[str] = None - redeemer_file: tp.Optional[pl.Path] = None - redeemer_cbor_file: tp.Optional[pl.Path] = None - redeemer_value: tp.Optional[str] = None + datum_file: pl.Path | None = None + datum_cbor_file: pl.Path | None = None + datum_value: str | None = None + redeemer_file: pl.Path | None = None + redeemer_cbor_file: pl.Path | None = None + redeemer_value: str | None = None if variant == "typed_json": script_file = plutus_common.GUESSING_GAME[plutus_version].script_file diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py index afe8e85fc..836214681 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py @@ -331,12 +331,12 @@ def test_guessing_game( temp_template = common.get_test_id(cluster) amount = 2_000_000 - datum_file: tp.Optional[pl.Path] = None - datum_cbor_file: tp.Optional[pl.Path] = None - datum_value: tp.Optional[str] = None - redeemer_file: tp.Optional[pl.Path] = None - redeemer_cbor_file: tp.Optional[pl.Path] = None - redeemer_value: tp.Optional[str] = None + datum_file: pl.Path | None = None + datum_cbor_file: pl.Path | None = None + datum_value: str | None = None + redeemer_file: pl.Path | None = 
None + redeemer_cbor_file: pl.Path | None = None + redeemer_value: str | None = None if variant == "typed_json": script_file = plutus_common.GUESSING_GAME[plutus_version].script_file diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py index 528012b5d..1f96bf738 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py @@ -1,6 +1,5 @@ import logging import pathlib as pl -import typing as tp from cardano_clusterlib import clusterlib @@ -17,12 +16,12 @@ def _fund_issuer( issuer_addr: clusterlib.AddressRecord, minting_cost: plutus_common.ScriptCost, amount: int, - reference_script: tp.Optional[pl.Path] = None, - inline_datum: tp.Optional[pl.Path] = None, + reference_script: pl.Path | None = None, + inline_datum: pl.Path | None = None, ) -> tuple[ list[clusterlib.UTXOData], list[clusterlib.UTXOData], - tp.Optional[clusterlib.UTXOData], + clusterlib.UTXOData | None, clusterlib.TxRawOutput, ]: """Fund the token issuer.""" diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py index e21e138f7..032c517e8 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py @@ -1,6 +1,5 @@ import logging import pathlib as pl -import typing as tp from cardano_clusterlib import clusterlib @@ -22,12 +21,12 @@ def _fund_issuer( amount: int, fee_txsize: int = FEE_MINT_TXSIZE, collateral_utxo_num: int = 1, - reference_script: tp.Optional[pl.Path] = None, - datum_file: tp.Optional[pl.Path] = None, + reference_script: pl.Path | None = None, + datum_file: pl.Path | None = None, ) -> tuple[ list[clusterlib.UTXOData], list[clusterlib.UTXOData], - tp.Optional[clusterlib.UTXOData], + clusterlib.UTXOData | None, clusterlib.TxRawOutput, ]: """Fund the token issuer.""" diff --git 
a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py index 3e21423a8..0a9e53382 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py @@ -1,6 +1,5 @@ import json import logging -import typing as tp from cardano_clusterlib import clusterlib @@ -50,14 +49,13 @@ def _build_fund_script( plutus_op: plutus_common.PlutusOp, use_reference_script: bool = False, use_inline_datum: bool = True, - collateral_amount: tp.Optional[int] = None, - tokens_collateral: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` + collateral_amount: int | None = None, + tokens_collateral: list[plutus_common.Token] + | None = None, # tokens must already be in `payment_addr` ) -> tuple[ list[clusterlib.UTXOData], list[clusterlib.UTXOData], - tp.Optional[clusterlib.UTXOData], + clusterlib.UTXOData | None, clusterlib.TxRawOutput, ]: """Fund a Plutus script and create the locked UTxO and collateral UTxO and reference script. @@ -183,7 +181,7 @@ def _build_reference_txin( cluster: clusterlib.ClusterLib, amount: int, payment_addr: clusterlib.AddressRecord, - dst_addr: tp.Optional[clusterlib.AddressRecord] = None, + dst_addr: clusterlib.AddressRecord | None = None, ) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py index e129b0ea1..c77555b2f 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py @@ -1,6 +1,5 @@ import json import logging -import typing as tp from cardano_clusterlib import clusterlib @@ -46,14 +45,13 @@ def _fund_script( redeem_cost: plutus_common.ScriptCost, use_reference_script: bool = False, use_inline_datum: bool = False, - collateral_amount: tp.Optional[int] = None, - tokens_collateral: tp.Optional[ - list[plutus_common.Token] - ] = None, # tokens must already be in `payment_addr` + collateral_amount: int | None = None, + tokens_collateral: list[plutus_common.Token] + | None = None, # tokens must already be in `payment_addr` ) -> tuple[ list[clusterlib.UTXOData], list[clusterlib.UTXOData], - tp.Optional[clusterlib.UTXOData], + clusterlib.UTXOData | None, clusterlib.TxRawOutput, ]: """Fund a Plutus script and create the locked UTxO, collateral UTxO and reference script.""" @@ -169,7 +167,7 @@ def _build_reference_txin( cluster: clusterlib.ClusterLib, amount: int, payment_addr: clusterlib.AddressRecord, - dst_addr: tp.Optional[clusterlib.AddressRecord] = None, + dst_addr: clusterlib.AddressRecord | None = None, ) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py index 4be1a63d4..c527dd5c3 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py @@ -3,7 +3,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import pytest @@ -54,8 +53,8 @@ def _build_reference_txin( cluster: clusterlib.ClusterLib, amount: int, payment_addr: clusterlib.AddressRecord, - dst_addr: tp.Optional[clusterlib.AddressRecord] = None, - inline_datum: tp.Optional[pl.Path] = None, + dst_addr: clusterlib.AddressRecord | None = None, + inline_datum: pl.Path | None = None, ) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py index 222fe999b..babf8d1d6 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py @@ -3,7 +3,6 @@ import json import logging import pathlib as pl -import typing as tp import allure import pytest @@ -54,8 +53,8 @@ def _build_reference_txin( temp_template: str, amount: int, payment_addr: clusterlib.AddressRecord, - dst_addr: tp.Optional[clusterlib.AddressRecord] = None, - datum_file: tp.Optional[pl.Path] = None, + dst_addr: clusterlib.AddressRecord | None = None, + datum_file: pl.Path | None = None, ) -> list[clusterlib.UTXOData]: """Create a basic txin to use as readonly reference input. 
diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py index 8c6931a57..1ce1d4e51 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py @@ -1,7 +1,6 @@ """Tests for collateral while spending with Plutus V2 using `transaction build`.""" import logging -import typing as tp import allure import pytest @@ -62,7 +61,7 @@ def _build_spend_locked_txin( script_utxos: list[clusterlib.UTXOData], collateral_utxos: list[clusterlib.UTXOData], plutus_op: plutus_common.PlutusOp, - total_collateral_amount: tp.Optional[int] = None, + total_collateral_amount: int | None = None, return_collateral_txouts: clusterlib.OptionalTxOuts = (), ) -> clusterlib.TxRawOutput: # for mypy diff --git a/cardano_node_tests/utils/artifacts.py b/cardano_node_tests/utils/artifacts.py index 6960bc803..e968f971e 100644 --- a/cardano_node_tests/utils/artifacts.py +++ b/cardano_node_tests/utils/artifacts.py @@ -5,7 +5,6 @@ import os import pathlib as pl import shutil -import typing as tp from _pytest.config import Config from cardano_clusterlib import clusterlib @@ -19,9 +18,7 @@ CLUSTER_INSTANCE_ID_FILENAME = "cluster_instance_id.log" -def save_cli_coverage( - cluster_obj: clusterlib.ClusterLib, pytest_config: Config -) -> tp.Optional[pl.Path]: +def save_cli_coverage(cluster_obj: clusterlib.ClusterLib, pytest_config: Config) -> pl.Path | None: """Save CLI coverage info.""" cli_coverage_dir = pytest_config.getoption(CLI_COVERAGE_ARG) if not (cli_coverage_dir and cluster_obj.cli_coverage): @@ -36,7 +33,7 @@ def save_cli_coverage( return json_file -def save_start_script_coverage(log_file: pl.Path, pytest_config: Config) -> tp.Optional[pl.Path]: +def save_start_script_coverage(log_file: pl.Path, pytest_config: Config) -> pl.Path | None: """Save info about CLI commands executed by cluster start 
script.""" cli_coverage_dir = pytest_config.getoption(CLI_COVERAGE_ARG) if not (cli_coverage_dir and log_file.exists()): diff --git a/cardano_node_tests/utils/cluster_nodes.py b/cardano_node_tests/utils/cluster_nodes.py index 6456cb836..8d1fc16d3 100644 --- a/cardano_node_tests/utils/cluster_nodes.py +++ b/cardano_node_tests/utils/cluster_nodes.py @@ -37,8 +37,8 @@ class ClusterEnv: class ServiceStatus: name: str status: str - pid: tp.Optional[int] - uptime: tp.Optional[str] + pid: int | None + uptime: str | None message: str = "" @@ -347,7 +347,7 @@ def get_cluster_env() -> ClusterEnv: def reload_supervisor_config( - instance_num: tp.Optional[int] = None, delay: int = configuration.TX_SUBMISSION_DELAY + instance_num: int | None = None, delay: int = configuration.TX_SUBMISSION_DELAY ) -> None: """Reload supervisor configuration.""" LOGGER.info("Reloading supervisor configuration.") @@ -380,7 +380,7 @@ def start_cluster(cmd: str, args: list[str]) -> clusterlib.ClusterLib: def restart_all_nodes( - instance_num: tp.Optional[int] = None, delay: int = configuration.TX_SUBMISSION_DELAY + instance_num: int | None = None, delay: int = configuration.TX_SUBMISSION_DELAY ) -> None: """Restart all Cardano nodes of the running cluster.""" LOGGER.info("Restarting all cluster nodes.") @@ -402,9 +402,7 @@ def restart_all_nodes( time.sleep(delay) -def services_action( - service_names: list[str], action: str, instance_num: tp.Optional[int] = None -) -> None: +def services_action(service_names: list[str], action: str, instance_num: int | None = None) -> None: """Perform action on services on the running cluster.""" LOGGER.info(f"Performing '{action}' action on services {service_names}.") @@ -424,13 +422,13 @@ def services_action( ) from exc -def start_nodes(node_names: list[str], instance_num: tp.Optional[int] = None) -> None: +def start_nodes(node_names: list[str], instance_num: int | None = None) -> None: """Start list of Cardano nodes of the running cluster.""" service_names = 
[f"nodes:{n}" for n in node_names] services_action(service_names=service_names, action="start", instance_num=instance_num) -def stop_nodes(node_names: list[str], instance_num: tp.Optional[int] = None) -> None: +def stop_nodes(node_names: list[str], instance_num: int | None = None) -> None: """Stop list of Cardano nodes of the running cluster.""" service_names = [f"nodes:{n}" for n in node_names] services_action(service_names=service_names, action="stop", instance_num=instance_num) @@ -438,7 +436,7 @@ def stop_nodes(node_names: list[str], instance_num: tp.Optional[int] = None) -> def restart_nodes( node_names: list[str], - instance_num: tp.Optional[int] = None, + instance_num: int | None = None, delay: int = configuration.TX_SUBMISSION_DELAY, ) -> None: """Restart list of Cardano nodes of the running cluster.""" @@ -451,7 +449,7 @@ def restart_nodes( def services_status( - service_names: tp.Optional[list[str]] = None, instance_num: tp.Optional[int] = None + service_names: list[str] | None = None, instance_num: int | None = None ) -> list[ServiceStatus]: """Return status info for list of services running on the running cluster (all by default).""" if instance_num is None: diff --git a/cardano_node_tests/utils/cluster_scripts.py b/cardano_node_tests/utils/cluster_scripts.py index e2dcc47a3..d23ea6f22 100644 --- a/cardano_node_tests/utils/cluster_scripts.py +++ b/cardano_node_tests/utils/cluster_scripts.py @@ -112,7 +112,7 @@ def gen_split_topology_files( class LocalScripts(ScriptsTypes): """Scripts for starting local cluster.""" - _has_dns_rebinding_protection: tp.ClassVar[tp.Optional[bool]] = None + _has_dns_rebinding_protection: tp.ClassVar[bool | None] = None def __init__(self, num_pools: int = -1) -> None: super().__init__() diff --git a/cardano_node_tests/utils/clusterlib_utils.py b/cardano_node_tests/utils/clusterlib_utils.py index c51400cb4..6c6661998 100644 --- a/cardano_node_tests/utils/clusterlib_utils.py +++ b/cardano_node_tests/utils/clusterlib_utils.py @@ 
-27,7 +27,7 @@ class UpdateProposal: arg: str value: tp.Any name: str = "" - check_func: tp.Optional[tp.Callable] = None + check_func: tp.Callable | None = None @dataclasses.dataclass(frozen=True, order=True) @@ -56,31 +56,31 @@ def build_and_submit_tx( readonly_reference_txins: clusterlib.OptionalUTXOData = (), script_txins: clusterlib.OptionalScriptTxIn = (), return_collateral_txouts: clusterlib.OptionalTxOuts = (), - total_collateral_amount: tp.Optional[int] = None, + total_collateral_amount: int | None = None, mint: clusterlib.OptionalMint = (), - tx_files: tp.Optional[clusterlib.TxFiles] = None, + tx_files: clusterlib.TxFiles | None = None, complex_certs: clusterlib.OptionalScriptCerts = (), complex_proposals: clusterlib.OptionalScriptProposals = (), change_address: str = "", - fee_buffer: tp.Optional[int] = None, - raw_fee: tp.Optional[int] = None, + fee_buffer: int | None = None, + raw_fee: int | None = None, required_signers: cl_types.OptionalFiles = (), - required_signer_hashes: tp.Optional[list[str]] = None, + required_signer_hashes: list[str] | None = None, withdrawals: clusterlib.OptionalTxOuts = (), script_withdrawals: clusterlib.OptionalScriptWithdrawals = (), script_votes: clusterlib.OptionalScriptVotes = (), - deposit: tp.Optional[int] = None, - current_treasury_value: tp.Optional[int] = None, - treasury_donation: tp.Optional[int] = None, - invalid_hereafter: tp.Optional[int] = None, - invalid_before: tp.Optional[int] = None, - witness_override: tp.Optional[int] = None, + deposit: int | None = None, + current_treasury_value: int | None = None, + treasury_donation: int | None = None, + invalid_hereafter: int | None = None, + invalid_before: int | None = None, + witness_override: int | None = None, witness_count_add: int = 0, script_valid: bool = True, - calc_script_cost_file: tp.Optional[cl_types.FileType] = None, + calc_script_cost_file: cl_types.FileType | None = None, join_txouts: bool = True, destination_dir: cl_types.FileType = ".", - 
cli_asset_balancing: tp.Optional[bool] = None, + cli_asset_balancing: bool | None = None, ) -> clusterlib.TxRawOutput: """ Build and submit a transaction. @@ -246,7 +246,7 @@ def deregister_stake_address( pool_user: clusterlib.PoolUser, name_template: str, deposit_amt: int = -1, -) -> tp.Optional[clusterlib.TxRawOutput]: +) -> clusterlib.TxRawOutput | None: """Deregister stake address.""" stake_addr_info = cluster_obj.g_query.get_stake_addr_info(pool_user.stake.address) if not stake_addr_info: @@ -289,7 +289,7 @@ def deregister_stake_address( def create_payment_addr_records( *names: str, cluster_obj: clusterlib.ClusterLib, - stake_vkey_file: tp.Optional[cl_types.FileType] = None, + stake_vkey_file: cl_types.FileType | None = None, destination_dir: cl_types.FileType = ".", ) -> list[clusterlib.AddressRecord]: """Create new payment address(es).""" @@ -560,8 +560,8 @@ def mint_or_burn_witness( cluster_obj: clusterlib.ClusterLib, new_tokens: list[TokenRecord], temp_template: str, - invalid_hereafter: tp.Optional[int] = None, - invalid_before: tp.Optional[int] = None, + invalid_hereafter: int | None = None, + invalid_before: int | None = None, submit_method: str = submit_utils.SubmitMethods.CLI, use_build_cmd: bool = False, sign_incrementally: bool = False, @@ -992,7 +992,7 @@ def get_ledger_state( def save_ledger_state( cluster_obj: clusterlib.ClusterLib, state_name: str, - ledger_state: tp.Optional[dict] = None, + ledger_state: dict | None = None, destination_dir: cl_types.FileType = ".", ) -> pl.Path: """Save ledger state to file. 
@@ -1163,7 +1163,7 @@ def create_script_context( cluster_obj: clusterlib.ClusterLib, plutus_version: int, redeemer_file: pl.Path, - tx_file: tp.Optional[pl.Path] = None, + tx_file: pl.Path | None = None, ) -> None: """Run the `create-script-context` command (available in plutus-apps).""" if plutus_version == 1: diff --git a/cardano_node_tests/utils/dbsync_conn.py b/cardano_node_tests/utils/dbsync_conn.py index 6e4c40364..67af1383b 100644 --- a/cardano_node_tests/utils/dbsync_conn.py +++ b/cardano_node_tests/utils/dbsync_conn.py @@ -15,7 +15,7 @@ class DBSyncCache: """Cache connection to db-sync database for each cluster instance.""" - conns: tp.ClassVar[dict[int, tp.Optional[psycopg2.extensions.connection]]] = {0: None} + conns: tp.ClassVar[dict[int, psycopg2.extensions.connection | None]] = {0: None} def _conn(instance_num: int) -> psycopg2.extensions.connection: @@ -27,7 +27,7 @@ def _conn(instance_num: int) -> psycopg2.extensions.connection: return conn -def _close(instance_num: int, conn: tp.Optional[psycopg2.extensions.connection]) -> None: +def _close(instance_num: int, conn: psycopg2.extensions.connection | None) -> None: if conn is None or conn.closed == 1: return diff --git a/cardano_node_tests/utils/dbsync_queries.py b/cardano_node_tests/utils/dbsync_queries.py index 4794d6f71..f84b483e0 100644 --- a/cardano_node_tests/utils/dbsync_queries.py +++ b/cardano_node_tests/utils/dbsync_queries.py @@ -25,22 +25,22 @@ class PoolDataDBRow: reward_addr: memoryview reward_addr_view: str active_epoch_no: int - meta_id: tp.Optional[int] + meta_id: int | None margin: decimal.Decimal fixed_cost: int - deposit: tp.Optional[decimal.Decimal] + deposit: decimal.Decimal | None registered_tx_id: int - metadata_url: tp.Optional[str] - metadata_hash: tp.Optional[memoryview] + metadata_url: str | None + metadata_hash: memoryview | None owner_stake_address_id: int owner: memoryview - ipv4: tp.Optional[str] - ipv6: tp.Optional[str] - dns_name: tp.Optional[str] - port: 
tp.Optional[int] - retire_cert_index: tp.Optional[int] - retire_announced_tx_id: tp.Optional[int] - retiring_epoch: tp.Optional[int] + ipv4: str | None + ipv6: str | None + dns_name: str | None + port: int | None + retire_cert_index: int | None + retire_announced_tx_id: int | None + retiring_epoch: int | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -80,18 +80,18 @@ class TxDBRow: fee: decimal.Decimal deposit: int size: int - invalid_before: tp.Optional[decimal.Decimal] - invalid_hereafter: tp.Optional[decimal.Decimal] + invalid_before: decimal.Decimal | None + invalid_hereafter: decimal.Decimal | None treasury_donation: int - tx_out_id: tp.Optional[int] - tx_out_tx_id: tp.Optional[int] - utxo_ix: tp.Optional[int] - tx_out_addr: tp.Optional[str] - tx_out_addr_has_script: tp.Optional[bool] - tx_out_value: tp.Optional[decimal.Decimal] - tx_out_data_hash: tp.Optional[memoryview] - tx_out_inline_datum_hash: tp.Optional[memoryview] - tx_out_reference_script_hash: tp.Optional[memoryview] + tx_out_id: int | None + tx_out_tx_id: int | None + utxo_ix: int | None + tx_out_addr: str | None + tx_out_addr_has_script: bool | None + tx_out_value: decimal.Decimal | None + tx_out_data_hash: memoryview | None + tx_out_inline_datum_hash: memoryview | None + tx_out_reference_script_hash: memoryview | None metadata_count: int reserve_count: int treasury_count: int @@ -106,14 +106,14 @@ class TxDBRow: script_count: int redeemer_count: int extra_key_witness_count: int - ma_tx_out_id: tp.Optional[int] - ma_tx_out_policy: tp.Optional[memoryview] - ma_tx_out_name: tp.Optional[memoryview] - ma_tx_out_quantity: tp.Optional[decimal.Decimal] - ma_tx_mint_id: tp.Optional[int] - ma_tx_mint_policy: tp.Optional[memoryview] - ma_tx_mint_name: tp.Optional[memoryview] - ma_tx_mint_quantity: tp.Optional[decimal.Decimal] + ma_tx_out_id: int | None + ma_tx_out_policy: memoryview | None + ma_tx_out_name: memoryview | None + ma_tx_out_quantity: decimal.Decimal | 
None + ma_tx_mint_id: int | None + ma_tx_mint_policy: memoryview | None + ma_tx_mint_name: memoryview | None + ma_tx_mint_quantity: decimal.Decimal | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -153,9 +153,9 @@ class StakeAddrDBRow: @pydantic.dataclasses.dataclass(frozen=True) class StakeDelegDBRow: tx_id: int - active_epoch_no: tp.Optional[int] - pool_id: tp.Optional[str] - address: tp.Optional[str] + active_epoch_no: int | None + pool_id: str | None + address: str | None @pydantic.dataclasses.dataclass(frozen=True) @@ -172,14 +172,14 @@ class TxInDBRow: address: str value: decimal.Decimal tx_hash: memoryview - reference_script_hash: tp.Optional[memoryview] - reference_script_json: tp.Optional[dict] - reference_script_bytes: tp.Optional[memoryview] - reference_script_type: tp.Optional[str] - ma_tx_out_id: tp.Optional[int] - ma_tx_out_policy: tp.Optional[memoryview] - ma_tx_out_name: tp.Optional[memoryview] - ma_tx_out_quantity: tp.Optional[decimal.Decimal] + reference_script_hash: memoryview | None + reference_script_json: dict | None + reference_script_bytes: memoryview | None + reference_script_type: str | None + ma_tx_out_id: int | None + ma_tx_out_policy: memoryview | None + ma_tx_out_name: memoryview | None + ma_tx_out_quantity: decimal.Decimal | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -189,10 +189,10 @@ class TxInNoMADBRow: address: str value: decimal.Decimal tx_hash: memoryview - reference_script_hash: tp.Optional[memoryview] - reference_script_json: tp.Optional[dict] - reference_script_bytes: tp.Optional[memoryview] - reference_script_type: tp.Optional[str] + reference_script_hash: memoryview | None + reference_script_json: dict | None + reference_script_bytes: memoryview | None + reference_script_type: str | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -210,7 +210,7 @@ class ScriptDBRow: tx_id: int hash: memoryview type: str 
- serialised_size: tp.Optional[int] + serialised_size: int | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -248,7 +248,7 @@ class RewardDBRow: amount: decimal.Decimal earned_epoch: int spendable_epoch: int - pool_id: tp.Optional[str] = "" + pool_id: str | None = "" @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -256,24 +256,24 @@ class UTxODBRow: tx_hash: memoryview utxo_ix: int payment_address: str - stake_address: tp.Optional[str] + stake_address: str | None has_script: bool value: int - data_hash: tp.Optional[memoryview] + data_hash: memoryview | None @pydantic.dataclasses.dataclass(frozen=True) class BlockDBRow: id: int - epoch_no: tp.Optional[int] - slot_no: tp.Optional[int] - epoch_slot_no: tp.Optional[int] - block_no: tp.Optional[int] - previous_id: tp.Optional[int] - tx_count: tp.Optional[int] - proto_major: tp.Optional[int] - proto_minor: tp.Optional[int] - pool_id: tp.Optional[str] + epoch_no: int | None + slot_no: int | None + epoch_slot_no: int | None + block_no: int | None + previous_id: int | None + tx_count: int | None + proto_major: int | None + proto_minor: int | None + pool_id: str | None @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) @@ -295,60 +295,60 @@ class SchemaVersionStages: @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) class ParamProposalDBRow: id: int - epoch_no: tp.Optional[int] - key: tp.Optional[memoryview] - min_fee_a: tp.Optional[int] - min_fee_b: tp.Optional[int] - max_block_size: tp.Optional[int] - max_tx_size: tp.Optional[int] - max_bh_size: tp.Optional[int] - key_deposit: tp.Optional[int] - pool_deposit: tp.Optional[int] - max_epoch: tp.Optional[int] - optimal_pool_count: tp.Optional[int] - influence: tp.Optional[float] - monetary_expand_rate: tp.Optional[float] - treasury_growth_rate: tp.Optional[float] - decentralisation: tp.Optional[float] - entropy: tp.Optional[memoryview] - 
protocol_major: tp.Optional[int] - protocol_minor: tp.Optional[int] - min_utxo_value: tp.Optional[int] - min_pool_cost: tp.Optional[int] - coins_per_utxo_size: tp.Optional[int] - cost_model_id: tp.Optional[int] - price_mem: tp.Optional[float] - price_step: tp.Optional[float] - max_tx_ex_mem: tp.Optional[int] - max_tx_ex_steps: tp.Optional[int] - max_block_ex_mem: tp.Optional[int] - max_block_ex_steps: tp.Optional[int] - max_val_size: tp.Optional[int] - collateral_percent: tp.Optional[int] - max_collateral_inputs: tp.Optional[int] - registered_tx_id: tp.Optional[int] - pvt_motion_no_confidence: tp.Optional[float] - pvt_committee_normal: tp.Optional[float] - pvt_committee_no_confidence: tp.Optional[float] - pvt_hard_fork_initiation: tp.Optional[float] - dvt_motion_no_confidence: tp.Optional[float] - dvt_committee_normal: tp.Optional[float] - dvt_committee_no_confidence: tp.Optional[float] - dvt_update_to_constitution: tp.Optional[float] - dvt_hard_fork_initiation: tp.Optional[float] - dvt_p_p_network_group: tp.Optional[float] - dvt_p_p_economic_group: tp.Optional[float] - dvt_p_p_technical_group: tp.Optional[float] - dvt_p_p_gov_group: tp.Optional[float] - dvt_treasury_withdrawal: tp.Optional[float] - committee_min_size: tp.Optional[int] - committee_max_term_length: tp.Optional[int] - gov_action_lifetime: tp.Optional[int] - gov_action_deposit: tp.Optional[int] - drep_deposit: tp.Optional[int] - drep_activity: tp.Optional[decimal.Decimal] - pvtpp_security_group: tp.Optional[float] - min_fee_ref_script_cost_per_byte: tp.Optional[float] + epoch_no: int | None + key: memoryview | None + min_fee_a: int | None + min_fee_b: int | None + max_block_size: int | None + max_tx_size: int | None + max_bh_size: int | None + key_deposit: int | None + pool_deposit: int | None + max_epoch: int | None + optimal_pool_count: int | None + influence: float | None + monetary_expand_rate: float | None + treasury_growth_rate: float | None + decentralisation: float | None + entropy: memoryview 
| None + protocol_major: int | None + protocol_minor: int | None + min_utxo_value: int | None + min_pool_cost: int | None + coins_per_utxo_size: int | None + cost_model_id: int | None + price_mem: float | None + price_step: float | None + max_tx_ex_mem: int | None + max_tx_ex_steps: int | None + max_block_ex_mem: int | None + max_block_ex_steps: int | None + max_val_size: int | None + collateral_percent: int | None + max_collateral_inputs: int | None + registered_tx_id: int | None + pvt_motion_no_confidence: float | None + pvt_committee_normal: float | None + pvt_committee_no_confidence: float | None + pvt_hard_fork_initiation: float | None + dvt_motion_no_confidence: float | None + dvt_committee_normal: float | None + dvt_committee_no_confidence: float | None + dvt_update_to_constitution: float | None + dvt_hard_fork_initiation: float | None + dvt_p_p_network_group: float | None + dvt_p_p_economic_group: float | None + dvt_p_p_technical_group: float | None + dvt_p_p_gov_group: float | None + dvt_treasury_withdrawal: float | None + committee_min_size: int | None + committee_max_term_length: int | None + gov_action_lifetime: int | None + gov_action_deposit: int | None + drep_deposit: int | None + drep_activity: decimal.Decimal | None + pvtpp_security_group: float | None + min_fee_ref_script_cost_per_byte: float | None @pydantic.dataclasses.dataclass(frozen=True) @@ -394,7 +394,7 @@ class EpochParamDBRow: collateral_percent: int max_collateral_inputs: int block_id: int - extra_entropy: tp.Optional[memoryview] + extra_entropy: memoryview | None coins_per_utxo_size: int pvt_motion_no_confidence: float pvt_committee_normal: float @@ -445,7 +445,7 @@ class DrepRegistrationDBRow: cert_index: int deposit: int drep_hash_id: int - voting_anchor_id: tp.Optional[int] + voting_anchor_id: int | None hash_raw: memoryview hash_view: str has_script: bool @@ -456,7 +456,7 @@ class GovActionProposalDBRow: id: int tx_id: int action_ix: int - prev_gov_action_proposal: tp.Optional[int] 
+ prev_gov_action_proposal: int | None deposit: int return_address: int expiration: int @@ -464,19 +464,19 @@ class GovActionProposalDBRow: type: str description: dict param_proposal: int - ratified_epoch: tp.Optional[int] - enacted_epoch: tp.Optional[int] - dropped_epoch: tp.Optional[int] - expired_epoch: tp.Optional[int] + ratified_epoch: int | None + enacted_epoch: int | None + dropped_epoch: int | None + expired_epoch: int | None @pydantic.dataclasses.dataclass(frozen=True) class VotingProcedureDBRow: id: int voter_role: str - committee_voter: tp.Optional[int] - drep_voter: tp.Optional[int] - pool_voter: tp.Optional[int] + committee_voter: int | None + drep_voter: int | None + pool_voter: int | None vote: str @@ -503,10 +503,10 @@ class TreasuryWithdrawalDBRow: tx_id: int action_ix: int expiration: int - ratified_epoch: tp.Optional[int] - enacted_epoch: tp.Optional[int] - dropped_epoch: tp.Optional[int] - expired_epoch: tp.Optional[int] + ratified_epoch: int | None + enacted_epoch: int | None + dropped_epoch: int | None + expired_epoch: int | None addr_view: str amount: decimal.Decimal @@ -523,11 +523,11 @@ class OffChainVoteDrepDataDBRow: id: int hash: memoryview language: str - comment: tp.Optional[str] + comment: str | None json: dict bytes: memoryview - warning: tp.Optional[str] - is_valid: tp.Optional[bool] + warning: str | None + is_valid: bool | None payment_address: str given_name: str objectives: str @@ -544,28 +544,28 @@ class OffChainVoteDataDBRow: data_hash: memoryview data_json: dict data_bytes: memoryview - data_warning: tp.Optional[str] + data_warning: str | None data_language: str - data_comment: tp.Optional[str] - data_is_valid: tp.Optional[bool] - auth_name: tp.Optional[str] - auth_wit_alg: tp.Optional[str] - auth_pub_key: tp.Optional[str] - auth_signature: tp.Optional[str] - auth_warning: tp.Optional[str] - ext_update_id: tp.Optional[int] - ext_update_title: tp.Optional[str] - ext_update_uri: tp.Optional[str] - gov_act_id: tp.Optional[int] - 
gov_act_title: tp.Optional[str] - gov_act_abstract: tp.Optional[str] - gov_act_motivation: tp.Optional[str] - gov_act_rationale: tp.Optional[str] - ref_id: tp.Optional[int] - ref_label: tp.Optional[str] - ref_uri: tp.Optional[str] - ref_hash_digest: tp.Optional[str] - ref_hash_alg: tp.Optional[str] + data_comment: str | None + data_is_valid: bool | None + auth_name: str | None + auth_wit_alg: str | None + auth_pub_key: str | None + auth_signature: str | None + auth_warning: str | None + ext_update_id: int | None + ext_update_title: str | None + ext_update_uri: str | None + gov_act_id: int | None + gov_act_title: str | None + gov_act_abstract: str | None + gov_act_motivation: str | None + gov_act_rationale: str | None + ref_id: int | None + ref_label: str | None + ref_uri: str | None + ref_hash_digest: str | None + ref_hash_alg: str | None vot_anchor_url: str vot_anchor_data_hash: memoryview vot_anchor_type: str @@ -582,7 +582,7 @@ class DelegationVoteDBRow: @pydantic.dataclasses.dataclass(frozen=True, config=_CONF_ARBITRARY_T_ALLOWED) class NewConstitutionInfoDBRow: id: int - script_hash: tp.Optional[memoryview] + script_hash: memoryview | None gov_action_type: str gap_id: int tx_id: int @@ -624,7 +624,7 @@ def execute(query: str, vars: tp.Sequence = ()) -> tp.Iterator[psycopg2.extensio class SchemaVersion: """Query and cache db-sync schema version.""" - _stages: tp.ClassVar[tp.Optional[SchemaVersionStages]] = None + _stages: tp.ClassVar[SchemaVersionStages | None] = None @classmethod def stages(cls) -> SchemaVersionStages: diff --git a/cardano_node_tests/utils/dbsync_types.py b/cardano_node_tests/utils/dbsync_types.py index 64a0de73f..e16366543 100644 --- a/cardano_node_tests/utils/dbsync_types.py +++ b/cardano_node_tests/utils/dbsync_types.py @@ -65,7 +65,7 @@ class UTxORecord: datum_hash: str = "" inline_datum_hash: str = "" inline_datum: str | dict | None = None - reference_script: tp.Optional[dict] = None + reference_script: dict | None = None 
reference_script_hash: str = "" @@ -81,7 +81,7 @@ class GetUTxORecord: @dataclasses.dataclass(frozen=True, order=True) class PaymentAddrRecord: payment_address: str - stake_address: tp.Optional[str] + stake_address: str | None amount_sum: int utxos: list[GetUTxORecord] @@ -100,7 +100,7 @@ class PoolDataRecord: pledge: int reward_addr: str active_epoch_no: int - meta_id: tp.Optional[int] + meta_id: int | None margin: float fixed_cost: int registered_tx_id: int @@ -108,9 +108,9 @@ class PoolDataRecord: metadata_hash: str owners: list[str] relays: list[dict[str, dict[str, tp.Any]]] - retire_cert_index: tp.Optional[int] - retire_announced_tx_id: tp.Optional[int] - retiring_epoch: tp.Optional[int] + retire_cert_index: int | None + retire_announced_tx_id: int | None + retiring_epoch: int | None @dataclasses.dataclass(frozen=True, order=True) @@ -141,8 +141,8 @@ class TxRecord: fee: int deposit: int size: int - invalid_before: tp.Optional[int] - invalid_hereafter: tp.Optional[int] + invalid_before: int | None + invalid_hereafter: int | None treasury_donation: int txins: list[UTxORecord] txouts: list[UTxORecord] @@ -204,7 +204,7 @@ class DrepRegistrationRecord: cert_index: int deposit: int drep_hash_id: int - voting_anchor_id: tp.Optional[int] + voting_anchor_id: int | None hash_hex: str hash_bech32: str has_script: bool @@ -217,10 +217,10 @@ class OffChainVoteDataRecord: hash: str json: dict bytes: str - warning: tp.Optional[str] + warning: str | None language: str - comment: tp.Optional[str] - is_valid: tp.Optional[bool] + comment: str | None + is_valid: bool | None authors: list[dict[str, tp.Any]] references: list[dict[str, tp.Any]] gov_action_data: dict[str, tp.Any] diff --git a/cardano_node_tests/utils/dbsync_utils.py b/cardano_node_tests/utils/dbsync_utils.py index 7d9d8e377..4430a21bf 100644 --- a/cardano_node_tests/utils/dbsync_utils.py +++ b/cardano_node_tests/utils/dbsync_utils.py @@ -119,7 +119,7 @@ def get_utxo(address: str) -> dbsync_types.PaymentAddrRecord: ) 
-def get_pool_data(pool_id_bech32: str) -> tp.Optional[dbsync_types.PoolDataRecord]: +def get_pool_data(pool_id_bech32: str) -> dbsync_types.PoolDataRecord | None: """Get pool data from db-sync.""" pools = list(dbsync_queries.query_pool_data(pool_id_bech32)) if not pools: @@ -533,7 +533,7 @@ def get_tx_record_retry(txhash: str, retry_num: int = 3) -> dbsync_types.TxRecor def get_tx( cluster_obj: clusterlib.ClusterLib, tx_raw_output: clusterlib.TxRawOutput, retry_num: int = 3 -) -> tp.Optional[dbsync_types.TxRecord]: +) -> dbsync_types.TxRecord | None: """Get a transaction data from db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -546,7 +546,7 @@ def get_tx( def check_tx( cluster_obj: clusterlib.ClusterLib, tx_raw_output: clusterlib.TxRawOutput, retry_num: int = 3 -) -> tp.Optional[dbsync_types.TxRecord]: +) -> dbsync_types.TxRecord | None: """Check a transaction in db-sync.""" response = get_tx(cluster_obj=cluster_obj, tx_raw_output=tx_raw_output, retry_num=retry_num) @@ -562,7 +562,7 @@ def check_tx_phase_2_failure( tx_raw_output: clusterlib.TxRawOutput, collateral_charged: int, retry_num: int = 3, -) -> tp.Optional[dbsync_types.TxRecord]: +) -> dbsync_types.TxRecord | None: """Check a transaction in db-sync when a phase 2 failure happens.""" if not configuration.HAS_DBSYNC: return None @@ -601,7 +601,7 @@ def check_tx_phase_2_failure( def check_pool_deregistration( pool_id: str, retiring_epoch: int -) -> tp.Optional[dbsync_types.PoolDataRecord]: +) -> dbsync_types.PoolDataRecord | None: """Check pool retirement in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -622,7 +622,7 @@ def check_pool_deregistration( def check_pool_data( # noqa: C901 ledger_pool_data: dict, pool_id: str -) -> tp.Optional[dbsync_types.PoolDataRecord]: +) -> dbsync_types.PoolDataRecord | None: """Check comparison for pool data between ledger and db-sync.""" # pylint: disable=too-many-branches if not configuration.HAS_DBSYNC: @@ -809,7 +809,7 @@ def 
check_plutus_costs( raise AssertionError("\n".join(errors)) -def check_param_proposal(protocol_params: dict) -> tp.Optional[dbsync_queries.ParamProposalDBRow]: +def check_param_proposal(protocol_params: dict) -> dbsync_queries.ParamProposalDBRow | None: """Check expected values in the `param_proposal` table in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -854,7 +854,7 @@ def check_param_proposal(protocol_params: dict) -> tp.Optional[dbsync_queries.Pa return param_proposal_db -def _get_float_pparam(pparam: tp.Any) -> tp.Optional[float]: +def _get_float_pparam(pparam: tp.Any) -> float | None: if pparam is None: return None if isinstance(pparam, dict): @@ -950,7 +950,7 @@ def _check_param_proposal( def check_conway_param_update_proposal( param_proposal_ledger: dict, -) -> tp.Optional[dbsync_queries.ParamProposalDBRow]: +) -> dbsync_queries.ParamProposalDBRow | None: """Check comparison for param proposal between ledger and db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -976,7 +976,7 @@ def check_conway_param_update_proposal( def check_conway_param_update_enactment( pparams: dict, epoch_no: int -) -> tp.Optional[dbsync_queries.EpochParamDBRow]: +) -> dbsync_queries.EpochParamDBRow | None: """Check params enactment between ledger and epoch param in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1016,7 +1016,7 @@ def check_proposal_refunds(stake_address: str, refunds_num: int) -> None: def check_conway_gov_action_proposal_description( update_proposal: dict, txhash: str = "", action_ix: int = 0 -) -> tp.Optional[dbsync_queries.GovActionProposalDBRow]: +) -> dbsync_queries.GovActionProposalDBRow | None: """Check expected values in the gov_action_proposal table in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1041,7 +1041,7 @@ def get_gov_action_proposals( return gov_action_proposals -def get_committee_member(cold_key: str) -> tp.Optional[dbsync_types.CommitteeRegistrationRecord]: +def get_committee_member(cold_key: 
str) -> dbsync_types.CommitteeRegistrationRecord | None: """Get committee member data from db-sync.""" cc_members = list(dbsync_queries.query_committee_registration(cold_key=cold_key)) if not cc_members: @@ -1062,7 +1062,7 @@ def get_committee_member(cold_key: str) -> tp.Optional[dbsync_types.CommitteeReg def check_committee_member_registration( cc_member_cold_key: str, -) -> tp.Optional[dbsync_types.CommitteeRegistrationRecord]: +) -> dbsync_types.CommitteeRegistrationRecord | None: """Check committee member registration in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1079,7 +1079,7 @@ def check_committee_member_registration( def get_deregistered_committee_member( cold_key: str, -) -> tp.Optional[dbsync_types.CommitteeDeregistrationRecord]: +) -> dbsync_types.CommitteeDeregistrationRecord | None: """Get deregistered committee member data from db-sync.""" deregistered_cc_members = list(dbsync_queries.query_committee_deregistration(cold_key=cold_key)) if not deregistered_cc_members: @@ -1100,7 +1100,7 @@ def get_deregistered_committee_member( def check_committee_member_deregistration( cc_member_cold_key: str, -) -> tp.Optional[dbsync_types.CommitteeDeregistrationRecord]: +) -> dbsync_types.CommitteeDeregistrationRecord | None: """Check committee member deregistration in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1116,7 +1116,7 @@ def check_committee_member_deregistration( return cc_member_data -def get_drep(drep_hash: str, drep_deposit: int) -> tp.Optional[dbsync_types.DrepRegistrationRecord]: +def get_drep(drep_hash: str, drep_deposit: int) -> dbsync_types.DrepRegistrationRecord | None: """Get drep data from db-sync.""" dreps = list(dbsync_queries.query_drep_registration(drep_hash, drep_deposit)) if not dreps: @@ -1140,7 +1140,7 @@ def get_drep(drep_hash: str, drep_deposit: int) -> tp.Optional[dbsync_types.Drep def check_drep_registration( drep: governance_utils.DRepRegistration, drep_state: list[list[dict[str, tp.Any]]] -) -> 
tp.Optional[dbsync_types.DrepRegistrationRecord]: +) -> dbsync_types.DrepRegistrationRecord | None: """Check drep registration in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1160,7 +1160,7 @@ def check_drep_registration( def check_drep_deregistration( drep: governance_utils.DRepRegistration, -) -> tp.Optional[dbsync_types.DrepRegistrationRecord]: +) -> dbsync_types.DrepRegistrationRecord | None: """Check drep deregistration in db-sync.""" if not configuration.HAS_DBSYNC: return None @@ -1354,7 +1354,7 @@ def check_off_chain_drep_registration( raise AssertionError("\n".join(errors)) -def get_action_data(data_hash: str) -> tp.Optional[dbsync_types.OffChainVoteDataRecord]: # noqa: C901 +def get_action_data(data_hash: str) -> dbsync_types.OffChainVoteDataRecord | None: # noqa: C901 """Get off chain action data from db-sync.""" votes = list(dbsync_queries.query_off_chain_vote_data(data_hash)) if not votes: @@ -1388,7 +1388,7 @@ def get_action_data(data_hash: str) -> tp.Optional[dbsync_types.OffChainVoteData "rationale": vote.gov_act_rationale, } if vote.ref_id: - reference: dict[str, str | tp.Optional[dict[str, str]]] + reference: dict[str, str | dict[str, str] | None] reference = {"label": vote.ref_label, "uri": vote.ref_uri} if vote.ref_hash_digest and vote.ref_hash_alg: reference["referenceHash"] = { diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index 8fecb470d..d7b8e6c3f 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -1,7 +1,6 @@ import contextlib import logging import random -import typing as tp import cardano_clusterlib.types as cl_types from cardano_clusterlib import clusterlib @@ -16,13 +15,13 @@ def fund_from_faucet( *dst_addrs: clusterlib.AddressRecord | clusterlib.PoolUser, cluster_obj: clusterlib.ClusterLib, - faucet_data: tp.Optional[dict] = None, - all_faucets: tp.Optional[dict[str, dict]] = None, + faucet_data: dict | None = None, + all_faucets: dict[str, 
dict] | None = None, amount: None | int | list[int] = None, - tx_name: tp.Optional[str] = None, + tx_name: str | None = None, destination_dir: clusterlib.FileType = ".", force: bool = False, -) -> tp.Optional[clusterlib.TxRawOutput]: +) -> clusterlib.TxRawOutput | None: """Send `amount` from faucet addr to all `dst_addrs`.""" if amount is None: amount = 1000_000_000 @@ -76,7 +75,7 @@ def return_funds_to_faucet( cluster_obj: clusterlib.ClusterLib, faucet_addr: str, amount: int | list[int] = -1, - tx_name: tp.Optional[str] = None, + tx_name: str | None = None, destination_dir: cl_types.FileType = ".", ) -> None: """Send `amount` from all `src_addrs` to `faucet_addr`. diff --git a/cardano_node_tests/utils/gh_issue.py b/cardano_node_tests/utils/gh_issue.py index f68e2447e..1c7496d3b 100644 --- a/cardano_node_tests/utils/gh_issue.py +++ b/cardano_node_tests/utils/gh_issue.py @@ -11,15 +11,15 @@ class GHIssue: """GitHub issue.""" - TOKEN: tp.ClassVar[tp.Optional[str]] = None + TOKEN: tp.ClassVar[str | None] = None issue_cache: tp.ClassVar[dict[str, str]] = {} - _github_instance: tp.ClassVar[tp.Optional[github.Github]] = None + _github_instance: tp.ClassVar[github.Github | None] = None _github_instance_error: tp.ClassVar[bool] = False @classmethod - def _get_github(cls) -> tp.Optional[github.Github]: + def _get_github(cls) -> github.Github | None: """Get GitHub instance.""" if cls._github_instance is not None: return cls._github_instance @@ -45,14 +45,14 @@ def __init__(self, number: int, repo: str) -> None: self.repo = repo @property - def github(self) -> tp.Optional[github.Github]: + def github(self) -> github.Github | None: return self._get_github() @property def url(self) -> str: return f"https://github.com/{self.repo}/issues/{self.number}" - def get_state(self) -> tp.Optional[str]: + def get_state(self) -> str | None: """Get issue state.""" if not self.github: LOGGER.error("Failed to get GitHub instance") diff --git a/cardano_node_tests/utils/governance_setup.py 
b/cardano_node_tests/utils/governance_setup.py index f20693f04..ca83771a1 100644 --- a/cardano_node_tests/utils/governance_setup.py +++ b/cardano_node_tests/utils/governance_setup.py @@ -260,7 +260,7 @@ def get_default_governance( gov_data_store = gov_data_dir / GOV_DATA_STORE governance_data = None - def _setup_gov() -> tp.Optional[governance_utils.GovernanceRecords]: + def _setup_gov() -> governance_utils.GovernanceRecords | None: if gov_data_store.exists(): return None diff --git a/cardano_node_tests/utils/helpers.py b/cardano_node_tests/utils/helpers.py index 11b00f36a..68a1a393d 100644 --- a/cardano_node_tests/utils/helpers.py +++ b/cardano_node_tests/utils/helpers.py @@ -233,7 +233,7 @@ def encode_bech32(prefix: str, data: str) -> str: return run_command(f"echo '{data}' | bech32 {prefix}", shell=True).decode().strip() -def check_dir_arg(dir_path: str) -> tp.Optional[pl.Path]: +def check_dir_arg(dir_path: str) -> pl.Path | None: """Check that the dir passed as argparse parameter is a valid existing dir.""" if not dir_path: return None @@ -244,7 +244,7 @@ def check_dir_arg(dir_path: str) -> tp.Optional[pl.Path]: return abs_path -def check_file_arg(file_path: str) -> tp.Optional[pl.Path]: +def check_file_arg(file_path: str) -> pl.Path | None: """Check that the file passed as argparse parameter is a valid existing file.""" if not file_path: return None diff --git a/cardano_node_tests/utils/logfiles.py b/cardano_node_tests/utils/logfiles.py index 2359f3b99..9f4ed79d4 100644 --- a/cardano_node_tests/utils/logfiles.py +++ b/cardano_node_tests/utils/logfiles.py @@ -196,8 +196,8 @@ def _search_log_lines( logfile: pl.Path, rotated_logs: list[RotableLog], errors_re: re.Pattern, - errors_ignored_re: tp.Optional[re.Pattern] = None, - errors_look_back_re: tp.Optional[re.Pattern] = None, + errors_ignored_re: re.Pattern | None = None, + errors_look_back_re: re.Pattern | None = None, ) -> list[tuple[pl.Path, str]]: """Search for errors in the log file and, if needed, in the 
corresponding rotated logs.""" errors = [] diff --git a/cardano_node_tests/utils/temptools.py b/cardano_node_tests/utils/temptools.py index 278f86c3b..8b65a5130 100644 --- a/cardano_node_tests/utils/temptools.py +++ b/cardano_node_tests/utils/temptools.py @@ -15,9 +15,9 @@ class PytestTempDirs: fixture. """ - pytest_worker_tmp: tp.ClassVar[tp.Optional[pl.Path]] = None - pytest_root_tmp: tp.ClassVar[tp.Optional[pl.Path]] = None - pytest_shared_tmp: tp.ClassVar[tp.Optional[pl.Path]] = None + pytest_worker_tmp: tp.ClassVar[pl.Path | None] = None + pytest_root_tmp: tp.ClassVar[pl.Path | None] = None + pytest_shared_tmp: tp.ClassVar[pl.Path | None] = None _err_init_str = "PytestTempDirs are not initialized" From b36400f5a3dbe5f14948c93bf14373f77ff38954 Mon Sep 17 00:00:00 2001 From: Martin Kourim Date: Fri, 29 Nov 2024 13:52:05 +0100 Subject: [PATCH 166/168] chore: capitalize comments in Python files --- cardano_node_tests/cardano_cli_coverage.py | 8 +- .../cluster_management/cache.py | 6 +- .../cluster_management/cluster_getter.py | 106 +++---- .../cluster_management/manager.py | 24 +- .../pytest_plugins/xdist_scheduler.py | 16 +- cardano_node_tests/tests/common.py | 18 +- cardano_node_tests/tests/conftest.py | 22 +- cardano_node_tests/tests/delegation.py | 18 +- cardano_node_tests/tests/kes.py | 2 +- cardano_node_tests/tests/plutus_common.py | 16 +- .../tests/test_addr_registration.py | 20 +- cardano_node_tests/tests/test_blocks.py | 16 +- .../tests/test_chain_transactions.py | 10 +- cardano_node_tests/tests/test_cli.py | 40 +-- .../tests/test_configuration.py | 8 +- cardano_node_tests/tests/test_dbsync.py | 10 +- cardano_node_tests/tests/test_delegation.py | 104 +++---- .../tests/test_env_network_id.py | 2 +- cardano_node_tests/tests/test_kes.py | 48 +-- cardano_node_tests/tests/test_ledger_state.py | 16 +- cardano_node_tests/tests/test_metrics.py | 2 +- cardano_node_tests/tests/test_mir_certs.py | 58 ++-- .../tests/test_native_tokens.py | 156 +++++----- 
cardano_node_tests/tests/test_node_upgrade.py | 4 +- .../tests/test_pool_saturation.py | 32 +- cardano_node_tests/tests/test_pools.py | 210 ++++++------- cardano_node_tests/tests/test_scripts.py | 286 +++++++++--------- cardano_node_tests/tests/test_socket_path.py | 2 +- .../tests/test_staking_no_rewards.py | 134 ++++---- .../tests/test_staking_rewards.py | 196 ++++++------ cardano_node_tests/tests/test_tx_basic.py | 24 +- cardano_node_tests/tests/test_tx_fees.py | 48 +-- .../tests/test_tx_many_utxos.py | 6 +- cardano_node_tests/tests/test_tx_mempool.py | 2 +- cardano_node_tests/tests/test_tx_metadata.py | 40 +-- cardano_node_tests/tests/test_tx_negative.py | 34 +-- .../tests/test_tx_unbalanced.py | 20 +- .../tests/test_update_proposals.py | 14 +- .../test_update_plutusv2_builtins.py | 2 +- .../tests/tests_plutus/mint_build.py | 2 +- .../tests/tests_plutus/mint_raw.py | 4 +- .../tests/tests_plutus/spend_build.py | 16 +- .../tests/tests_plutus/spend_raw.py | 16 +- .../tests/tests_plutus/test_lobster.py | 28 +- .../tests/tests_plutus/test_mint_build.py | 52 ++-- .../tests_plutus/test_mint_negative_build.py | 2 +- .../tests_plutus/test_mint_negative_raw.py | 16 +- .../tests/tests_plutus/test_mint_raw.py | 64 ++-- .../tests/tests_plutus/test_spend_build.py | 46 +-- .../tests_plutus/test_spend_compat_build.py | 2 +- .../tests_plutus/test_spend_compat_raw.py | 2 +- .../tests_plutus/test_spend_datum_build.py | 12 +- .../tests_plutus/test_spend_datum_raw.py | 10 +- .../tests_plutus/test_spend_negative_build.py | 12 +- .../tests_plutus/test_spend_negative_raw.py | 30 +- .../tests/tests_plutus/test_spend_raw.py | 30 +- .../tests/tests_plutus_v2/mint_build.py | 6 +- .../tests/tests_plutus_v2/mint_raw.py | 6 +- .../tests/tests_plutus_v2/spend_build.py | 10 +- .../tests/tests_plutus_v2/spend_raw.py | 12 +- .../tests/tests_plutus_v2/test_mint_build.py | 34 +-- .../test_mint_negative_build.py | 4 +- .../tests_plutus_v2/test_mint_negative_raw.py | 8 +- 
.../tests/tests_plutus_v2/test_mint_raw.py | 16 +- .../test_mint_secp256k1_build.py | 6 +- .../test_mint_secp256k1_raw.py | 4 +- .../tests/tests_plutus_v2/test_spend_build.py | 20 +- .../test_spend_collateral_build.py | 6 +- .../test_spend_collateral_raw.py | 2 +- .../test_spend_compat_build.py | 2 +- .../tests_plutus_v2/test_spend_compat_raw.py | 2 +- .../tests_plutus_v2/test_spend_datum_build.py | 20 +- .../tests_plutus_v2/test_spend_datum_raw.py | 12 +- .../tests/tests_plutus_v2/test_spend_raw.py | 24 +- .../test_spend_ref_inputs_build.py | 54 ++-- .../test_spend_ref_inputs_raw.py | 46 +-- .../test_spend_ref_scripts_build.py | 86 +++--- .../test_spend_ref_scripts_raw.py | 80 ++--- .../test_spend_secp256k1_build.py | 22 +- .../test_spend_secp256k1_raw.py | 12 +- cardano_node_tests/tests/tx_common.py | 2 +- cardano_node_tests/utils/cluster_nodes.py | 2 +- cardano_node_tests/utils/cluster_scripts.py | 54 ++-- cardano_node_tests/utils/clusterlib_utils.py | 56 ++-- cardano_node_tests/utils/configuration.py | 20 +- cardano_node_tests/utils/faucet.py | 2 +- cardano_node_tests/utils/locking.py | 2 +- cardano_node_tests/utils/model_ekg.py | 2 +- cardano_node_tests/utils/testnet_cleanup.py | 14 +- cardano_node_tests/utils/tx_view.py | 30 +- cardano_node_tests/utils/types.py | 2 +- 91 files changed, 1402 insertions(+), 1402 deletions(-) diff --git a/cardano_node_tests/cardano_cli_coverage.py b/cardano_node_tests/cardano_cli_coverage.py index 13bac1b19..3982bb3d1 100755 --- a/cardano_node_tests/cardano_cli_coverage.py +++ b/cardano_node_tests/cardano_cli_coverage.py @@ -92,7 +92,7 @@ def merge_coverage(dict_a: dict, dict_b: dict) -> dict: dict_a[key] = sorted(new_list) elif key in dict_a and isinstance(value, addable) and isinstance(dict_a[key], addable): dict_a[key] += value - # skipped arguments and commands are not in the available commands dict + # Skipped arguments and commands are not in the available commands dict elif key not in dict_a: continue elif not 
isinstance(value, dict): @@ -120,17 +120,17 @@ def parse_cmd_output(output: str) -> list[str]: section_start = True continue if section_start: - # skip line with wrapped description from previous command + # Skip line with wrapped description from previous command if line.startswith(" "): continue - # skip line with subsection description + # Skip line with subsection description if not line.startswith(" "): continue line_s = line.strip() if not line_s: continue item = line_s.split()[0] - # in case the item looks like "-h,--help", take only the long option + # In case the item looks like "-h,--help", take only the long option arg = item.split(",")[-1].strip() cli_args.append(arg) diff --git a/cardano_node_tests/cluster_management/cache.py b/cardano_node_tests/cluster_management/cache.py index 754a125e6..07f6f26fc 100644 --- a/cardano_node_tests/cluster_management/cache.py +++ b/cardano_node_tests/cluster_management/cache.py @@ -11,9 +11,9 @@ class ClusterManagerCache: Here goes only data that makes sense to reuse in multiple tests. """ - # single `ClusterLib` instance can be used in multiple tests executed on the same worker + # Single `ClusterLib` instance can be used in multiple tests executed on the same worker cluster_obj: clusterlib.ClusterLib | None = None - # data for initialized cluster instance + # Data for initialized cluster instance test_data: dict = dataclasses.field(default_factory=dict) addrs_data: dict = dataclasses.field(default_factory=dict) last_checksum: str = "" @@ -22,7 +22,7 @@ class ClusterManagerCache: class CacheManager: """Set of cache management methods.""" - # every pytest worker has its own cache, i.e. this cache is local to single worker + # Every pytest worker has its own cache, i.e. 
this cache is local to single worker cache: tp.ClassVar[dict[int, ClusterManagerCache]] = {} @classmethod diff --git a/cardano_node_tests/cluster_management/cluster_getter.py b/cardano_node_tests/cluster_management/cluster_getter.py index 140a86419..f82b1c846 100644 --- a/cardano_node_tests/cluster_management/cluster_getter.py +++ b/cardano_node_tests/cluster_management/cluster_getter.py @@ -87,7 +87,7 @@ class _ClusterGetStatus: instance_dir: pl.Path = pl.Path("/nonexistent") final_lock_resources: tp.Iterable[str] = () final_use_resources: tp.Iterable[str] = () - # status files + # Status files started_tests_sfiles: tp.Iterable[pl.Path] = () marked_ready_sfiles: tp.Iterable[pl.Path] = () marked_running_my_anywhere: tp.Iterable[pl.Path] = () @@ -322,11 +322,11 @@ def _is_healthy(self, instance_num: int) -> bool: def _cluster_needs_respin(self, instance_num: int) -> bool: """Check if it is necessary to respin cluster.""" instance_dir = self.pytest_tmp_dir / f"{common.CLUSTER_DIR_TEMPLATE}{instance_num}" - # if cluster instance is not started yet + # If cluster instance is not started yet if not (instance_dir / common.CLUSTER_RUNNING_FILE).exists(): return True - # if it was indicated that the cluster instance needs to be respun + # If it was indicated that the cluster instance needs to be respun if list(instance_dir.glob(f"{common.RESPIN_NEEDED_GLOB}_*")): return True @@ -340,13 +340,13 @@ def _cluster_needs_respin(self, instance_num: int) -> bool: def _test_needs_respin(self, cget_status: _ClusterGetStatus) -> bool: """Check if it is necessary to respin cluster for the test.""" - # if this is non-initial marked test, we can ignore custom start command, + # If this is non-initial marked test, we can ignore custom start command, # as it was handled by the initial marked test noninitial_marked_test = cget_status.mark and cget_status.marked_ready_sfiles if noninitial_marked_test: return False - # respin is needed when custom start command was specified + # Respin is 
needed when custom start command was specified return bool(cget_status.start_cmd) def _on_marked_test_stop(self, instance_num: int, mark: str) -> None: @@ -354,7 +354,7 @@ def _on_marked_test_stop(self, instance_num: int, mark: str) -> None: self.log(f"c{instance_num}: in `_on_marked_test_stop`") instance_dir = self.pytest_tmp_dir / f"{common.CLUSTER_DIR_TEMPLATE}{instance_num}" - # set cluster instance to be respun if needed + # Set cluster instance to be respun if needed respin_after_mark_files = list( instance_dir.glob(f"{common.RESPIN_AFTER_MARK_GLOB}_@@{mark}@@_*") ) @@ -364,17 +364,17 @@ def _on_marked_test_stop(self, instance_num: int, mark: str) -> None: self.log(f"c{instance_num}: in `_on_marked_test_stop`, creating 'respin needed' file") (instance_dir / f"{common.RESPIN_NEEDED_GLOB}_{self.worker_id}").touch() - # remove files that indicates that the mark is ready + # Remove files that indicates that the mark is ready marked_ready_sfiles = instance_dir.glob(f"{common.TEST_CURR_MARK_GLOB}_@@{mark}@@_*") for f in marked_ready_sfiles: f.unlink() - # remove file that indicates resources that are locked by the marked tests + # Remove file that indicates resources that are locked by the marked tests marked_lock_files = instance_dir.glob(f"{common.RESOURCE_LOCKED_GLOB}_*_%%{mark}%%_*") for f in marked_lock_files: f.unlink() - # remove file that indicates resources that are in-use by the marked tests + # Remove file that indicates resources that are in-use by the marked tests marked_use_files = instance_dir.glob(f"{common.RESOURCE_IN_USE_GLOB}_*_%%{mark}%%_*") for f in marked_use_files: f.unlink() @@ -401,23 +401,23 @@ def _update_marked_tests( keep track of marked tests and clear the mark and cluster instance only when no marked test was running for some time. 
""" - # no need to continue if there are no marked tests + # No need to continue if there are no marked tests if not list(cget_status.instance_dir.glob(f"{common.TEST_CURR_MARK_GLOB}_*")): return - # marked tests don't need to be running yet if the cluster is being respun + # Marked tests don't need to be running yet if the cluster is being respun respin_in_progress = list( cget_status.instance_dir.glob(f"{common.RESPIN_IN_PROGRESS_GLOB}_*") ) if respin_in_progress: return - # get marked tests status + # Get marked tests status marked_tests_status = self._get_marked_tests_status( marked_tests_cache=marked_tests_cache, instance_num=cget_status.instance_num ) - # update marked tests status + # Update marked tests status instance_num = cget_status.instance_num marks_in_progress = [ f.name.split("@@")[1] @@ -430,7 +430,7 @@ def _update_marked_tests( for m in marked_tests_status: marked_tests_status[m] += 1 - # clean the stale status files if we are waiting too long for the next marked test + # Clean the stale status files if we are waiting too long for the next marked test if marked_tests_status[m] >= 20: self.log( f"c{instance_num}: no marked tests running for a while, " @@ -507,7 +507,7 @@ def _is_already_running(self) -> bool: ) ) - # test is already running, nothing to set up + # Test is already running, nothing to set up if test_on_worker and self._cluster_instance_num != -1: self.log(f"{test_on_worker[0]} already exists") return True @@ -567,7 +567,7 @@ def _marked_select_instance(self, cget_status: _ClusterGetStatus) -> bool: ) return False - # if here, this will be the first test with the mark + # If here, this will be the first test with the mark return True def _fail_on_all_dead(self) -> None: @@ -581,25 +581,25 @@ def _fail_on_all_dead(self) -> None: def _cleanup_dead_clusters(self, cget_status: _ClusterGetStatus) -> None: """Cleanup if the selected cluster instance failed to start.""" - # move on to other cluster instance + # Move on to other cluster 
instance cget_status.selected_instance = -1 cget_status.respin_here = False cget_status.respin_ready = False - # remove status files that are checked by other workers + # Remove status files that are checked by other workers for sf in cget_status.instance_dir.glob(f"{common.TEST_CURR_MARK_GLOB}_*"): sf.unlink() def _init_respin(self, cget_status: _ClusterGetStatus) -> bool: """Initialize respin of this cluster instance on this worker.""" - # respin already initialized + # Respin already initialized if cget_status.respin_here: return True if not (cget_status.cluster_needs_respin or self._test_needs_respin(cget_status)): return True - # if tests are running on the instance, we cannot respin, therefore we cannot continue + # If tests are running on the instance, we cannot respin, therefore we cannot continue if cget_status.started_tests_sfiles: self.log(f"c{cget_status.instance_num}: tests are running, cannot respin") return False @@ -619,7 +619,7 @@ def _init_respin(self, cget_status: _ClusterGetStatus) -> bool: if not respin_in_progress_file.exists(): respin_in_progress_file.touch() - # remove mark status files as these will not be valid after respin + # Remove mark status files as these will not be valid after respin for f in cget_status.instance_dir.glob(f"{common.TEST_CURR_MARK_GLOB}_*"): f.unlink() @@ -636,7 +636,7 @@ def _finish_respin(self, cget_status: _ClusterGetStatus) -> bool: cget_status.respin_ready = False cget_status.respin_here = False - # remove status files that are no longer valid after respin + # Remove status files that are no longer valid after respin for f in cget_status.instance_dir.glob(f"{common.RESPIN_IN_PROGRESS_GLOB}_*"): f.unlink() for f in cget_status.instance_dir.glob(f"{common.RESPIN_NEEDED_GLOB}_*"): @@ -646,7 +646,7 @@ def _finish_respin(self, cget_status: _ClusterGetStatus) -> bool: # NOTE: when `_respin` is called, the env variables needed for cluster start scripts need # to be already set (e.g. 
CARDANO_NODE_SOCKET_PATH) self.log(f"c{cget_status.instance_num}: ready to respin cluster") - # the actual `_respin` function will be called outside of global lock so other workers + # The actual `_respin` function will be called outside of global lock so other workers # don't need to wait cget_status.respin_ready = True return False @@ -666,30 +666,30 @@ def _create_test_status_files(self, cget_status: _ClusterGetStatus) -> None: """Create status files for test that is about to start on this cluster instance.""" mark_res_str = f"_%%{cget_status.mark}%%" if cget_status.mark else "" - # create status file for each in-use resource + # Create status file for each in-use resource for r in cget_status.final_use_resources: ( self.instance_dir / f"{common.RESOURCE_IN_USE_GLOB}_@@{r}@@{mark_res_str}_{self.worker_id}" ).touch() - # create status file for each locked resource + # Create status file for each locked resource for r in cget_status.final_lock_resources: ( self.instance_dir / f"{common.RESOURCE_LOCKED_GLOB}_@@{r}@@{mark_res_str}_{self.worker_id}" ).touch() - # cleanup = cluster respin after test (group of tests) is finished + # Cleanup = cluster respin after test (group of tests) is finished if cget_status.cleanup: - # cleanup after group of test that are marked with a marker + # Cleanup after group of test that are marked with a marker if cget_status.mark: self.log(f"c{cget_status.instance_num}: cleanup and mark") ( self.instance_dir / f"{common.RESPIN_AFTER_MARK_GLOB}_@@{cget_status.mark}@@_{self.worker_id}" ).touch() - # cleanup after single test (e.g. singleton) + # Cleanup after single test (e.g. 
singleton) else: self.log(f"c{cget_status.instance_num}: cleanup and not mark") (self.instance_dir / f"{common.RESPIN_NEEDED_GLOB}_{self.worker_id}").touch() @@ -699,7 +699,7 @@ def _create_test_status_files(self, cget_status: _ClusterGetStatus) -> None: test_running_file = ( self.instance_dir / f"{common.TEST_RUNNING_GLOB}{mark_run_str}_{self.worker_id}" ) - # write the name of the test that is starting on this cluster instance, leave out the + # Write the name of the test that is starting on this cluster instance, leave out the # '(setup)' part test_running_file.write_text(cget_status.current_test.split(" ")[0]) @@ -775,7 +775,7 @@ def get_cluster_instance( # noqa: C901 "'DEV_CLUSTER_RUNNING' is set." ) - # check if the development cluster instance is ready by now so we don't need to obtain + # Check if the development cluster instance is ready by now so we don't need to obtain # cluster lock when it is not necessary if not self._is_dev_cluster_ready(): with locking.FileLockIfXdist(self.cluster_lock): @@ -793,7 +793,7 @@ def get_cluster_instance( # noqa: C901 if resources.Resources.CLUSTER not in lock_resources: msg = "Custom start command can be used only together with singleton." 
raise RuntimeError(msg) - # always clean after test(s) that started cluster with custom configuration + # Always clean after test(s) that started cluster with custom configuration cleanup = True use_resources = self._init_use_resources( @@ -813,26 +813,26 @@ def get_cluster_instance( # noqa: C901 self.log(f"want to run test '{cget_status.current_test}'") - # iterate until it is possible to start the test + # Iterate until it is possible to start the test while True: if cget_status.respin_ready: self._respin(start_cmd=start_cmd) - # sleep for a while to avoid too many checks in a short time + # Sleep for a while to avoid too many checks in a short time _xdist_sleep(random.uniform(0.6, 1.2) * cget_status.sleep_delay) # pylint: disable=consider-using-max-builtin cget_status.sleep_delay = max(cget_status.sleep_delay, 1) - # nothing time consuming can go under this lock as all other workers will need to wait + # Nothing time consuming can go under this lock as all other workers will need to wait with locking.FileLockIfXdist(self.cluster_lock): if self._is_already_running(): return self.cluster_instance_num - # fail if all cluster instances are dead + # Fail if all cluster instances are dead self._fail_on_all_dead() if mark: - # check if tests with my mark are already locked to any cluster instance + # Check if tests with my mark are already locked to any cluster instance cget_status.marked_running_my_anywhere = list( self.pytest_tmp_dir.glob( f"{common.CLUSTER_DIR_TEMPLATE}*/" @@ -846,14 +846,14 @@ def get_cluster_instance( # noqa: C901 cget_status.sleep_delay = 5 continue - # set "prio" for this test if indicated + # Set "prio" for this test if indicated self._init_prio(cget_status) self._cluster_instance_num = -1 - # try all existing cluster instances; randomize the order + # Try all existing cluster instances; randomize the order for instance_num in random.sample(available_instances, k=self.num_of_instances): - # if instance to run the test on was already decided, 
skip all other instances + # If instance to run the test on was already decided, skip all other instances if cget_status.selected_instance not in (-1, instance_num): continue @@ -863,17 +863,17 @@ def get_cluster_instance( # noqa: C901 ) cget_status.instance_dir.mkdir(exist_ok=True) - # cleanup cluster instance where attempt to start cluster failed repeatedly + # Cleanup cluster instance where attempt to start cluster failed repeatedly if (cget_status.instance_dir / common.CLUSTER_DEAD_FILE).exists(): self._cleanup_dead_clusters(cget_status) continue - # cluster respin planned or in progress, so no new tests can start + # Cluster respin planned or in progress, so no new tests can start if self._respun_by_other_worker(cget_status): cget_status.sleep_delay = 5 continue - # are there tests already running on this cluster instance? + # Are there tests already running on this cluster instance? cget_status.started_tests_sfiles = list( cget_status.instance_dir.glob(f"{common.TEST_RUNNING_GLOB}_*") ) @@ -883,7 +883,7 @@ def get_cluster_instance( # noqa: C901 cget_status.instance_dir.glob(f"{common.TEST_CURR_MARK_GLOB}_@@{mark}@@_*") ) - # if marked tests are already running, update their status + # If marked tests are already running, update their status self._update_marked_tests( marked_tests_cache=marked_tests_cache, cget_status=cget_status ) @@ -892,7 +892,7 @@ def get_cluster_instance( # noqa: C901 # Cache the result as the check itself can be expensive. cget_status.cluster_needs_respin = self._cluster_needs_respin(instance_num) - # select this instance for running marked tests if possible + # Select this instance for running marked tests if possible if mark and not self._marked_select_instance(cget_status): cget_status.sleep_delay = 2 continue @@ -918,27 +918,27 @@ def get_cluster_instance( # noqa: C901 # not "any pool"). 
need_resolve_resources = not cget_status.marked_ready_sfiles - # check availability of the required resources + # Check availability of the required resources if need_resolve_resources and not self._resolve_resources_availability( cget_status ): cget_status.sleep_delay = 5 continue - # if respin is needed, indicate that the cluster will be re-spun + # If respin is needed, indicate that the cluster will be re-spun # (after all currently running tests are finished) if not self._init_respin(cget_status): continue - # we've found suitable cluster instance + # We've found suitable cluster instance cget_status.selected_instance = instance_num self._cluster_instance_num = instance_num self.log(f"c{instance_num}: can run test '{cget_status.current_test}'") - # set environment variables that are needed when respinning the cluster + # Set environment variables that are needed when respinning the cluster # and running tests cluster_nodes.set_cluster_env(instance_num) - # remove "prio" status file + # Remove "prio" status file if prio: ( self.pytest_tmp_dir / f"{common.PRIO_IN_PROGRESS_GLOB}_{self.worker_id}" @@ -949,20 +949,20 @@ def get_cluster_instance( # noqa: C901 # don't try to prepare another cluster instance. 
self._init_marked_test(cget_status) - # if needed, finish respin related actions + # If needed, finish respin related actions if not self._finish_respin(cget_status): continue - # from this point on, all conditions needed to start the test are met + # From this point on, all conditions needed to start the test are met break else: - # if the test cannot start on any instance, return to top-level loop + # If the test cannot start on any instance, return to top-level loop cget_status.tried_all_instances = True continue self._create_test_status_files(cget_status) - # cluster instance is ready, we can start the test + # Cluster instance is ready, we can start the test break return instance_num diff --git a/cardano_node_tests/cluster_management/manager.py b/cardano_node_tests/cluster_management/manager.py index bf65ce4a6..e0a1ea991 100644 --- a/cardano_node_tests/cluster_management/manager.py +++ b/cardano_node_tests/cluster_management/manager.py @@ -36,7 +36,7 @@ def _get_manager_fixture_line_str() -> str: """Get `filename#lineno` of current fixture, called from contextmanager.""" - # get past `cache_fixture` and `contextmanager` to the fixture + # Get past `cache_fixture` and `contextmanager` to the fixture calling_frame = inspect.currentframe().f_back.f_back.f_back # type: ignore assert calling_frame return helpers.get_line_str_from_frame(frame=calling_frame) @@ -173,7 +173,7 @@ def stop_all_clusters(self) -> None: """Stop all cluster instances.""" self.log("called `stop_all_clusters`") - # don't stop cluster if it was started outside of test framework + # Don't stop cluster if it was started outside of test framework if configuration.DEV_CLUSTER_RUNNING: LOGGER.warning("Ignoring request to stop clusters as 'DEV_CLUSTER_RUNNING' is set.") return @@ -261,26 +261,26 @@ def on_test_stop(self) -> None: if not list(self.instance_dir.glob(f"{common.RESPIN_NEEDED_GLOB}_*")): logfiles.clean_ignore_rules(ignore_file_id=self.worker_id) - # remove resource locking files created 
by the worker, ignore resources that have mark + # Remove resource locking files created by the worker, ignore resources that have mark resource_locking_files = list( self.instance_dir.glob(f"{common.RESOURCE_LOCKED_GLOB}_@@*@@_{self.worker_id}") ) for f in resource_locking_files: f.unlink() - # remove "resource in use" files created by the worker, ignore resources that have mark + # Remove "resource in use" files created by the worker, ignore resources that have mark resource_in_use_files = list( self.instance_dir.glob(f"{common.RESOURCE_IN_USE_GLOB}_@@*@@_{self.worker_id}") ) for f in resource_in_use_files: f.unlink() - # remove file that indicates that a test is running on the worker + # Remove file that indicates that a test is running on the worker next( iter(self.instance_dir.glob(f"{common.TEST_RUNNING_GLOB}*_{self.worker_id}")) ).unlink(missing_ok=True) - # log names of tests that keep running on the cluster instance + # Log names of tests that keep running on the cluster instance tnames = [ tf.read_text().strip() for tf in self.instance_dir.glob(f"{common.TEST_RUNNING_GLOB}*") @@ -339,14 +339,14 @@ def _save_cli_coverage(self) -> None: def _reload_cluster_obj(self, state_dir: pl.Path) -> None: """Reload cluster instance data if necessary.""" addrs_data_checksum = helpers.checksum(state_dir / cluster_nodes.ADDRS_DATA) - # the checksum will not match when cluster was respun + # The checksum will not match when cluster was respun if addrs_data_checksum == self.cache.last_checksum: return - # save CLI coverage collected by the old `cluster_obj` instance + # Save CLI coverage collected by the old `cluster_obj` instance self._save_cli_coverage() - # replace the old `cluster_obj` instance and reload data + # Replace the old `cluster_obj` instance and reload data self.cache.cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj() self.cache.test_data = {} self.cache.addrs_data = cluster_nodes.load_addrs_data() @@ -367,7 +367,7 @@ def init( **IMPORTANT**: 
This method must be called before any other method of this class. """ - # get number of initialized cluster instance once it is possible to start a test + # Get number of initialized cluster instance once it is possible to start a test instance_num = cluster_getter.ClusterGetter( worker_id=self.worker_id, pytest_config=self.pytest_config, @@ -383,11 +383,11 @@ def init( ) self._cluster_instance_num = instance_num - # reload cluster instance data if necessary + # Reload cluster instance data if necessary state_dir = cluster_nodes.get_cluster_env().state_dir self._reload_cluster_obj(state_dir=state_dir) - # initialize `cardano_clusterlib.ClusterLib` object + # Initialize `cardano_clusterlib.ClusterLib` object cluster_obj = self.cache.cluster_obj if not cluster_obj: msg = "`cluster_obj` not available, that cannot happen" diff --git a/cardano_node_tests/pytest_plugins/xdist_scheduler.py b/cardano_node_tests/pytest_plugins/xdist_scheduler.py index 49102bc8c..053c20dd1 100644 --- a/cardano_node_tests/pytest_plugins/xdist_scheduler.py +++ b/cardano_node_tests/pytest_plugins/xdist_scheduler.py @@ -53,7 +53,7 @@ def _split_scope(self, nodeid: str) -> str: Example: example/loadsuite/test/test_gamma.py::test_beta0[param]@group_name@long """ - # check the index of ']' to avoid the case: parametrize mark value has '@' + # Check the index of ']' to avoid the case: parametrize mark value has '@' param_end_idx = nodeid.rfind("]") scope_start_idx = param_end_idx if param_end_idx != -1 else 0 @@ -105,11 +105,11 @@ def _assign_work_unit(self, node: workermanage.WorkerController) -> None: assigned_to_node = self.assigned_work.setdefault(node, collections.OrderedDict()) scope, work_unit = None, None - # check if there are any long-running tests already pending + # Check if there are any long-running tests already pending long_pending = self._is_long_pending(assigned_to_node) if long_pending: - # try to find a work unit with no long-running test if there is already a long-running + # 
Try to find a work unit with no long-running test if there is already a long-running # test pending scope = self._get_short_scope() if scope: @@ -121,14 +121,14 @@ def _assign_work_unit(self, node: workermanage.WorkerController) -> None: if scope: work_unit = self.workqueue.pop(scope) - # grab the first unit of work if none was grabbed above + # Grab the first unit of work if none was grabbed above if work_unit is None: scope, work_unit = self.workqueue.popitem(last=False) - # keep track of the assigned work + # Keep track of the assigned work assigned_to_node[scope] = work_unit - # ask the node to execute the workload + # Ask the node to execute the workload worker_collection = self.registered_collections[node] nodeids_indexes = [ worker_collection.index(nodeid) @@ -150,7 +150,7 @@ def pytest_collection_modifyitems(items: list) -> None: comps = [item.nodeid] - # add the group name to nodeid as suffix + # Add the group name to nodeid as suffix if group_marker: gname = ( group_marker.args[0] @@ -159,7 +159,7 @@ def pytest_collection_modifyitems(items: list) -> None: ) comps.append(gname) - # add "long" to nodeid as suffix + # Add "long" to nodeid as suffix if long_marker: comps.append(LONG_MARKER) diff --git a/cardano_node_tests/tests/common.py b/cardano_node_tests/tests/common.py index fb130baa6..c03129f3d 100644 --- a/cardano_node_tests/tests/common.py +++ b/cardano_node_tests/tests/common.py @@ -30,7 +30,7 @@ _BLD_SKIP_REASON = "transaction era must be the same as node era" BUILD_UNUSABLE = bool(_BLD_SKIP_REASON) -# common `skipif`s +# Common `skipif`s SKIPIF_BUILD_UNUSABLE = pytest.mark.skipif( BUILD_UNUSABLE, reason=( @@ -79,7 +79,7 @@ ) -# common parametrization +# Common parametrization PARAM_USE_BUILD_CMD = pytest.mark.parametrize( "use_build_cmd", ( @@ -118,16 +118,16 @@ ) -# intervals for `wait_for_epoch_interval` (negative values are counted from the end of an epoch) +# Intervals for `wait_for_epoch_interval` (negative values are counted from the end of 
an epoch) if cluster_nodes.get_cluster_type().type == cluster_nodes.ClusterType.LOCAL: - # time buffer at the end of an epoch, enough to do something that takes several transactions + # Time buffer at the end of an epoch, enough to do something that takes several transactions EPOCH_STOP_SEC_BUFFER = -40 - # time when all ledger state info is available for the current epoch + # Time when all ledger state info is available for the current epoch EPOCH_START_SEC_LEDGER_STATE = -19 - # time buffer at the end of an epoch after getting ledger state info + # Time buffer at the end of an epoch after getting ledger state info EPOCH_STOP_SEC_LEDGER_STATE = -15 else: - # we can be more generous on testnets + # We can be more generous on testnets EPOCH_STOP_SEC_BUFFER = -200 EPOCH_START_SEC_LEDGER_STATE = -300 EPOCH_STOP_SEC_LEDGER_STATE = -200 @@ -166,7 +166,7 @@ def get_test_id(cluster_obj: clusterlib.ClusterLib) -> str: f"{curr_test.test_function}{curr_test.test_params}_ci{cluster_obj.cluster_id}_{rand_str}" ) - # log test ID to cluster manager log file - getting test ID happens early + # Log test ID to cluster manager log file - getting test ID happens early # after the start of a test, so the log entry can be used for determining # time of the test start cm: cluster_management.ClusterManager = cluster_obj._cluster_manager # type: ignore @@ -294,7 +294,7 @@ def fail_on_fork( err_msg.append(f"Following nodes appear to be out of sync: {sorted(unsynced_nodes)}") if err_msg: - # the local cluster needs to be respun before it is usable again + # The local cluster needs to be respun before it is usable again cluster_manager.set_needs_respin() raise AssertionError("\n".join(err_msg)) diff --git a/cardano_node_tests/tests/conftest.py b/cardano_node_tests/tests/conftest.py index 3b7c10e46..9fa116680 100644 --- a/cardano_node_tests/tests/conftest.py +++ b/cardano_node_tests/tests/conftest.py @@ -32,10 +32,10 @@ LOGGER = logging.getLogger(__name__) INTERRUPTED_NAME = 
".session_interrupted" -# make sure there's enough time to stop all cluster instances at the end of session +# Make sure there's enough time to stop all cluster instances at the end of session workermanage.NodeManager.EXIT_TIMEOUT = 30 -# use custom xdist scheduler +# Use custom xdist scheduler pytest_plugins = ("cardano_node_tests.pytest_plugins.xdist_scheduler",) @@ -67,7 +67,7 @@ def pytest_addoption(parser: tp.Any) -> None: def pytest_configure(config: tp.Any) -> None: - # don't bother collecting metadata if all tests are skipped + # Don't bother collecting metadata if all tests are skipped if config.getvalue("skipall"): return @@ -144,7 +144,7 @@ def _skip_all_tests(config: tp.Any, items: list) -> bool: @pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems(config: tp.Any, items: list) -> None: # noqa: C901 - # prevent on slave nodes (xdist) + # Prevent on slave nodes (xdist) if hasattr(config, "slaveinput"): return @@ -158,12 +158,12 @@ def _mark_needs_dbsync(item: tp.Any) -> None: if "needs_dbsync" not in item.keywords: return - # all tests marked with 'needs_dbsync' are db-sync tests, and should be marked + # All tests marked with 'needs_dbsync' are db-sync tests, and should be marked # with the 'dbsync' marker as well if "dbsync" not in item.keywords: item.add_marker(pytest.mark.dbsync) - # skip all tests that require db-sync when db-sync is not available + # Skip all tests that require db-sync when db-sync is not available if not configuration.HAS_DBSYNC: item.add_marker(skip_dbsync_marker) @@ -218,7 +218,7 @@ def _save_all_cluster_instances_artifacts( """Save artifacts of all cluster instances after all tests are finished.""" cluster_manager_obj.log("running `_save_all_cluster_instances_artifacts`") - # stop all cluster instances + # Stop all cluster instances with helpers.ignore_interrupt(): cluster_manager_obj.save_all_clusters_artifacts() @@ -227,7 +227,7 @@ def _stop_all_cluster_instances(cluster_manager_obj: cluster_management.ClusterM 
"""Stop all cluster instances after all tests are finished.""" cluster_manager_obj.log("running `_stop_all_cluster_instances`") - # stop all cluster instances + # Stop all cluster instances with helpers.ignore_interrupt(): cluster_manager_obj.stop_all_clusters() @@ -237,7 +237,7 @@ def _testnet_cleanup(pytest_root_tmp: pl.Path) -> None: if cluster_nodes.get_cluster_type().type != cluster_nodes.ClusterType.TESTNET: return - # there's only one cluster instance for testnets, so we don't need to use cluster manager + # There's only one cluster instance for testnets, so we don't need to use cluster manager cluster_obj = cluster_nodes.get_cluster_type().get_cluster_obj() destdir = pytest_root_tmp.parent / f"cleanup-{pytest_root_tmp.stem}-{helpers.get_rand_str(8)}" @@ -339,7 +339,7 @@ def testfile_temp_dir() -> pl.Path: The dir is specific to a single test file. """ - # get a dir path based on the test file running + # Get a dir path based on the test file running dir_path = ( (os.environ.get("PYTEST_CURRENT_TEST") or "unknown") .split("::")[0] @@ -368,7 +368,7 @@ def cluster_manager( request: FixtureRequest, ) -> tp.Generator[cluster_management.ClusterManager, None, None]: """Return instance of `cluster_management.ClusterManager`.""" - # hide from traceback to make logs errors more readable + # Hide from traceback to make logs errors more readable __tracebackhide__ = True # pylint: disable=unused-variable cluster_manager_obj = cluster_management.ClusterManager( diff --git a/cardano_node_tests/tests/delegation.py b/cardano_node_tests/tests/delegation.py index efcacf090..922f7491b 100644 --- a/cardano_node_tests/tests/delegation.py +++ b/cardano_node_tests/tests/delegation.py @@ -78,9 +78,9 @@ def cluster_and_pool( return cluster_obj, pool_id blocks_before = clusterlib_utils.get_blocks_before(cluster_obj) - # sort pools by how many blocks they produce + # Sort pools by how many blocks they produce pool_ids_s = sorted(blocks_before, key=blocks_before.get, reverse=True) # 
type: ignore - # select a pool with reasonable margin + # Select a pool with reasonable margin for pool_id in pool_ids_s: pool_params = cluster_obj.g_query.get_pool_state(stake_pool_id=pool_id) if pool_params.pool_params["margin"] <= 0.5 and not pool_params.retiring: @@ -135,7 +135,7 @@ def delegate_stake_addr( use_build_cmd: bool = False, ) -> DelegationOut: """Submit registration certificate and delegate to pool.""" - # create key pairs and addresses + # Create key pairs and addresses if not pool_user: stake_addr_rec = clusterlib_utils.create_stake_addr_records( f"{temp_template}_addr0", cluster_obj=cluster_obj @@ -148,7 +148,7 @@ def delegate_stake_addr( pool_user = clusterlib.PoolUser(payment=payment_addr_rec, stake=stake_addr_rec) - # fund payment address + # Fund payment address clusterlib_utils.fund_from_faucet( pool_user.payment, cluster_obj=cluster_obj, @@ -156,7 +156,7 @@ def delegate_stake_addr( amount=amount, ) - # create stake address registration cert if address is not already registered + # Create stake address registration cert if address is not already registered if cluster_obj.g_query.get_stake_addr_info(pool_user.stake.address): stake_addr_reg_cert_file = None else: @@ -166,7 +166,7 @@ def delegate_stake_addr( stake_vkey_file=pool_user.stake.vkey_file, ) - # create stake address delegation cert + # Create stake address delegation cert deleg_kwargs: dict[str, tp.Any] = { "addr_name": f"{temp_template}_addr0", "stake_vkey_file": pool_user.stake.vkey_file, @@ -185,7 +185,7 @@ def delegate_stake_addr( src_address = pool_user.payment.address src_init_balance = cluster_obj.g_query.get_address_balance(src_address) - # register stake address and delegate it to pool + # Register stake address and delegate it to pool certificate_files = [stake_addr_deleg_cert_file] if stake_addr_reg_cert_file: certificate_files.insert(0, stake_addr_reg_cert_file) @@ -213,14 +213,14 @@ def delegate_stake_addr( src_address=src_address, tx_name=f"{temp_template}_reg_deleg", 
tx_files=tx_files ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated deposit = cluster_obj.g_query.get_address_deposit() if stake_addr_reg_cert_file else 0 assert ( cluster_obj.g_query.get_address_balance(src_address) == src_init_balance - deposit - tx_raw_output.fee ), f"Incorrect balance for source address `{src_address}`" - # check that the stake address was delegated + # Check that the stake address was delegated stake_addr_info = cluster_obj.g_query.get_stake_addr_info(pool_user.stake.address) assert stake_addr_info.delegation, f"Stake address was not delegated yet: {stake_addr_info}" assert stake_addr_info.delegation == pool_id, "Stake address delegated to wrong pool" diff --git a/cardano_node_tests/tests/kes.py b/cardano_node_tests/tests/kes.py index 0b7007dec..2172b4712 100644 --- a/cardano_node_tests/tests/kes.py +++ b/cardano_node_tests/tests/kes.py @@ -15,7 +15,7 @@ LOGGER = logging.getLogger(__name__) -# valid scenarios when we are testing the kes-period-info cli command +# Valid scenarios when we are testing the kes-period-info cli command class KesScenarios: ALL_VALID = "all_valid" ALL_INVALID = "all_invalid" diff --git a/cardano_node_tests/tests/plutus_common.py b/cardano_node_tests/tests/plutus_common.py index 3276c015c..fd232ed81 100644 --- a/cardano_node_tests/tests/plutus_common.py +++ b/cardano_node_tests/tests/plutus_common.py @@ -87,7 +87,7 @@ class ExecutionCost: fixed_cost: int -# scripts execution cost for Txs with single UTxO input and single Plutus script +# Scripts execution cost for Txs with single UTxO input and single Plutus script ALWAYS_FAILS_COST = ExecutionCost(per_time=476_468, per_space=1_700, fixed_cost=133) ALWAYS_SUCCEEDS_COST = ExecutionCost(per_time=368_100, per_space=1_700, fixed_cost=125) GUESSING_GAME_COST = ExecutionCost(per_time=236_715_138, per_space=870_842, fixed_cost=67_315) @@ -501,7 +501,7 @@ def check_plutus_costs( # We have 
the costs calibrated only for local testnet return - # sort records by total cost + # Sort records by total cost sorted_plutus = sorted( plutus_costs, key=lambda x: x["executionUnits"]["memory"] # type: ignore @@ -610,7 +610,7 @@ def check_return_collateral(cluster_obj: clusterlib.ClusterLib, tx_output: clust return_collateral_utxos = cluster_obj.g_query.get_utxo(tx_raw_output=tx_output) protocol_params = cluster_obj.g_query.get_protocol_params() - # when total collateral amount is specified, it is necessary to specify also return + # When total collateral amount is specified, it is necessary to specify also return # collateral `TxOut` to get the change, otherwise all collaterals will be collected if tx_output.total_collateral_amount and not tx_output.return_collateral_txouts: assert not return_collateral_utxos, "Return collateral UTxO was unexpectedly created" @@ -619,10 +619,10 @@ def check_return_collateral(cluster_obj: clusterlib.ClusterLib, tx_output: clust if not (tx_output.return_collateral_txouts or tx_output.total_collateral_amount): return - # check that correct return collateral UTxO was created + # Check that correct return collateral UTxO was created assert return_collateral_utxos, "Return collateral UTxO was NOT created" - # check that return collateral is the only output and that the index matches + # Check that return collateral is the only output and that the index matches out_utxos_ix = {r.utxo_ix for r in return_collateral_utxos} assert len(out_utxos_ix) == 1, "There are other outputs other than return collateral" # TODO: the index of change can be either 0 (in old node versions) or `txouts_count`, @@ -672,9 +672,9 @@ def check_return_collateral(cluster_obj: clusterlib.ClusterLib, tx_output: clust utxos=tx_output.return_collateral_txouts, coin=coin ), f"Incorrect return collateral token balance for token '{coin}'" - # automatic return collateral with `transaction build` command + # Automatic return collateral with `transaction build` command elif 
tx_output.change_address: - # check that the collateral amount charged corresponds to 'collateralPercentage' + # Check that the collateral amount charged corresponds to 'collateralPercentage' assert collateral_charged == round( tx_output.fee * protocol_params["collateralPercentage"] / 100 ), "The collateral amount charged is not the expected amount" @@ -683,7 +683,7 @@ def check_return_collateral(cluster_obj: clusterlib.ClusterLib, tx_output: clust tx_output.change_address == return_collateral_utxos[0].address ), "Return collateral address doesn't match change address" - # the returned amount is the total of all collaterals minus fee + # The returned amount is the total of all collaterals minus fee expected_return_amount = int(tx_collaterals_amount - collateral_charged) assert returned_amount == expected_return_amount, ( diff --git a/cardano_node_tests/tests/test_addr_registration.py b/cardano_node_tests/tests/test_addr_registration.py index 1ce8f02b1..3c88431bf 100644 --- a/cardano_node_tests/tests/test_addr_registration.py +++ b/cardano_node_tests/tests/test_addr_registration.py @@ -36,7 +36,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -223,7 +223,7 @@ def test_addr_registration_deregistration( user_payment = pool_users[0].payment src_init_balance = cluster.g_query.get_address_balance(user_payment.address) - # create stake address registration cert + # Create stake address registration cert address_deposit = common.get_conway_address_deposit(cluster_obj=cluster) stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( @@ -232,14 +232,14 @@ def test_addr_registration_deregistration( stake_vkey_file=user_registered.stake.vkey_file, ) - # create stake address deregistration cert + # Create stake address deregistration cert stake_addr_dereg_cert_file = 
cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=address_deposit, stake_vkey_file=user_registered.stake.vkey_file, ) - # register and deregister stake address in single TX + # Register and deregister stake address in single TX tx_files = clusterlib.TxFiles( certificate_files=[ stake_addr_reg_cert_file, @@ -272,12 +272,12 @@ def test_addr_registration_deregistration( deposit=0, ) - # check that the stake address is not registered + # Check that the stake address is not registered assert not cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is registered: {user_registered.stake.address}" - # check that the balance for source address was correctly updated and that key deposit + # Check that the balance for source address was correctly updated and that key deposit # was not needed assert ( cluster.g_query.get_address_balance(user_payment.address) @@ -401,7 +401,7 @@ def test_registration_cert_with_wrong_key( """ temp_template = common.get_test_id(cluster) - # create stake address registration cert, use wrong stake vkey + # Create stake address registration cert, use wrong stake vkey with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", @@ -429,14 +429,14 @@ def test_register_addr_with_wrong_key( user_registered = pool_users_disposable[0] user_payment = pool_users[0].payment - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), stake_vkey_file=user_registered.stake.vkey_file, ) - # register stake address, use wrong payment skey + # Register stake address, use wrong payment skey tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_reg_cert_file], 
signing_key_files=[pool_users[1].payment.skey_file], @@ -466,7 +466,7 @@ def test_deregister_not_registered_addr( user_registered = pool_users_disposable[0] user_payment = pool_users[0].payment - # files for deregistering stake address + # Files for deregistering stake address stake_addr_dereg_cert = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), diff --git a/cardano_node_tests/tests/test_blocks.py b/cardano_node_tests/tests/test_blocks.py index fc162e1b7..8bdf52789 100644 --- a/cardano_node_tests/tests/test_blocks.py +++ b/cardano_node_tests/tests/test_blocks.py @@ -210,7 +210,7 @@ def test_unstable_stake_distribution( pool_name = cluster_management.Resources.POOL3 pool_rec = cluster_manager.cache.addrs_data[pool_name] - # wait for epoch interval where stake distribution for next epoch is unstable, + # Wait for epoch interval where stake distribution for next epoch is unstable, # that is anytime before last 300 slots of current epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, @@ -218,7 +218,7 @@ def test_unstable_stake_distribution( stop=-int(300 * cluster.slot_length + 5), ) - # it should NOT be possible to query leadership schedule + # It should NOT be possible to query leadership schedule with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_query.get_leadership_schedule( vrf_skey_file=pool_rec["vrf_key_pair"].skey_file, @@ -259,7 +259,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, @@ -302,7 +302,7 @@ def test_block_production( # noqa: C901 pool_id_dec = helpers.decode_bech32(pool_id) pool_mapping[pool_id_dec] = {"pool_id": pool_id, "pool_idx": idx} - # delegate to each pool + # Delegate to each pool delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, 
@@ -310,7 +310,7 @@ def test_block_production( # noqa: C901 pool_id=pool_id, ) - # create sqlite db + # Create sqlite db conn = sqlite3.connect(configuration.BLOCK_PRODUCTION_DB) cur = conn.cursor() cur.execute("CREATE TABLE IF NOT EXISTS runs(run_id, topology)") @@ -338,7 +338,7 @@ def _save_state(curr_epoch: int) -> None: ) blocks_before: dict[str, int] = ledger_state["blocksBefore"] - # save blocks data to sqlite db + # Save blocks data to sqlite db cur = conn.cursor() for pool_id_dec, num_blocks in blocks_before.items(): pool_rec = pool_mapping[pool_id_dec] @@ -374,7 +374,7 @@ def _save_state(curr_epoch: int) -> None: curr_epoch = cluster.wait_for_new_epoch(padding_seconds=5) epoch_end_timestamp = cluster.time_to_epoch_end() + time.time() - # send tx + # Send tx src_addr, dst_addr = random.sample(payment_addrs, 2) destinations = [clusterlib.TxOut(address=dst_addr.address, amount=1_000_000)] tx_files = clusterlib.TxFiles(signing_key_files=[src_addr.skey_file]) @@ -389,7 +389,7 @@ def _save_state(curr_epoch: int) -> None: time.sleep(2) curr_time = time.time() - # save also data for the last epoch + # Save also data for the last epoch _save_state(cluster.g_query.get_epoch()) conn.close() diff --git a/cardano_node_tests/tests/test_chain_transactions.py b/cardano_node_tests/tests/test_chain_transactions.py index 774fb9459..ba62260ae 100644 --- a/cardano_node_tests/tests/test_chain_transactions.py +++ b/cardano_node_tests/tests/test_chain_transactions.py @@ -55,11 +55,11 @@ def _gen_signed_tx( send_amount = txin.amount - fee out_file = f"{tx_name}_tx.body" - # create Tx data + # Create Tx data txout = clusterlib.TxOut(address=out_addr.address, amount=send_amount) tx_files = clusterlib.TxFiles(signing_key_files=[payment_addr.skey_file]) - # build Tx + # Build Tx tx_raw_output = cluster_obj.g_transaction.build_raw_tx_bare( out_file=out_file, txouts=[txout], @@ -69,14 +69,14 @@ def _gen_signed_tx( invalid_hereafter=invalid_hereafter, ) - # sign Tx + # Sign Tx tx_file = 
cluster_obj.g_transaction.sign_tx( tx_body_file=tx_raw_output.out_file, tx_name=tx_name, signing_key_files=tx_files.signing_key_files, ) - # transform output of this Tx (`TxOut`) to input for next Tx (`UTXOData`) + # Transform output of this Tx (`TxOut`) to input for next Tx (`UTXOData`) txid = cluster_obj.g_transaction.get_txid(tx_body_file=tx_raw_output.out_file) out_utxo = clusterlib.UTXOData( utxo_hash=txid, @@ -194,7 +194,7 @@ def test_tx_chaining( raise AssertionError(submit_err) if configuration.HAS_DBSYNC: - # wait a bit for all Txs to appear in db-sync + # Wait a bit for all Txs to appear in db-sync time.sleep(5) check_tx_outs = [ diff --git a/cardano_node_tests/tests/test_cli.py b/cardano_node_tests/tests/test_cli.py index 0247a09c7..0cc194dfd 100644 --- a/cardano_node_tests/tests/test_cli.py +++ b/cardano_node_tests/tests/test_cli.py @@ -317,12 +317,12 @@ def test_address_info_script(self, cluster: clusterlib.ClusterLib): """Check script address info.""" temp_template = common.get_test_id(cluster) - # create payment address + # Create payment address payment_rec = cluster.g_address.gen_payment_addr_and_keys( name=temp_template, ) - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -331,7 +331,7 @@ def test_address_info_script(self, cluster: clusterlib.ClusterLib): slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create script address + # Create script address address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) @@ -349,21 +349,21 @@ def test_address_info_payment_with_outfile(self, cluster: clusterlib.ClusterLib) """Compare payment address info with and without outfile provided.""" common.get_test_id(cluster) - # just a static address to preform the test + # Just a static address to preform the test address = 
"addr_test1vzp4kj0rmnl5q5046e2yy697fndej56tm35jekemj6ew2gczp74wk" - # get address information + # Get address information cli_out = cluster.cli(["address", "info", "--address", str(address)]) address_info_no_outfile = json.loads(cli_out.stdout.rstrip().decode("utf-8")) - # get address information using an output file + # Get address information using an output file out_file = "/dev/stdout" cli_out = cluster.cli( ["address", "info", "--address", str(address), "--out-file", out_file] ) address_info_with_outfile = json.loads(cli_out.stdout.rstrip().decode("utf-8")) - # check if the information obtained by the two methods is the same + # Check if the information obtained by the two methods is the same assert ( address_info_no_outfile == address_info_with_outfile ), "Address information doesn't match" @@ -649,22 +649,22 @@ def test_non_extended_key_valid(self, cluster: clusterlib.ClusterLib): """Check that the non-extended verification key is according the verification key.""" temp_template = common.get_test_id(cluster) - # get an extended verification key + # Get an extended verification key payment_keys = cluster.g_address.gen_payment_key_pair( key_name=f"{temp_template}_extended", extended=True ) with open(payment_keys.vkey_file, encoding="utf-8") as in_file: - # ignore the first 4 chars, just an informative keyword + # Ignore the first 4 chars, just an informative keyword extended_vkey = json.loads(in_file.read().strip()).get("cborHex", "")[4:] - # get a non-extended verification key using the extended key + # Get a non-extended verification key using the extended key non_extended_key_file = cluster.g_key.gen_non_extended_verification_key( key_name=temp_template, extended_verification_key_file=payment_keys.vkey_file ) with open(non_extended_key_file, encoding="utf-8") as in_file: - # ignore the first 4 chars, just an informative keyword + # Ignore the first 4 chars, just an informative keyword non_extended_vkey = json.loads(in_file.read().strip()).get("cborHex", 
"")[4:] assert extended_vkey.startswith(non_extended_vkey) @@ -696,12 +696,12 @@ def test_non_extended_key_error(self, cluster: clusterlib.ClusterLib): """ temp_template = common.get_test_id(cluster) - # get an extended key + # Get an extended key payment_keys = cluster.g_address.gen_payment_key_pair( key_name=f"{temp_template}_extended", extended=True ) - # try to get a non-extended verification key using the extended signing key + # Try to get a non-extended verification key using the extended signing key with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_key.gen_non_extended_verification_key( key_name=temp_template, extended_verification_key_file=payment_keys.skey_file @@ -747,14 +747,14 @@ def test_pretty_utxo( amount1 = 2_000_000 amount2 = 2_500_000 - # create source and destination payment addresses + # Create source and destination payment addresses payment_addrs = clusterlib_utils.create_payment_addr_records( f"{temp_template}_src", f"{temp_template}_dst", cluster_obj=cluster, ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster, @@ -962,7 +962,7 @@ def _check_stake_snapshot( # noqa: C901 try: if option == "single_pool": - # make sure the queries can be finished in single epoch + # Make sure the queries can be finished in single epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster_obj, start=1, @@ -979,13 +979,13 @@ def _check_stake_snapshot( # noqa: C901 stake_pool_ids=expected_pool_ids ) elif option == "all_pools": - # sleep till the end of epoch for stable stake distribution + # Sleep till the end of epoch for stable stake distribution clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster_obj, start=common.EPOCH_START_SEC_LEDGER_STATE, stop=common.EPOCH_STOP_SEC_LEDGER_STATE, ) - # get up-to-date list of available pools + # Get up-to-date list of available pools expected_pool_ids = [ cluster_obj.g_stake_pool.get_stake_pool_id( 
cluster_manager.cache.addrs_data[p]["cold_key_pair"].vkey_file @@ -1052,7 +1052,7 @@ def _dump_on_error(): expected_pool_ids_dec = set(expected_pool_ids_mapping.values()) out_pool_ids_dec = set(stake_snapshot["pools"].keys()) - # retired pools and newly created ones may not yet be on the snapshot + # Retired pools and newly created ones may not yet be on the snapshot if not expected_pool_ids_dec.issubset(out_pool_ids_dec): errors.append( f"Expected pools: {expected_pool_ids_dec}\nVS\n" @@ -1060,7 +1060,7 @@ def _dump_on_error(): "Difference: " f"{expected_pool_ids_dec.symmetric_difference(out_pool_ids_dec)}" ) - # active stake can be lower than sum of stakes, as some pools may not be running + # Active stake can be lower than sum of stakes, as some pools may not be running # and minting blocks if sum_mark < stake_snapshot["total"]["stakeMark"]: total_stake_errors.append( diff --git a/cardano_node_tests/tests/test_configuration.py b/cardano_node_tests/tests/test_configuration.py index adb4fe105..3d1af79cd 100644 --- a/cardano_node_tests/tests/test_configuration.py +++ b/cardano_node_tests/tests/test_configuration.py @@ -25,12 +25,12 @@ def epoch_length_start_cluster() -> pl.Path: """Update *epochLength* to 1200.""" shared_tmp = temptools.get_pytest_shared_tmp() - # need to lock because this same fixture can run on several workers in parallel + # Need to lock because this same fixture can run on several workers in parallel with locking.FileLockIfXdist(f"{shared_tmp}/startup_files_epoch_1200.lock"): destdir = shared_tmp / "startup_files_epoch_1200" destdir.mkdir(exist_ok=True) - # return existing script if it is already generated by other worker + # Return existing script if it is already generated by other worker destdir_ls = list(destdir.glob("start-cluster*")) if destdir_ls: return destdir_ls[0] @@ -54,12 +54,12 @@ def slot_length_start_cluster() -> pl.Path: """Update *slotLength* to 0.3.""" shared_tmp = temptools.get_pytest_shared_tmp() - # need to lock because 
this same fixture can run on several workers in parallel + # Need to lock because this same fixture can run on several workers in parallel with locking.FileLockIfXdist(f"{shared_tmp}/startup_files_slot_03.lock"): destdir = shared_tmp / "startup_files_slot_03" destdir.mkdir(exist_ok=True) - # return existing script if it is already generated by other worker + # Return existing script if it is already generated by other worker destdir_ls = list(destdir.glob("start-cluster*")) if destdir_ls: return destdir_ls[0] diff --git a/cardano_node_tests/tests/test_dbsync.py b/cardano_node_tests/tests/test_dbsync.py index 83a5e8792..9c2037e0e 100644 --- a/cardano_node_tests/tests/test_dbsync.py +++ b/cardano_node_tests/tests/test_dbsync.py @@ -23,7 +23,7 @@ LOGGER = logging.getLogger(__name__) -# all tests in this module need dbsync +# All tests in this module need dbsync pytestmark = pytest.mark.needs_dbsync @@ -128,7 +128,7 @@ def test_blocks(self, cluster: clusterlib.ClusterLib): # noqa: C901 block_no = int(tip["block"]) epoch = int(tip["epoch"]) - # check records for last 50 epochs + # Check records for last 50 epochs epoch_from = epoch - 50 epoch_from = epoch_from if epoch_from >= 0 else 0 @@ -230,7 +230,7 @@ def test_blocks(self, cluster: clusterlib.ClusterLib): # noqa: C901 ) raise AssertionError(msg) - # if cardano-node knows about Babbage and network is in Alonzo or higher era, check that + # If cardano-node knows about Babbage and network is in Alonzo or higher era, check that # the highest known protocol major version matches the expected value if rec and not (rec.proto_major == 8 and rec.proto_minor == 0): pytest.xfail( @@ -303,14 +303,14 @@ def test_reconnect_dbsync( cluster_nodes.restart_all_nodes() - # create source and destination payment addresses + # Create source and destination payment addresses payment_addrs = clusterlib_utils.create_payment_addr_records( f"{temp_template}_src", f"{temp_template}_dst", cluster_obj=cluster, ) - # fund source addresses + # 
Fund source addresses clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/test_delegation.py b/cardano_node_tests/tests/test_delegation.py index e984289b3..d27b3252d 100644 --- a/cardano_node_tests/tests/test_delegation.py +++ b/cardano_node_tests/tests/test_delegation.py @@ -76,7 +76,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -123,7 +123,7 @@ def pool_users_cluster_and_pool( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -176,7 +176,7 @@ def test_delegate_using_pool_id( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -219,7 +219,7 @@ def test_delegate_using_vkey( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool node_cold = cluster_manager.cache.addrs_data[pool_name]["cold_key_pair"] delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, @@ -416,7 +416,7 @@ def test_deregister_delegated( cluster, pool_id = cluster_and_pool_and_rewards temp_template = common.get_test_id(cluster) - # create two payment addresses that share single stake address (just to test that + # Create two payment addresses that share single stake address (just to test that # delegation works as expected even under such circumstances) stake_addr_rec = clusterlib_utils.create_stake_addr_records( f"{temp_template}_addr0", cluster_obj=cluster @@ -428,7 +428,7 @@ def test_deregister_delegated( stake_vkey_file=stake_addr_rec.vkey_file, ) - 
# fund payment address + # Fund payment address clusterlib_utils.fund_from_faucet( *payment_addr_recs, cluster_obj=cluster, @@ -442,7 +442,7 @@ def test_deregister_delegated( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -459,7 +459,7 @@ def test_deregister_delegated( cluster_obj=cluster, tx_raw_output=delegation_out.tx_raw_output ) if tx_db_deleg: - # check in db-sync that both payment addresses share single stake address + # Check in db-sync that both payment addresses share single stake address assert ( dbsync_utils.get_utxo(address=payment_addr_recs[0].address).stake_address == stake_addr_rec.address @@ -484,12 +484,12 @@ def test_deregister_delegated( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_id}' hasn't received any rewards" - # make sure we have enough time to finish deregistration in one epoch + # Make sure we have enough time to finish deregistration in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) - # files for deregistering stake address + # Files for deregistering stake address stake_addr_dereg_cert = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -503,7 +503,7 @@ def test_deregister_delegated( ], ) - # attempt to deregister the stake address - deregistration is expected to fail + # Attempt to deregister the stake address - deregistration is expected to fail # because there are rewards in the stake address with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.send_tx( @@ -522,7 +522,7 @@ def test_deregister_delegated( delegation_out.pool_user.stake.address 
).reward_account_balance - # withdraw rewards to payment address, deregister stake address + # Withdraw rewards to payment address, deregister stake address tx_raw_deregister_output = cluster.g_transaction.send_tx( src_address=src_address, tx_name=f"{temp_template}_dereg_withdraw", @@ -532,7 +532,7 @@ def test_deregister_delegated( ], ) - # check that the key deposit was returned and rewards withdrawn + # Check that the key deposit was returned and rewards withdrawn assert ( cluster.g_query.get_address_balance(src_address) == src_payment_balance @@ -541,7 +541,7 @@ def test_deregister_delegated( + cluster.g_query.get_address_deposit() ), f"Incorrect balance for source address `{src_address}`" - # check that the stake address is no longer delegated + # Check that the stake address is no longer delegated stake_addr_info = cluster.g_query.get_stake_addr_info( delegation_out.pool_user.stake.address ) @@ -592,7 +592,7 @@ def test_undelegate( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -604,7 +604,7 @@ def test_undelegate( cluster.g_query.get_epoch() == init_epoch ), "Delegation took longer than expected and would affect other checks" - # check records in db-sync + # Check records in db-sync tx_db_deleg = dbsync_utils.check_tx( cluster_obj=cluster, tx_raw_output=delegation_out.tx_raw_output ) @@ -623,7 +623,7 @@ def test_undelegate( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_id}' hasn't received any rewards" - # files for deregistering / re-registering stake address + # Files for deregistering / re-registering stake address address_deposit = common.get_conway_address_deposit(cluster_obj=cluster) stake_addr_dereg_cert_file = cluster.g_stake_address.gen_stake_addr_deregistration_cert( @@ -649,7 +649,7 
@@ def test_undelegate( delegation_out.pool_user.stake.address ).reward_account_balance - # withdraw rewards to payment address; deregister and re-register stake address + # Withdraw rewards to payment address; deregister and re-register stake address tx_raw_undeleg = cluster.g_transaction.send_tx( src_address=src_address, tx_name=f"{temp_template}_undeleg_withdraw", @@ -659,13 +659,13 @@ def test_undelegate( ], ) - # check that the key deposit was NOT returned and rewards were withdrawn + # Check that the key deposit was NOT returned and rewards were withdrawn assert ( cluster.g_query.get_address_balance(src_address) == src_payment_balance - tx_raw_undeleg.fee + reward_balance ), f"Incorrect balance for source address `{src_address}`" - # check that the stake address is no longer delegated + # Check that the stake address is no longer delegated stake_addr_info = cluster.g_query.get_stake_addr_info( delegation_out.pool_user.stake.address ) @@ -680,10 +680,10 @@ def test_undelegate( delegation_out.pool_user.stake.address ).reward_account_balance, "No reward was received next epoch after undelegation" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_undeleg) - # check records in db-sync + # Check records in db-sync tx_db_undeleg = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_undeleg) if tx_db_undeleg: assert delegation_out.pool_user.stake.address in tx_db_undeleg.stake_deregistration @@ -738,7 +738,7 @@ def test_addr_delegation_deregistration( address_deposit = common.get_conway_address_deposit(cluster_obj=cluster) - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=address_deposit, @@ -746,7 +746,7 @@ def test_addr_delegation_deregistration( stake_address=stake_address, ) - # create stake address 
deregistration cert + # Create stake address deregistration cert stake_addr_dereg_cert = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=address_deposit, @@ -754,7 +754,7 @@ def test_addr_delegation_deregistration( stake_address=stake_address, ) - # register stake address + # Register stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_reg_cert_file], signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -765,7 +765,7 @@ def test_addr_delegation_deregistration( tx_files=tx_files, ) - # check that the stake address is registered + # Check that the stake address is registered assert cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is not registered: {user_registered.stake.address}" @@ -774,7 +774,7 @@ def test_addr_delegation_deregistration( if tx_db_reg: assert user_registered.stake.address in tx_db_reg.stake_registration - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster.g_query.get_address_balance(user_payment.address) == src_init_balance - tx_raw_output_reg.fee - cluster.g_query.get_address_deposit() @@ -782,7 +782,7 @@ def test_addr_delegation_deregistration( src_registered_balance = cluster.g_query.get_address_balance(user_payment.address) - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=stake_vkey_file, @@ -796,7 +796,7 @@ def test_addr_delegation_deregistration( ) init_epoch = cluster.g_query.get_epoch() - # delegate and deregister stake address in single TX + # Delegate and deregister stake address in single TX tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_deleg_cert_file, stake_addr_dereg_cert], 
signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -834,7 +834,7 @@ def _build_deleg_dereg() -> clusterlib.TxRawOutput: tx_files=tx_files, ) - # check that the balance for source address was correctly updated and that the key + # Check that the balance for source address was correctly updated and that the key # deposit was returned assert ( cluster.g_query.get_address_balance(user_payment.address) @@ -843,7 +843,7 @@ def _build_deleg_dereg() -> clusterlib.TxRawOutput: + cluster.g_query.get_address_deposit() ), f"Incorrect balance for source address `{user_payment.address}`" - # check that the stake address was NOT delegated + # Check that the stake address was NOT delegated stake_addr_info = cluster.g_query.get_stake_addr_info(user_registered.stake.address) assert not stake_addr_info.delegation, f"Stake address was delegated: {stake_addr_info}" @@ -873,7 +873,7 @@ def test_delegation_cert_with_wrong_key( cluster, pool_id = cluster_and_pool temp_template = common.get_test_id(cluster) - # create stake address delegation cert, use wrong stake vkey + # Create stake address delegation cert, use wrong stake vkey with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", @@ -906,14 +906,14 @@ def test_delegate_addr_with_wrong_key( user_registered = pool_users_disposable_cluster_and_pool[0] user_payment = pool_users_cluster_and_pool[0].payment - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), stake_vkey_file=user_registered.stake.vkey_file, ) - # register stake address + # Register stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_reg_cert_file], signing_key_files=[user_payment.skey_file, 
user_registered.stake.skey_file], @@ -922,12 +922,12 @@ def test_delegate_addr_with_wrong_key( src_address=user_payment.address, tx_name=f"{temp_template}_reg", tx_files=tx_files ) - # check that the stake address is registered + # Check that the stake address is registered assert cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is not registered: {user_registered.stake.address}" - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, @@ -935,7 +935,7 @@ def test_delegate_addr_with_wrong_key( always_abstain=True, ) - # delegate stake address, use wrong payment skey + # Delegate stake address, use wrong payment skey tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_deleg_cert_file], signing_key_files=[pool_users_cluster_and_pool[1].payment.skey_file], @@ -971,7 +971,7 @@ def test_delegate_unknown_addr( user_registered = pool_users_disposable_cluster_and_pool[0] user_payment = pool_users_cluster_and_pool[0].payment - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, @@ -979,7 +979,7 @@ def test_delegate_unknown_addr( always_abstain=True, ) - # delegate unknown stake address + # Delegate unknown stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_deleg_cert_file], signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -1032,7 +1032,7 @@ def test_delegate_deregistered_addr( user_registered = pool_users_disposable_cluster_and_pool[0] user_payment = pool_users_cluster_and_pool[0].payment - # create stake address registration cert + # Create stake address registration cert 
address_deposit = common.get_conway_address_deposit(cluster_obj=cluster) stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", @@ -1040,7 +1040,7 @@ def test_delegate_deregistered_addr( stake_vkey_file=user_registered.stake.vkey_file, ) - # register stake address + # Register stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_reg_cert_file], signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -1049,12 +1049,12 @@ def test_delegate_deregistered_addr( src_address=user_payment.address, tx_name=f"{temp_template}_reg", tx_files=tx_files ) - # check that the stake address is registered + # Check that the stake address is registered assert cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is not registered: {user_registered.stake.address}" - # deregister stake address + # Deregister stake address stake_addr_dereg_cert_file = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=address_deposit, @@ -1073,12 +1073,12 @@ def test_delegate_deregistered_addr( tx_files=tx_files_deregister, ) - # check that the stake address is not registered + # Check that the stake address is not registered assert not cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is registered: {user_registered.stake.address}" - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, @@ -1086,7 +1086,7 @@ def test_delegate_deregistered_addr( always_abstain=True, ) - # delegate deregistered stake address + # Delegate deregistered stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_deleg_cert_file], 
signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -1136,14 +1136,14 @@ def test_delegatee_not_registered( user_registered = pool_users_disposable[0] user_payment = pool_users[0].payment - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), stake_vkey_file=user_registered.stake.vkey_file, ) - # register stake address + # Register stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_reg_cert_file], signing_key_files=[user_payment.skey_file, user_registered.stake.skey_file], @@ -1152,15 +1152,15 @@ def test_delegatee_not_registered( src_address=user_payment.address, tx_name=f"{temp_template}_reg", tx_files=tx_files ) - # check that the stake address is registered + # Check that the stake address is registered assert cluster.g_query.get_stake_addr_info( user_registered.stake.address ).address, f"Stake address is not registered: {user_registered.stake.address}" - # create pool cold keys and ceritifcate, but don't register the pool + # Create pool cold keys and ceritifcate, but don't register the pool node_cold = cluster.g_node.gen_cold_key_pair_and_counter(node_name=f"{temp_template}_pool") - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", stake_vkey_file=user_registered.stake.vkey_file, @@ -1168,7 +1168,7 @@ def test_delegatee_not_registered( always_abstain=True, ) - # delegate stake address + # Delegate stake address tx_files = clusterlib.TxFiles( certificate_files=[stake_addr_deleg_cert_file], signing_key_files=[pool_users[0].payment.skey_file], diff --git a/cardano_node_tests/tests/test_env_network_id.py 
b/cardano_node_tests/tests/test_env_network_id.py index 5aa185880..37fd9d00f 100644 --- a/cardano_node_tests/tests/test_env_network_id.py +++ b/cardano_node_tests/tests/test_env_network_id.py @@ -113,7 +113,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/test_kes.py b/cardano_node_tests/tests/test_kes.py index a83e8127d..03ffeda2b 100644 --- a/cardano_node_tests/tests/test_kes.py +++ b/cardano_node_tests/tests/test_kes.py @@ -116,7 +116,7 @@ def _check_block_production( ledger_state=ledger_state, ) - # check if the pool is minting any blocks + # Check if the pool is minting any blocks blocks_made = ledger_state["blocksCurrent"] or {} is_minting = pool_id_dec in blocks_made @@ -406,7 +406,7 @@ def test_opcert_invalid_kes_period( ignore_file_id=cluster_manager.worker_id, ) - # generate new operational certificate with `--kes-period` in the future + # Generate new operational certificate with `--kes-period` in the future invalid_kes_period = cluster.g_query.get_kes_period() + 100 invalid_opcert_file = cluster.g_node.gen_node_operational_cert( node_name=f"{node_name}_invalid_opcert_file", @@ -418,7 +418,7 @@ def test_opcert_invalid_kes_period( with cluster_manager.respin_on_failure(): with logfiles.expect_errors(expected_errors, worker_id=cluster_manager.worker_id): - # restart the node with the new operational certificate (restart all nodes so + # Restart the node with the new operational certificate (restart all nodes so # the connection is established again) shutil.copy(invalid_opcert_file, opcert_file) cluster_nodes.restart_all_nodes(delay=5) @@ -434,13 +434,13 @@ def test_opcert_invalid_kes_period( ) _save_metrics(pool_num=pool_num, temp_template=f"{temp_template}_{this_epoch}") - # check that the pool is not minting any blocks + # Check that the pool is not minting any blocks assert ( not is_minting ), 
f"The pool '{pool_name}' has minted blocks in epoch {this_epoch}" if invalid_opcert_epoch == 1: - # check kes-period-info with operational certificate with + # Check kes-period-info with operational certificate with # invalid `--kes-period` kes_period_info = cluster.g_query.get_kes_period_info(invalid_opcert_file) with open( @@ -458,7 +458,7 @@ def test_opcert_invalid_kes_period( ) ) - # test the `CounterOverIncrementedOCERT` error - the counter will now be +2 from + # Test the `CounterOverIncrementedOCERT` error - the counter will now be +2 from # last used opcert counter value if invalid_opcert_epoch == 2: overincrement_kes_period = cluster.g_query.get_kes_period() @@ -469,7 +469,7 @@ def test_opcert_invalid_kes_period( cold_counter_file=cold_counter_file, kes_period=overincrement_kes_period, ) - # copy the new certificate and restart the node (restart all nodes so + # Copy the new certificate and restart the node (restart all nodes so # the connection is established again) shutil.copy(overincrement_opcert_file, opcert_file) cluster_nodes.restart_all_nodes(delay=5) @@ -493,7 +493,7 @@ def test_opcert_invalid_kes_period( ) if invalid_opcert_epoch == 3: - # check kes-period-info with operational certificate with + # Check kes-period-info with operational certificate with # invalid kes-period kes_period_info = cluster.g_query.get_kes_period_info(invalid_opcert_file) with open( @@ -511,11 +511,11 @@ def test_opcert_invalid_kes_period( ) ) - # in Babbage we'll use the original counter for issuing new valid opcert so the counter + # In Babbage we'll use the original counter for issuing new valid opcert so the counter # value of new valid opcert equals to counter value of the original opcert +1 shutil.copy(cold_counter_file_orig, cold_counter_file) - # generate new operational certificate with valid `--kes-period` + # Generate new operational certificate with valid `--kes-period` valid_kes_period = cluster.g_query.get_kes_period() valid_opcert_file = 
cluster.g_node.gen_node_operational_cert( node_name=f"{node_name}_valid_opcert_file", @@ -524,7 +524,7 @@ def test_opcert_invalid_kes_period( cold_counter_file=cold_counter_file, kes_period=valid_kes_period, ) - # copy the new certificate and restart the node (restart all nodes so + # Copy the new certificate and restart the node (restart all nodes so # the connection is established again) shutil.copy(valid_opcert_file, opcert_file) cluster_nodes.restart_all_nodes(delay=5) @@ -541,7 +541,7 @@ def test_opcert_invalid_kes_period( ) _save_metrics(pool_num=pool_num, temp_template=f"{temp_template}_{this_epoch}") - # check that the pool is minting blocks + # Check that the pool is minting blocks if is_minting: break else: @@ -565,7 +565,7 @@ def test_opcert_invalid_kes_period( ) raise AssertionError(msg) - # check kes-period-info with valid operational certificate + # Check kes-period-info with valid operational certificate kes_period_info = cluster.g_query.get_kes_period_info(valid_opcert_file) with open(f"{temp_template}_kes_period_info_4.json", "w", encoding="utf-8") as out_fp: json.dump(kes_period_info, out_fp, indent=2) @@ -580,7 +580,7 @@ def test_opcert_invalid_kes_period( ) ) - # check kes-period-info with operational certificate with invalid kes-period + # Check kes-period-info with operational certificate with invalid kes-period kes_period_info = cluster.g_query.get_kes_period_info(invalid_opcert_file) with open(f"{temp_template}_kes_period_info_5.json", "w", encoding="utf-8") as out_fp: json.dump(kes_period_info, out_fp, indent=2) @@ -636,7 +636,7 @@ def test_update_valid_opcert( opcert_file_old = shutil.copy(opcert_file, f"{opcert_file}_old") with cluster_manager.respin_on_failure(): - # generate new operational certificate with valid `--kes-period` + # Generate new operational certificate with valid `--kes-period` new_kes_period = cluster.g_query.get_kes_period() new_opcert_file = cluster.g_node.gen_node_operational_cert( 
node_name=f"{node_name}_new_opcert_file", @@ -646,7 +646,7 @@ def test_update_valid_opcert( kes_period=new_kes_period, ) - # copy new operational certificate to the node + # Copy new operational certificate to the node logfiles.add_ignore_rule( files_glob="*.stdout", regex="MuxBearerClosed", @@ -654,12 +654,12 @@ def test_update_valid_opcert( ) shutil.copy(new_opcert_file, opcert_file) - # stop the node so the corresponding pool is not minting new blocks + # Stop the node so the corresponding pool is not minting new blocks cluster_nodes.stop_nodes([node_name]) time.sleep(10) - # check kes-period-info while the pool is not minting blocks + # Check kes-period-info while the pool is not minting blocks kes_period_info_new = cluster.g_query.get_kes_period_info(opcert_file) with open(f"{temp_template}_kes_period_info_1.json", "w", encoding="utf-8") as out_fp: json.dump(kes_period_info_new, out_fp, indent=2) @@ -694,7 +694,7 @@ def test_update_valid_opcert( f"New and old opcert counters don't match: {new_opcert_num} vs {old_opcert_num}" ) - # start the node with the new operational certificate (restart all nodes so + # Start the node with the new operational certificate (restart all nodes so # the connection is established again) cluster_nodes.restart_all_nodes(delay=5) @@ -710,7 +710,7 @@ def test_update_valid_opcert( ) _save_metrics(pool_num=pool_num, temp_template=f"{temp_template}_{this_epoch}") - # check that the pool is minting blocks + # Check that the pool is minting blocks if is_minting: break else: @@ -734,7 +734,7 @@ def test_update_valid_opcert( ) raise AssertionError(msg) - # check that metrics reported by kes-period-info got updated once the pool started + # Check that metrics reported by kes-period-info got updated once the pool started # minting blocks again kes_period_info_updated = cluster.g_query.get_kes_period_info(opcert_file) with open(f"{temp_template}_kes_period_info_3.json", "w", encoding="utf-8") as out_fp: @@ -759,7 +759,7 @@ def 
test_update_valid_opcert( f"Both updated and old opcert counters have same value '{old_opcert_num}'" ) - # check kes-period-info with operational certificate with a wrong counter + # Check kes-period-info with operational certificate with a wrong counter kes_period_info_invalid = cluster.g_query.get_kes_period_info(opcert_file_old) with open(f"{temp_template}_kes_period_info_4.json", "w", encoding="utf-8") as out_fp: json.dump(kes_period_info_invalid, out_fp, indent=2) @@ -791,7 +791,7 @@ def test_no_kes_period_arg( temp_template = common.get_test_id(cluster) out_file = pl.Path(f"{temp_template}_shouldnt_exist.opcert") - # try to generate new operational certificate without specifying the `--kes-period` + # Try to generate new operational certificate without specifying the `--kes-period` with pytest.raises(clusterlib.CLIError) as excinfo: cluster.cli( [ @@ -828,7 +828,7 @@ def test_negative_kes_period_arg( node_name = pool_name.replace("node-", "") - # generate new operational certificate with negative value for `--kes-period` + # Generate new operational certificate with negative value for `--kes-period` invalid_kes_period = -100 try: diff --git a/cardano_node_tests/tests/test_ledger_state.py b/cardano_node_tests/tests/test_ledger_state.py index e4dfef822..45d1f1a4e 100644 --- a/cardano_node_tests/tests/test_ledger_state.py +++ b/cardano_node_tests/tests/test_ledger_state.py @@ -39,7 +39,7 @@ def test_stake_snapshot(self, cluster: clusterlib.ClusterLib): # noqa: C901 # pylint: disable=too-many-statements,too-many-locals,too-many-branches temp_template = common.get_test_id(cluster) - # make sure the queries can be finished in single epoch + # Make sure the queries can be finished in single epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) @@ -86,22 +86,22 @@ def _get_delegations(snapshot: str) -> dict[str, list[str]]: f"{LEDGER_STATE_KEYS.difference(ledger_state_keys)}" ) - # stake addresses (hashes) 
and corresponding amounts + # Stake addresses (hashes) and corresponding amounts stake_mark = _get_hashes("pstakeMark") stake_set = _get_hashes("pstakeSet") stake_go = _get_hashes("pstakeGo") - # pools (hashes) and stake addresses (hashes) delegated to corresponding pool + # Pools (hashes) and stake addresses (hashes) delegated to corresponding pool delegations_mark = _get_delegations("pstakeMark") delegations_set = _get_delegations("pstakeSet") delegations_go = _get_delegations("pstakeGo") - # all delegated stake addresses (hashes) + # All delegated stake addresses (hashes) delegated_hashes_mark = set(itertools.chain.from_iterable(delegations_mark.values())) delegated_hashes_set = set(itertools.chain.from_iterable(delegations_set.values())) delegated_hashes_go = set(itertools.chain.from_iterable(delegations_go.values())) - # check if all delegated addresses are listed among stake addresses + # Check if all delegated addresses are listed among stake addresses stake_hashes_mark = set(stake_mark) if not delegated_hashes_mark.issubset(stake_hashes_mark): errors.append( @@ -131,7 +131,7 @@ def _get_delegations(snapshot: str) -> dict[str, list[str]]: for pool_id_dec in delegation_pool_ids: pool_id = helpers.encode_bech32(prefix="pool", data=pool_id_dec) - # get stake info from ledger state + # Get stake info from ledger state pstake_hashes_mark = delegations_mark.get(pool_id_dec) or () seen_hashes_mark.update(pstake_hashes_mark) pstake_amounts_mark = [stake_mark[h] for h in pstake_hashes_mark] @@ -147,7 +147,7 @@ def _get_delegations(snapshot: str) -> dict[str, list[str]]: pstake_amounts_go = [stake_go[h] for h in pstake_hashes_go] pstake_sum_go = functools.reduce(lambda x, y: x + y, pstake_amounts_go, 0) - # get stake info from `stake-snapshot` command + # Get stake info from `stake-snapshot` command stake_snapshot = cluster.g_query.get_stake_snapshot(stake_pool_ids=[pool_id]) if "pools" in stake_snapshot: pstake_mark_cmd = 
stake_snapshot["pools"][pool_id_dec]["stakeMark"] @@ -197,7 +197,7 @@ def _get_delegations(snapshot: str) -> dict[str, list[str]]: errors.append(f"total_set: {sum_set} != {stake_snapshot['total']['stakeSet']}") if sum_go != stake_snapshot["total"]["stakeGo"]: errors.append(f"total_go: {sum_go} != {stake_snapshot['total']['stakeGo']}") - # active stake can be lower than sum of stakes, as some pools may not be running + # Active stake can be lower than sum of stakes, as some pools may not be running # and minting blocks else: if sum_mark < stake_snapshot["activeStakeMark"]: diff --git a/cardano_node_tests/tests/test_metrics.py b/cardano_node_tests/tests/test_metrics.py index 681a3f2e8..506486566 100644 --- a/cardano_node_tests/tests/test_metrics.py +++ b/cardano_node_tests/tests/test_metrics.py @@ -16,7 +16,7 @@ LOGGER = logging.getLogger(__name__) -# skip all tests for now +# Skip all tests for now pytestmark = pytest.mark.skip(reason="metrics data are not stable yet") diff --git a/cardano_node_tests/tests/test_mir_certs.py b/cardano_node_tests/tests/test_mir_certs.py index 5fc36cb2e..96bb54e0a 100644 --- a/cardano_node_tests/tests/test_mir_certs.py +++ b/cardano_node_tests/tests/test_mir_certs.py @@ -73,7 +73,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *created_users, cluster_obj=cluster_pots, @@ -150,7 +150,7 @@ def test_transfer_to_treasury( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -171,7 +171,7 @@ def test_transfer_to_treasury( == clusterlib.calculate_utxos_balance(tx_raw_output.txins) - tx_raw_output.fee ), f"Incorrect balance for source address `{pool_user.payment.address}`" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, 
tx_raw_output=tx_raw_output) tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) @@ -190,9 +190,9 @@ def test_transfer_to_treasury( cluster.wait_for_new_epoch() pots_records = _wait_for_ada_pots(epoch_from=tx_epoch) - # normally `treasury[-1]` > `treasury[-2]` + # Normally `treasury[-1]` > `treasury[-2]` assert (pots_records[-1].treasury - pots_records[-2].treasury) > amount - # normally `reserves[-1]` < `reserves[-2]` + # Normally `reserves[-1]` < `reserves[-2]` assert (pots_records[-2].reserves - pots_records[-1].reserves) > amount @allure.link(helpers.get_vcs_link()) @@ -225,7 +225,7 @@ def test_build_transfer_to_treasury( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -271,9 +271,9 @@ def test_build_transfer_to_treasury( cluster.wait_for_new_epoch() pots_records = _wait_for_ada_pots(epoch_from=tx_epoch) - # normally `treasury[-1]` > `treasury[-2]` + # Normally `treasury[-1]` > `treasury[-2]` assert (pots_records[-1].treasury - pots_records[-2].treasury) > amount - # normally `reserves[-1]` < `reserves[-2]` + # Normally `reserves[-1]` < `reserves[-2]` assert (pots_records[-2].reserves - pots_records[-1].reserves) > amount @allure.link(helpers.get_vcs_link()) @@ -305,7 +305,7 @@ def test_transfer_to_reserves( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -326,7 +326,7 @@ def test_transfer_to_reserves( == clusterlib.calculate_utxos_balance(tx_raw_output.txins) - tx_raw_output.fee ), f"Incorrect balance for source address `{pool_user.payment.address}`" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) 
tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) @@ -345,9 +345,9 @@ def test_transfer_to_reserves( cluster.wait_for_new_epoch() pots_records = _wait_for_ada_pots(epoch_from=tx_epoch) - # normally `treasury[-1]` > `treasury[-2]` + # Normally `treasury[-1]` > `treasury[-2]` assert pots_records[-1].treasury < pots_records[-2].treasury - # normally `reserves[-1]` < `reserves[-2]` + # Normally `reserves[-1]` < `reserves[-2]` assert pots_records[-1].reserves > pots_records[-2].reserves @allure.link(helpers.get_vcs_link()) @@ -380,7 +380,7 @@ def test_build_transfer_to_reserves( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -426,9 +426,9 @@ def test_build_transfer_to_reserves( cluster.wait_for_new_epoch() pots_records = _wait_for_ada_pots(epoch_from=tx_epoch) - # normally `treasury[-1]` > `treasury[-2]` + # Normally `treasury[-1]` > `treasury[-2]` assert pots_records[-1].treasury < pots_records[-2].treasury - # normally `reserves[-1]` < `reserves[-2]` + # Normally `reserves[-1]` < `reserves[-2]` assert pots_records[-1].reserves > pots_records[-2].reserves @allure.link(helpers.get_vcs_link()) @@ -472,7 +472,7 @@ def test_pay_stake_addr_from( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -504,7 +504,7 @@ def test_pay_stake_addr_from( == init_reward + amount ), f"Incorrect reward balance for stake address `{registered_user.stake.address}`" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) @@ -565,7 +565,7 @@ def 
test_build_pay_stake_addr_from( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -675,7 +675,7 @@ def test_pay_stake_addr_from_both( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -818,7 +818,7 @@ def test_pay_multi_stake_addrs( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -951,19 +951,19 @@ def test_pay_unregistered_stake_addr_from( # noqa: C901 ], ) - # register the stake address, if it is supposed to be known on blockchain + # Register the stake address, if it is supposed to be known on blockchain if addr_history == "addr_known": tx_raw_out_reg = clusterlib_utils.register_stake_address( cluster_obj=cluster_pots, pool_user=pool_user, name_template=temp_template ) - # deregister the stake address before submitting the Tx with MIR cert + # Deregister the stake address before submitting the Tx with MIR cert if fund_src == TREASURY: tx_raw_out_dereg = clusterlib_utils.deregister_stake_address( cluster_obj=cluster_pots, pool_user=pool_user, name_template=temp_template ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() @@ -980,7 +980,7 @@ def test_pay_unregistered_stake_addr_from( # noqa: C901 tx_epoch = cluster.g_query.get_epoch() - # deregister the stake address after submitting the Tx with MIR cert + # Deregister the stake address after submitting the Tx with MIR cert if addr_history == "addr_known" and fund_src != 
TREASURY: tx_raw_out_dereg = clusterlib_utils.deregister_stake_address( cluster_obj=cluster_pots, pool_user=pool_user, name_template=temp_template @@ -1009,7 +1009,7 @@ def test_pay_unregistered_stake_addr_from( # noqa: C901 f"({tx_db_record.reserve[0].amount} != {amount})" ) - # wait for next epoch and check the reward + # Wait for next epoch and check the reward cluster.wait_for_new_epoch() assert ( @@ -1017,14 +1017,14 @@ def test_pay_unregistered_stake_addr_from( # noqa: C901 ), f"Reward was added for unregistered stake address `{pool_user.stake.address}`" if tx_db_record: - # check that the amount was not transferred out of the pot + # Check that the amount was not transferred out of the pot pots_records = _wait_for_ada_pots(epoch_from=tx_epoch) if fund_src == TREASURY: - # normally `treasury[-1]` > `treasury[-2]` + # Normally `treasury[-1]` > `treasury[-2]` assert abs(pots_records[-1].treasury - pots_records[-2].treasury) < amount else: - # normally `reserves[-1]` < `reserves[-2]` + # Normally `reserves[-1]` < `reserves[-2]` assert abs(pots_records[-2].reserves - pots_records[-1].reserves) < amount @@ -1066,7 +1066,7 @@ def test_exceed_pay_stake_addr_from( ], ) - # send the transaction at the beginning of an epoch + # Send the transaction at the beginning of an epoch if cluster.time_from_epoch_start() > (cluster.epoch_length_sec // 6): cluster.wait_for_new_epoch() diff --git a/cardano_node_tests/tests/test_native_tokens.py b/cardano_node_tests/tests/test_native_tokens.py index 493baa494..00d6b9dfd 100644 --- a/cardano_node_tests/tests/test_native_tokens.py +++ b/cardano_node_tests/tests/test_native_tokens.py @@ -85,7 +85,7 @@ def issuers_addrs( cluster_obj=cluster, ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -176,13 +176,13 @@ def test_minting_and_burning_witnesses( token_mint_addr = issuers_addrs[0] - # create issuers + # Create issuers if aname_type == "asset_name": 
_issuers_vkey_files = [p.vkey_file for p in issuers_addrs] payment_vkey_files = _issuers_vkey_files[1:] token_issuers = issuers_addrs else: - # create unique script/policyid for an empty asset name + # Create unique script/policyid for an empty asset name _empty_issuers = clusterlib_utils.create_payment_addr_records( *[f"token_minting_{temp_template}_{i}" for i in range(4)], cluster_obj=cluster, @@ -190,7 +190,7 @@ def test_minting_and_burning_witnesses( payment_vkey_files = [p.vkey_file for p in _empty_issuers] token_issuers = [issuers_addrs[0], *_empty_issuers] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -208,7 +208,7 @@ def test_minting_and_burning_witnesses( script=multisig_script, ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, new_tokens=[token_mint], @@ -220,7 +220,7 @@ def test_minting_and_burning_witnesses( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_mint, coins=[token]) assert token_utxo and token_utxo[0].amount == amount, "The token was not minted" - # token burning + # Token burning token_burn = dataclasses.replace(token_mint, amount=-amount) tx_out_burn = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -233,14 +233,14 @@ def test_minting_and_burning_witnesses( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_burn, coins=[token]) assert not token_utxo, "The token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( tx_out_burn.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_out_mint) tx_view.check_tx_view(cluster_obj=cluster, 
tx_raw_output=tx_out_burn) @@ -283,13 +283,13 @@ def test_minting_and_burning_sign( if aname_type == "asset_name": issuer_addr = issuers_addrs[1] else: - # create unique script/policyid for an empty asset name + # Create unique script/policyid for an empty asset name issuer_addr = clusterlib_utils.create_payment_addr_records( f"token_minting_{temp_template}", cluster_obj=cluster, )[0] - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -307,7 +307,7 @@ def test_minting_and_burning_sign( script=script, ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, new_tokens=[token_mint], @@ -319,7 +319,7 @@ def test_minting_and_burning_sign( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_mint, coins=[token]) assert token_utxo and token_utxo[0].amount == amount, "The token was not minted" - # token burning + # Token burning token_burn = dataclasses.replace(token_mint, amount=-amount) tx_out_burn = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -332,7 +332,7 @@ def test_minting_and_burning_sign( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_mint, coins=[token]) assert not token_utxo, "The token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( @@ -382,7 +382,7 @@ def test_minting_multiple_scripts( tokens_mint = [] for i in range(num_of_scripts): - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash( payment_vkey_file=i_addrs[i].vkey_file ) @@ -396,7 +396,7 @@ def test_minting_multiple_scripts( policyid = cluster.g_transaction.get_policyid(script) aname_token = f"{policyid}.{asset_name}" - # for each script mint both token identified by 
policyid + asset name and token + # For each script mint both token identified by policyid + asset name and token # identified by just policyid tokens_mint.extend( [ @@ -417,7 +417,7 @@ def test_minting_multiple_scripts( ] ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, new_tokens=tokens_mint, @@ -433,7 +433,7 @@ def test_minting_multiple_scripts( utxo_mint and utxo_mint[0].amount == amount ), f"The {t.token} token was not minted" - # token burning + # Token burning tokens_burn = [dataclasses.replace(t, amount=-amount) for t in tokens_mint] tx_out_burn = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -448,14 +448,14 @@ def test_minting_multiple_scripts( utxo_burn = clusterlib.filter_utxos(utxos=burn_utxos, coin=t.token) assert not utxo_burn, f"The {t.token} token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( tx_out_burn.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_out_mint) tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_out_burn) @@ -494,7 +494,7 @@ def test_minting_burning_diff_tokens_single_tx( token_mint_addr = issuers_addrs[0] issuer_addr = issuers_addrs[1] - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -519,7 +519,7 @@ def test_minting_burning_diff_tokens_single_tx( for t in tokens ] - # first token minting + # First token minting tx_out_mint1 = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, new_tokens=[tokens_mint[0]], @@ -532,7 +532,7 @@ def test_minting_burning_diff_tokens_single_tx( 
token1_mint_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_mint1, coins=[tokens[0]]) assert token1_mint_utxo and token1_mint_utxo[0].amount == amount, "The token was not minted" - # second token minting and first token burning in single TX + # Second token minting and first token burning in single TX token_burn1 = dataclasses.replace(tokens_mint[0], amount=-amount) tx_out_mint_burn = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -553,7 +553,7 @@ def test_minting_burning_diff_tokens_single_tx( ) assert token2_mint_utxo and token2_mint_utxo[0].amount == amount, "The token was not minted" - # second token burning + # Second token burning token_burn2 = dataclasses.replace(tokens_mint[1], amount=-amount) tx_out_burn2 = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -567,7 +567,7 @@ def test_minting_burning_diff_tokens_single_tx( token2_burn_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_burn2, coins=[tokens[1]]) assert not token2_burn_utxo, "The token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint_burn.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" @@ -610,7 +610,7 @@ def test_minting_burning_same_token_single_tx( token_mint_addr = issuers_addrs[0] issuer_addr = issuers_addrs[1] - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -620,7 +620,7 @@ def test_minting_burning_same_token_single_tx( policyid = cluster.g_transaction.get_policyid(script) token = f"{policyid}.{asset_name}" - # build and sign a transaction + # Build and sign a transaction tx_files = clusterlib.TxFiles( signing_key_files=[issuer_addr.skey_file, token_mint_addr.skey_file], ) @@ -649,7 +649,7 @@ def test_minting_burning_same_token_single_tx( tx_files=tx_files, txouts=txouts, 
fee_buffer=2_000_000, - # token minting and burning in the same TX + # Token minting and burning in the same TX mint=mint, witness_override=len(tx_files.signing_key_files), ) @@ -667,7 +667,7 @@ def test_minting_burning_same_token_single_tx( src_address=token_mint_addr.address, tx_name=f"{temp_template}_mint_burn", txouts=txouts, - # token minting and burning in the same TX + # Token minting and burning in the same TX mint=mint, tx_files=tx_files, fee=fee, @@ -679,7 +679,7 @@ def test_minting_burning_same_token_single_tx( tx_name=f"{temp_template}_mint_burn", ) - # submit signed transaction + # Submit signed transaction submit_utils.submit_tx( submit_method=submit_method, cluster_obj=cluster, @@ -690,7 +690,7 @@ def test_minting_burning_same_token_single_tx( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_output, coins=[token]) assert token_utxo and token_utxo[0].amount == 1, "The token was not minted" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_output.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" @@ -1024,7 +1024,7 @@ def test_minting_and_partial_burning( payment_vkey_files = [p.vkey_file for p in issuers_addrs] token_mint_addr = issuers_addrs[0] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1042,7 +1042,7 @@ def test_minting_and_partial_burning( script=multisig_script, ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, new_tokens=[token_mint], @@ -1055,7 +1055,7 @@ def test_minting_and_partial_burning( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_mint, coins=[token]) assert token_utxo and token_utxo[0].amount == amount, "The token was not minted" - # token burning + # Token burning # the `transaction build` command doesn't balance MAs, so use the `build-raw` with # 
clusterlib magic for this partial burning burn_amount = amount - 10 @@ -1073,7 +1073,7 @@ def test_minting_and_partial_burning( token_utxo and token_utxo[0].amount == amount - burn_amount ), "The token was not burned" - # burn the rest of tokens + # Burn the rest of tokens final_burn = dataclasses.replace(token_mint, amount=-10) tx_out_burn2 = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1084,7 +1084,7 @@ def test_minting_and_partial_burning( sign_incrementally=True, ) - # check expected fee + # Check expected fee assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" @@ -1125,7 +1125,7 @@ def test_minting_unicode_asset_name( token_mint_addr = issuers_addrs[0] issuer_addr = issuers_addrs[1] - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -1143,7 +1143,7 @@ def test_minting_unicode_asset_name( script=script, ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, new_tokens=[token_mint], @@ -1157,7 +1157,7 @@ def test_minting_unicode_asset_name( token_utxo and token_utxo[0].amount == amount ), "The token was not minted or expected chars are not present in the asset name" - # token burning + # Token burning token_burn = dataclasses.replace(token_mint, amount=-amount) tx_out_burn = clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -1170,7 +1170,7 @@ def test_minting_unicode_asset_name( token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_out_burn, coins=[token]) assert not token_utxo, "The token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( @@ -1206,7 +1206,7 @@ def test_valid_policy_after( token_mint_addr = 
issuers_addrs[0] payment_vkey_files = [p.vkey_file for p in issuers_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1232,7 +1232,7 @@ def test_valid_policy_after( ) ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, new_tokens=tokens_to_mint, @@ -1249,7 +1249,7 @@ def test_valid_policy_after( ) assert token_utxo and token_utxo[0].amount == amount, "The token was not minted" - # token burning + # Token burning tokens_to_burn = [dataclasses.replace(t, amount=-amount) for t in tokens_to_mint] tx_out_burn = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1267,7 +1267,7 @@ def test_valid_policy_after( ) assert not token_utxo, "The token was not burnt" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( @@ -1300,7 +1300,7 @@ def test_valid_policy_before( before_slot = cluster.g_query.get_slot_no() + 10_000 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1326,7 +1326,7 @@ def test_valid_policy_before( ) ) - # token minting + # Token minting tx_out_mint = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, new_tokens=tokens_to_mint, @@ -1343,7 +1343,7 @@ def test_valid_policy_before( ) assert token_utxo and token_utxo[0].amount == amount, "The token was not minted" - # token burning + # Token burning tokens_to_burn = [dataclasses.replace(t, amount=-amount) for t in tokens_to_mint] tx_out_burn = clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1361,7 +1361,7 @@ def test_valid_policy_before( ) assert not token_utxo, "The token was not burnt" - # check expected fees + # Check 
expected fees assert helpers.is_in_interval( tx_out_mint.fee, expected_fee, frac=0.15 ) and helpers.is_in_interval( @@ -1387,7 +1387,7 @@ def test_policy_before_past( before_slot = cluster.g_query.get_slot_no() - 1 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1413,7 +1413,7 @@ def test_policy_before_past( ) ) - # token minting - valid range, slot is already in the past + # Token minting - valid range, slot is already in the past with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1424,7 +1424,7 @@ def test_policy_before_past( ) assert "OutsideValidityIntervalUTxO" in str(excinfo.value) - # token minting - invalid range, slot is already in the past + # Token minting - invalid range, slot is already in the past with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1461,7 +1461,7 @@ def test_policy_before_future( before_slot = cluster.g_query.get_slot_no() + 10_000 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1487,7 +1487,7 @@ def test_policy_before_future( ) ) - # token minting - invalid range, slot is in the future + # Token minting - invalid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1524,7 +1524,7 @@ def test_policy_after_future( after_slot = cluster.g_query.get_slot_no() + 10_000 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1550,7 +1550,7 @@ def test_policy_after_future( ) ) - # token minting 
- valid range, slot is in the future + # Token minting - valid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1561,7 +1561,7 @@ def test_policy_after_future( ) assert "OutsideValidityIntervalUTxO" in str(excinfo.value) - # token minting - invalid range, slot is in the future + # Token minting - invalid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1598,7 +1598,7 @@ def test_policy_after_past( after_slot = cluster.g_query.get_slot_no() - 1 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1624,7 +1624,7 @@ def test_policy_after_past( ) ) - # token minting - valid slot, invalid range - `invalid_hereafter` is in the past + # Token minting - valid slot, invalid range - `invalid_hereafter` is in the past with pytest.raises(clusterlib.CLIError) as excinfo: clusterlib_utils.mint_or_burn_witness( cluster_obj=cluster, @@ -1669,7 +1669,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1738,7 +1738,7 @@ def test_transfer_tokens( clusterlib.TxOut(address=dst_address, amount=amount, coin=new_token.token), ] - # destinations with both native token and Lovelace (it doesn't matter on the amounts) for + # Destinations with both native token and Lovelace (it doesn't matter on the amounts) for # calculating minimum required Lovelace value for tx output calc_destinations = [ *ma_destinations, @@ -1890,7 +1890,7 @@ def test_transfer_multiple_tokens( clusterlib.TxOut(address=dst_address2, amount=amount, coin=t.token) ) - # destinations with both native token and Lovelace (it doesn't matter on the amounts) for + # Destinations 
with both native token and Lovelace (it doesn't matter on the amounts) for # calculating minimum required Lovelace value for tx output calc_destinations_address1 = [ *ma_destinations_address1, @@ -2113,7 +2113,7 @@ def test_transfer_invalid_token_amount( if use_build_cmd: with pytest.raises(clusterlib.CLIError) as excinfo: - # add ADA txout for change address - see node issue #3057 + # Add ADA txout for change address - see node issue #3057 destinations.append( clusterlib.TxOut(address=src_address, amount=min_amount_lovelace) ) @@ -2172,7 +2172,7 @@ def _mint_tx( token_mint_addr_skey_files = {n.token_mint_addr.skey_file for n in new_tokens} src_address = new_tokens[0].token_mint_addr.address - # build and sign a transaction + # Build and sign a transaction tx_files = clusterlib.TxFiles( signing_key_files=[*issuers_skey_files, *token_mint_addr_skey_files], ) @@ -2294,7 +2294,7 @@ def test_minting_amount_above_the_allowed( script=script, ) - # token minting + # Token minting with pytest.raises(clusterlib.CLIError) as excinfo: self._mint_tx( cluster_obj=cluster, @@ -2341,7 +2341,7 @@ def test_multiasset_txouts_syntax( token_mint_addr = issuers_addrs[0] issuer_addr = issuers_addrs[1] - # create simple script + # Create simple script keyhash = cluster.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -2376,19 +2376,19 @@ def test_multiasset_txouts_syntax( tx_raw_blueprint = cluster.g_transaction.build_raw_tx( src_address=token_mint_addr.address, tx_name=f"{temp_template}_mint_burn", - # token minting and burning in the same TX + # Token minting and burning in the same TX mint=mint, tx_files=tx_files, fee=fee, ) - # assemble CLI arguments for `transaction build` using data from `tx_raw_blueprint` + # Assemble CLI arguments for `transaction build` using data from `tx_raw_blueprint` assert tx_raw_blueprint.txins assert tx_raw_blueprint.txouts assert 
tx_raw_blueprint.mint - # test syntax for multi-asset values and txouts, see + # Test syntax for multi-asset values and txouts, see # https://github.com/IntersectMBO/cardano-node/pull/2072 coin_txouts = [f"{t.amount} {t.coin}" for t in tx_raw_blueprint.txouts] txout_parts = [ @@ -2424,10 +2424,10 @@ def test_multiasset_txouts_syntax( str(out_file), ] - # build transaction body + # Build transaction body cluster.cli(build_raw_args) - # create signed transaction + # Create signed transaction out_file_signed = cluster.g_transaction.sign_tx( tx_body_file=out_file, signing_key_files=tx_files.signing_key_files, @@ -2436,13 +2436,13 @@ def test_multiasset_txouts_syntax( tx_raw_output = dataclasses.replace(tx_raw_blueprint, out_file=out_file) - # submit signed transaction + # Submit signed transaction cluster.g_transaction.submit_tx(tx_file=out_file_signed, txins=tx_raw_output.txins) token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_raw_output, coins=[token]) assert token_utxo and token_utxo[0].amount == 1_000, "The token was not minted" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_raw_output.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" @@ -2495,7 +2495,7 @@ def test_script_reference_utxo( token_mint_addr = issuers_addrs[0] issuer_addr = issuers_addrs[1] - # create simple script + # Create simple script if script_version == "simple_v1": invalid_before = None invalid_hereafter = None @@ -2527,7 +2527,7 @@ def test_script_reference_utxo( policyid = cluster.g_transaction.get_policyid(script) token = f"{policyid}.{asset_name}" - # create reference UTxO + # Create reference UTxO reference_utxo, tx_out_reference = clusterlib_utils.create_reference_utxo( temp_template=temp_template, cluster_obj=cluster, @@ -2538,7 +2538,7 @@ def test_script_reference_utxo( ) assert reference_utxo.reference_script - # build and sign a transaction + # Build and sign a transaction tx_files = clusterlib.TxFiles( 
signing_key_files=[issuer_addr.skey_file, token_mint_addr.skey_file], ) @@ -2590,7 +2590,7 @@ def test_script_reference_utxo( src_address=token_mint_addr.address, tx_name=f"{temp_template}_mint_burn", txouts=txouts, - # token minting and burning in the same TX + # Token minting and burning in the same TX mint=mint, tx_files=tx_files, fee=fee, @@ -2604,7 +2604,7 @@ def test_script_reference_utxo( tx_name=f"{temp_template}_mint_burn", ) - # submit signed transaction + # Submit signed transaction cluster.g_transaction.submit_tx(tx_file=out_file_signed, txins=tx_raw_output.txins) token_utxo = cluster.g_query.get_utxo(tx_raw_output=tx_raw_output, coins=[token]) @@ -2612,10 +2612,10 @@ def test_script_reference_utxo( token_utxo and token_utxo[0].amount == amount - burn_amount ), "The token was not minted / burned" - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert cluster.g_query.get_utxo(utxo=reference_utxo), "Reference input was spent" - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_raw_output.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" diff --git a/cardano_node_tests/tests/test_node_upgrade.py b/cardano_node_tests/tests/test_node_upgrade.py index 3b120a9e6..37fe95b90 100644 --- a/cardano_node_tests/tests/test_node_upgrade.py +++ b/cardano_node_tests/tests/test_node_upgrade.py @@ -44,7 +44,7 @@ def payment_addr_locked( cluster_obj=cluster, )[0] - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, @@ -68,7 +68,7 @@ def payment_addrs_disposable( cluster_obj=cluster, ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/test_pool_saturation.py b/cardano_node_tests/tests/test_pool_saturation.py index 97e00f6e7..4cd51652a 100644 --- a/cardano_node_tests/tests/test_pool_saturation.py +++ 
b/cardano_node_tests/tests/test_pool_saturation.py @@ -142,7 +142,7 @@ def _check_pool_records(pool_records: dict[int, PoolRecord]) -> None: saturated_epoch = oversaturated_epoch - 2 nonsaturated_epoch = oversaturated_epoch - 4 - # check that rewards per block per stake for "pool2" in the epoch where the pool is + # Check that rewards per block per stake for "pool2" in the epoch where the pool is # oversaturated is lower than in epochs where pools are not oversaturated assert ( pool1_user_rewards_per_block[nonsaturated_epoch] @@ -170,7 +170,7 @@ def _check_pool_records(pool_records: dict[int, PoolRecord]) -> None: > pool2_user_rewards_per_block[oversaturated_epoch] ) - # check that oversaturated pool doesn't lead to increased rewards for pool owner + # Check that oversaturated pool doesn't lead to increased rewards for pool owner # when compared to saturated pool, i.e. total pool margin amount is not increased pool1_rew_fraction_sat = pool1_owner_rewards_per_block[saturated_epoch] pool2_rew_fraction_sat = pool2_owner_rewards_per_block[saturated_epoch] @@ -248,16 +248,16 @@ def _save_pool_records() -> None: with open(f"{temp_template}_pool_records.pickle", "wb") as out_data: pickle.dump(pool_records, out_data) - # make sure there are rewards already available + # Make sure there are rewards already available clusterlib_utils.wait_for_rewards(cluster_obj=cluster) - # make sure we have enough time to finish the delegation in one epoch + # Make sure we have enough time to finish the delegation in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificates and delegate to pools + # Submit registration certificates and delegate to pools for idx, res in enumerate(cluster_management.Resources.ALL_POOLS, start=1): pool_addrs_data = cluster_manager.cache.addrs_data[res] reward_addr = clusterlib.PoolUser( @@ -290,7 +290,7 @@ def 
_save_pool_records() -> None: saturation_amounts={}, ) - # record initial reward balance for each pool + # Record initial reward balance for each pool for pool_rec in pool_records.values(): user_payment_balance = cluster.g_query.get_address_balance( pool_rec.delegation_out.pool_user.payment.address @@ -327,7 +327,7 @@ def _save_pool_records() -> None: prev_epoch = pool_records[2].owner_rewards[-1].epoch_no this_epoch = cluster.wait_for_epoch(epoch_no=prev_epoch + 1, future_is_ok=False) - # make sure we have enough time to finish everything in single epoch + # Make sure we have enough time to finish everything in single epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=10, stop=50, force_epoch=True ) @@ -344,7 +344,7 @@ def _save_pool_records() -> None: ) for pool_rec in pool_records.values(): - # reward balance in previous epoch + # Reward balance in previous epoch prev_user_reward = pool_rec.user_rewards[-1].reward_total prev_owner_reward = pool_rec.owner_rewards[-1].reward_total @@ -352,7 +352,7 @@ def _save_pool_records() -> None: ledger_state["blocksBefore"].get(pool_rec.id_dec) or 0 ) - # current reward balance + # Current reward balance user_reward = cluster.g_query.get_stake_addr_info( pool_rec.delegation_out.pool_user.stake.address ).reward_account_balance @@ -360,7 +360,7 @@ def _save_pool_records() -> None: pool_rec.reward_addr.stake.address ).reward_account_balance - # total reward amounts received this epoch + # Total reward amounts received this epoch owner_reward_epoch = owner_reward - prev_owner_reward # We cannot compare with previous rewards in epochs where # `this_epoch >= init_epoch + epoch_withdrawal`. 
@@ -370,7 +370,7 @@ def _save_pool_records() -> None: else: user_reward_epoch = user_reward - prev_user_reward - # store collected rewards info + # Store collected rewards info user_payment_balance = cluster.g_query.get_address_balance( pool_rec.delegation_out.pool_user.payment.address ) @@ -398,13 +398,13 @@ def _save_pool_records() -> None: cluster_obj=cluster, ledger_state=ledger_state, pool_id=pool_rec.id ) - # check that pool owner received rewards + # Check that pool owner received rewards if this_epoch >= 5: assert ( owner_reward_epoch ), f"New reward was not received by pool owner of pool '{pool_rec.id}'" - # fund the delegated addresses - saturate all pools + # Fund the delegated addresses - saturate all pools if this_epoch == init_epoch + epoch_saturate: clusterlib_utils.fund_from_faucet( *[p.delegation_out.pool_user.payment for p in pool_records.values()], @@ -449,10 +449,10 @@ def _save_pool_records() -> None: force=True, ) - # transfer funds back to faucet so the pools are no longer (over)saturated + # Transfer funds back to faucet so the pools are no longer (over)saturated # and staked amount is +- same as the `initial_balance` if this_epoch >= init_epoch + epoch_withdrawal: - # withdraw rewards of pool users of all pools + # Withdraw rewards of pool users of all pools try: _withdraw_rewards( *[p.delegation_out.pool_user for p in pool_records.values()], @@ -504,7 +504,7 @@ def _save_pool_records() -> None: msg = "Failed to finish actions in single epoch, it would affect other checks" raise AssertionError(msg) except Exception: - # at this point the cluster needs respin in case of any failure + # At this point the cluster needs respin in case of any failure if cluster.g_query.get_epoch() >= init_epoch + epoch_saturate: cluster_manager.set_needs_respin() _save_pool_records() diff --git a/cardano_node_tests/tests/test_pools.py b/cardano_node_tests/tests/test_pools.py index 98120879e..ad0c8ab87 100644 --- a/cardano_node_tests/tests/test_pools.py +++ 
b/cardano_node_tests/tests/test_pools.py @@ -43,12 +43,12 @@ def pool_cost_start_cluster() -> pl.Path: """Update *minPoolCost* to 500.""" shared_tmp = temptools.get_pytest_shared_tmp() - # need to lock because this same fixture can run on several workers in parallel + # Need to lock because this same fixture can run on several workers in parallel with locking.FileLockIfXdist(f"{shared_tmp}/startup_files_pool_500.lock"): destdir = shared_tmp / "startup_files_pool_500" destdir.mkdir(exist_ok=True) - # return existing script if it is already generated by other worker + # Return existing script if it is already generated by other worker destdir_ls = list(destdir.glob("start-cluster*")) if destdir_ls: return destdir_ls[0] @@ -84,7 +84,7 @@ def _check_pool( pool_params=pool_params, pool_creation_data=pool_data ) - # check pool data in db-sync if available + # Check pool data in db-sync if available dbsync_utils.check_pool_data(ledger_pool_data=pool_params, pool_id=stake_pool_id) @@ -108,12 +108,12 @@ def _check_staking( for owner in pool_owners: stake_addr_info = cluster_obj.g_query.get_stake_addr_info(owner.stake.address) - # check that the stake address was delegated + # Check that the stake address was delegated assert stake_addr_info.delegation, f"Stake address was not delegated yet: {stake_addr_info}" assert stake_pool_id == stake_addr_info.delegation, "Stake address delegated to wrong pool" assert ( - # strip 'e0' from the beginning of the address hash + # Strip 'e0' from the beginning of the address hash helpers.decode_bech32(stake_addr_info.address)[2:] in pool_params["owners"] ), "'owner' value is different than expected" @@ -162,7 +162,7 @@ def _register_stake_pool_w_build( cold_key_pair.skey_file, ] - # submit the pool registration certificate through a tx + # Submit the pool registration certificate through a tx tx_files = clusterlib.TxFiles( certificate_files=[pool_reg_cert_file], signing_key_files=signing_key_files, @@ -177,7 +177,7 @@ def 
_register_stake_pool_w_build( witness_override=len(pool_owners) * 3, destination_dir=destination_dir, ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) tx_signed = cluster_obj.g_transaction.sign_tx( tx_body_file=tx_raw_output.out_file, signing_key_files=signing_key_files[:1], @@ -214,21 +214,21 @@ def _create_stake_pool_w_build( Returns: PoolCreationOutput: A tuple containing pool creation output. """ - # create the KES key pair + # Create the KES key pair node_kes = cluster_obj.g_node.gen_kes_key_pair( node_name=pool_data.pool_name, destination_dir=destination_dir, ) LOGGER.debug(f"KES keys created - {node_kes.vkey_file}; {node_kes.skey_file}") - # create the VRF key pair + # Create the VRF key pair node_vrf = cluster_obj.g_node.gen_vrf_key_pair( node_name=pool_data.pool_name, destination_dir=destination_dir, ) LOGGER.debug(f"VRF keys created - {node_vrf.vkey_file}; {node_vrf.skey_file}") - # create the cold key pair and node operational certificate counter + # Create the cold key pair and node operational certificate counter node_cold = cluster_obj.g_node.gen_cold_key_pair_and_counter( node_name=pool_data.pool_name, destination_dir=destination_dir, @@ -298,7 +298,7 @@ def _deregister_stake_pool_w_build( destination_dir=destination_dir, ) - # submit the pool deregistration certificate through a tx + # Submit the pool deregistration certificate through a tx tx_files = clusterlib.TxFiles( certificate_files=[pool_dereg_cert_file], signing_key_files=[ @@ -346,7 +346,7 @@ def _create_register_pool( src_address = pool_owners[0].payment.address src_init_balance = cluster_obj.g_query.get_address_balance(src_address) - # create and register pool + # Create and register pool if use_build_cmd: pool_creation_out = _create_stake_pool_w_build( cluster_obj=cluster_obj, @@ -362,7 +362,7 @@ def _create_register_pool( cluster_obj=cluster_obj, tx_raw_output=pool_creation_out.tx_raw_output ) - # deregister stake pool + # 
Deregister stake pool def _deregister(): depoch = 1 if cluster_obj.time_to_epoch_end() >= DEREG_BUFFER_SEC else 2 with helpers.change_cwd(temp_dir): @@ -377,7 +377,7 @@ def _deregister(): if request is not None: request.addfinalizer(_deregister) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster_obj.g_query.get_address_balance(src_address) == src_init_balance @@ -385,7 +385,7 @@ def _deregister(): - pool_creation_out.tx_raw_output.fee ), f"Incorrect balance for source address `{src_address}`" - # check that pool was correctly setup + # Check that pool was correctly setup _check_pool( cluster_obj=cluster_obj, stake_pool_id=pool_creation_out.stake_pool_id, @@ -410,12 +410,12 @@ def _create_register_pool_delegate_stake_tx( """ temp_dir = temp_dir.expanduser().resolve() - # create node VRF key pair + # Create node VRF key pair node_vrf = cluster_obj.g_node.gen_vrf_key_pair(node_name=pool_data.pool_name) - # create node cold key pair and counter + # Create node cold key pair and counter node_cold = cluster_obj.g_node.gen_cold_key_pair_and_counter(node_name=pool_data.pool_name) - # create stake address registration certs + # Create stake address registration certs stake_addr_reg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr{i}", @@ -425,7 +425,7 @@ def _create_register_pool_delegate_stake_tx( for i, p in enumerate(pool_owners) ] - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_delegation_cert( addr_name=f"{temp_template}_addr{i}", @@ -435,7 +435,7 @@ def _create_register_pool_delegate_stake_tx( for i, p in enumerate(pool_owners) ] - # create stake pool registration cert + # Create stake pool registration cert pool_reg_cert_file = cluster_obj.g_stake_pool.gen_pool_registration_cert( pool_data=pool_data, 
vrf_vkey_file=node_vrf.vkey_file, @@ -446,7 +446,7 @@ def _create_register_pool_delegate_stake_tx( src_address = pool_owners[0].payment.address src_init_balance = cluster_obj.g_query.get_address_balance(src_address) - # register and delegate stake address, create and register pool + # Register and delegate stake address, create and register pool tx_files = clusterlib.TxFiles( certificate_files=[ pool_reg_cert_file, @@ -479,7 +479,7 @@ def _create_register_pool_delegate_stake_tx( src_address=src_address, tx_name=f"{temp_template}_reg_deleg", tx_files=tx_files ) - # deregister stake pool + # Deregister stake pool def _deregister(): depoch = 1 if cluster_obj.time_to_epoch_end() >= DEREG_BUFFER_SEC else 2 with helpers.change_cwd(temp_dir): @@ -494,7 +494,7 @@ def _deregister(): if request is not None: request.addfinalizer(_deregister) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster_obj.g_query.get_address_balance(src_address) == src_init_balance @@ -503,7 +503,7 @@ def _deregister(): - tx_raw_output.fee ), f"Incorrect balance for source address `{src_address}`" - # check that pool and staking were correctly setup + # Check that pool and staking were correctly setup stake_pool_id = cluster_obj.g_stake_pool.get_stake_pool_id(node_cold.vkey_file) _check_pool(cluster_obj=cluster_obj, stake_pool_id=stake_pool_id, pool_data=pool_data) _check_staking( @@ -538,7 +538,7 @@ def _create_register_pool_tx_delegate_stake_tx( Common functionality for tests. 
""" - # create and register pool + # Create and register pool pool_creation_out = _create_register_pool( cluster_obj=cluster_obj, temp_template=temp_template, @@ -549,7 +549,7 @@ def _create_register_pool_tx_delegate_stake_tx( use_build_cmd=use_build_cmd, ) - # create stake address registration certs + # Create stake address registration certs stake_addr_reg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr{i}", @@ -559,7 +559,7 @@ def _create_register_pool_tx_delegate_stake_tx( for i, p in enumerate(pool_owners) ] - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_delegation_cert( addr_name=f"{temp_template}_addr{i}", @@ -572,7 +572,7 @@ def _create_register_pool_tx_delegate_stake_tx( src_address = pool_owners[0].payment.address src_init_balance = cluster_obj.g_query.get_address_balance(src_address) - # register and delegate stake address + # Register and delegate stake address tx_files = clusterlib.TxFiles( certificate_files=[*stake_addr_reg_cert_files, *stake_addr_deleg_cert_files], signing_key_files=[ @@ -601,7 +601,7 @@ def _create_register_pool_tx_delegate_stake_tx( src_address=src_address, tx_name=f"{temp_template}_reg_deleg", tx_files=tx_files ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster_obj.g_query.get_address_balance(src_address) == src_init_balance @@ -609,7 +609,7 @@ def _create_register_pool_tx_delegate_stake_tx( - tx_raw_output.fee ), f"Incorrect balance for source address `{src_address}`" - # check that staking was correctly setup + # Check that staking was correctly setup _check_staking( pool_owners, cluster_obj=cluster_obj, @@ -655,14 +655,14 @@ def test_stake_pool_metadata( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool 
owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=3, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -670,7 +670,7 @@ def test_stake_pool_metadata( amount=900_000_000, ) - # register pool and delegate stake address + # Register pool and delegate stake address pool_creation_out = _create_register_pool_delegate_stake_tx( cluster_obj=cluster, pool_owners=pool_owners, @@ -681,10 +681,10 @@ def test_stake_pool_metadata( use_build_cmd=use_build_cmd, ) - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=pool_creation_out.tx_raw_output) - # check dbsync `PoolOfflineData` table + # Check dbsync `PoolOfflineData` table if configuration.HAS_DBSYNC: pool_params = cluster.g_query.get_pool_state( stake_pool_id=pool_creation_out.stake_pool_id @@ -736,14 +736,14 @@ def test_stake_pool_not_avail_metadata( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=1, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -751,7 +751,7 @@ def test_stake_pool_not_avail_metadata( amount=900_000_000, ) - # register pool and delegate stake address + # Register pool and delegate stake address pool_creation_out = _create_register_pool_tx_delegate_stake_tx( cluster_obj=cluster, pool_owners=pool_owners, @@ -762,7 +762,7 @@ def test_stake_pool_not_avail_metadata( use_build_cmd=use_build_cmd, ) - # check dbsync `PoolOffChainFetchError` table + # Check dbsync `PoolOffChainFetchError` table # since the metadata url is invalid the dbsync dedicated thread will not fetch the data # and will 
insert an error on the specific table # https://github.com/IntersectMBO/cardano-db-sync/blob/master/doc/pool-offchain-data.md @@ -806,14 +806,14 @@ def test_create_stake_pool( pool_margin=0.123, ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=no_of_addr, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -821,7 +821,7 @@ def test_create_stake_pool( amount=900_000_000, ) - # register pool + # Register pool _create_register_pool( cluster_obj=cluster, temp_template=temp_template, @@ -871,14 +871,14 @@ def test_deregister_stake_pool( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=3, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -886,7 +886,7 @@ def test_deregister_stake_pool( amount=900_000_000, ) - # register pool and delegate stake address + # Register pool and delegate stake address pool_creation_out = _create_register_pool_tx_delegate_stake_tx( cluster_obj=cluster, pool_owners=pool_owners, @@ -903,7 +903,7 @@ def test_deregister_stake_pool( pool_owner.stake.address ).reward_account_balance - # deregister stake pool + # Deregister stake pool clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) @@ -963,7 +963,7 @@ def test_deregister_stake_pool( pool_id=pool_creation_out.stake_pool_id, retiring_epoch=depoch ) - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) @allure.link(helpers.get_vcs_link()) @@ -1014,12 +1014,12 @@ def test_reregister_stake_pool( 
pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -1027,7 +1027,7 @@ def test_reregister_stake_pool( amount=1_500_000_000, ) - # register pool and delegate stake address + # Register pool and delegate stake address pool_creation_out = _create_register_pool_delegate_stake_tx( cluster_obj=cluster, pool_owners=pool_owners, @@ -1036,7 +1036,7 @@ def test_reregister_stake_pool( pool_data=pool_data, ) - # deregister stake pool + # Deregister stake pool clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) @@ -1053,7 +1053,7 @@ def test_reregister_stake_pool( == depoch ) - # check that the pool was deregistered + # Check that the pool was deregistered cluster.wait_for_epoch(epoch_no=depoch, padding_seconds=5) assert not ( cluster.g_query.get_pool_state( @@ -1065,7 +1065,7 @@ def test_reregister_stake_pool( pool_id=pool_creation_out.stake_pool_id, retiring_epoch=depoch ) - # check that the stake addresses are no longer delegated + # Check that the stake addresses are no longer delegated for owner_rec in pool_owners: stake_addr_info = cluster.g_query.get_stake_addr_info(owner_rec.stake.address) assert ( @@ -1075,7 +1075,7 @@ def test_reregister_stake_pool( src_address = pool_owners[0].payment.address src_init_balance = cluster.g_query.get_address_balance(src_address) - # reregister the pool by resubmitting the pool registration certificate, + # Reregister the pool by resubmitting the pool registration certificate, # delegate stake address to pool again (the address is already registered) tx_files = clusterlib.TxFiles( certificate_files=[ @@ -1088,7 +1088,7 @@ def test_reregister_stake_pool( 
src_address=src_address, tx_name=temp_template, tx_files=tx_files ) - # deregister stake pool + # Deregister stake pool def _deregister(): depoch = 1 if cluster.time_to_epoch_end() >= DEREG_BUFFER_SEC else 2 with helpers.change_cwd(testfile_temp_dir): @@ -1102,7 +1102,7 @@ def _deregister(): request.addfinalizer(_deregister) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster.g_query.get_address_balance(src_address) == src_init_balance - tx_raw_output.fee - cluster.g_query.get_pool_deposit() @@ -1111,14 +1111,14 @@ def _deregister(): f"({src_init_balance}, {tx_raw_output.fee}, {cluster.g_query.get_pool_deposit()})" ) - # check that the stake addresses were delegated + # Check that the stake addresses were delegated _check_staking( pool_owners=pool_owners, cluster_obj=cluster, stake_pool_id=pool_creation_out.stake_pool_id, ) - # check that pool was correctly setup + # Check that pool was correctly setup _check_pool( cluster_obj=cluster, stake_pool_id=pool_creation_out.stake_pool_id, pool_data=pool_data ) @@ -1175,12 +1175,12 @@ def test_cancel_stake_pool_deregistration( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -1188,7 +1188,7 @@ def test_cancel_stake_pool_deregistration( amount=1_500_000_000, ) - # register pool and delegate stake address + # Register pool and delegate stake address pool_creation_out = _create_register_pool_delegate_stake_tx( cluster_obj=cluster, pool_owners=pool_owners, @@ -1197,7 +1197,7 @@ def test_cancel_stake_pool_deregistration( pool_data=pool_data, ) - # deregister stake pool in epoch + 2 + # Deregister stake pool in epoch + 2 
clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) @@ -1219,7 +1219,7 @@ def test_cancel_stake_pool_deregistration( src_address = pool_owners[0].payment.address src_init_balance = cluster.g_query.get_address_balance(src_address) - # reregister the pool by resubmitting the pool registration certificate, + # Reregister the pool by resubmitting the pool registration certificate, # delegate stake address to pool again (the address is already registered) tx_files = clusterlib.TxFiles( certificate_files=[ @@ -1235,7 +1235,7 @@ def test_cancel_stake_pool_deregistration( deposit=0, # no additional deposit, the pool is already registered ) - # deregister stake pool + # Deregister stake pool def _deregister(): depoch = 1 if cluster.time_to_epoch_end() >= DEREG_BUFFER_SEC else 2 with helpers.change_cwd(testfile_temp_dir): @@ -1249,7 +1249,7 @@ def _deregister(): request.addfinalizer(_deregister) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated # and no additional pool deposit was used assert ( cluster.g_query.get_address_balance(src_address) == src_init_balance - tx_raw_output.fee @@ -1264,12 +1264,12 @@ def _deregister(): msg = "Pool `{pool_creation_out.stake_pool_id}` got deregistered." 
raise AssertionError(msg) - # check that pool is still correctly setup + # Check that pool is still correctly setup _check_pool( cluster_obj=cluster, stake_pool_id=pool_creation_out.stake_pool_id, pool_data=pool_data ) - # check that the stake addresses is still delegated + # Check that the stake addresses is still delegated _check_staking( pool_owners=pool_owners, cluster_obj=cluster, @@ -1338,14 +1338,14 @@ def test_update_stake_pool_metadata( ), ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=no_of_addr, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -1353,7 +1353,7 @@ def test_update_stake_pool_metadata( amount=900_000_000 * no_of_addr, ) - # register pool + # Register pool pool_creation_out = _create_register_pool( cluster_obj=cluster, temp_template=temp_template, @@ -1364,13 +1364,13 @@ def test_update_stake_pool_metadata( use_build_cmd=use_build_cmd, ) - # make sure the update doesn't happen close to epoch boundary + # Make sure the update doesn't happen close to epoch boundary clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=10, stop=common.EPOCH_STOP_SEC_BUFFER ) update_epoch = cluster.g_query.get_epoch() - # update the pool metadata by resubmitting the pool registration certificate + # Update the pool metadata by resubmitting the pool registration certificate if use_build_cmd: _register_stake_pool_w_build( cluster_obj=cluster, @@ -1392,7 +1392,7 @@ def test_update_stake_pool_metadata( ) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) - # check that pool is going to be updated with correct data + # Check that pool is going to be updated with correct data future_params = cluster.g_query.get_pool_state( stake_pool_id=pool_creation_out.stake_pool_id ).future_pool_params @@ -1457,14 +1457,14 @@ def 
test_update_stake_pool_parameters( pool_data, pool_pledge=1, pool_cost=min_pool_cost + 1_000_000, pool_margin=0.9 ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=no_of_addr, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -1472,7 +1472,7 @@ def test_update_stake_pool_parameters( amount=900_000_000 * no_of_addr, ) - # register pool + # Register pool pool_creation_out = _create_register_pool( cluster_obj=cluster, temp_template=temp_template, @@ -1483,13 +1483,13 @@ def test_update_stake_pool_parameters( use_build_cmd=use_build_cmd, ) - # make sure the update doesn't happen close to epoch boundary + # Make sure the update doesn't happen close to epoch boundary clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=10, stop=common.EPOCH_STOP_SEC_BUFFER ) update_epoch = cluster.g_query.get_epoch() - # update the pool parameters by resubmitting the pool registration certificate + # Update the pool parameters by resubmitting the pool registration certificate if use_build_cmd: _register_stake_pool_w_build( cluster_obj=cluster, @@ -1511,7 +1511,7 @@ def test_update_stake_pool_parameters( ) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) - # check that pool is going to be updated with correct data + # Check that pool is going to be updated with correct data future_params = cluster.g_query.get_pool_state( stake_pool_id=pool_creation_out.stake_pool_id ).future_pool_params @@ -1558,14 +1558,14 @@ def test_sign_in_multiple_stages( pool_margin=0.01, ) - # create pool owners + # Create pool owners pool_owners = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=2, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owners[0].payment, cluster_obj=cluster, @@ -1573,12 
+1573,12 @@ def test_sign_in_multiple_stages( amount=900_000_000, ) - # create node VRF key pair + # Create node VRF key pair node_vrf = cluster.g_node.gen_vrf_key_pair(node_name=pool_data.pool_name) - # create node cold key pair and counter + # Create node cold key pair and counter node_cold = cluster.g_node.gen_cold_key_pair_and_counter(node_name=pool_data.pool_name) - # create stake pool registration cert + # Create stake pool registration cert pool_reg_cert_file = cluster.g_stake_pool.gen_pool_registration_cert( pool_data=pool_data, vrf_vkey_file=node_vrf.vkey_file, @@ -1589,7 +1589,7 @@ def test_sign_in_multiple_stages( src_address = pool_owners[0].payment.address src_init_balance = cluster.g_query.get_address_balance(src_address) - # keys to sign the TX with + # Keys to sign the TX with witness_skeys = ( pool_owners[0].payment.skey_file, pool_owners[1].payment.skey_file, @@ -1618,7 +1618,7 @@ def test_sign_in_multiple_stages( fee=fee, ) - # create witness file for each signing key + # Create witness file for each signing key witness_files = [ cluster.g_transaction.witness_tx( tx_body_file=tx_raw_output.out_file, @@ -1628,14 +1628,14 @@ def test_sign_in_multiple_stages( for idx, skey in enumerate(witness_skeys) ] - # sign TX using witness files + # Sign TX using witness files tx_witnessed_file = cluster.g_transaction.assemble_tx( tx_body_file=tx_raw_output.out_file, witness_files=witness_files, tx_name=temp_template ) - # create and register pool + # Create and register pool cluster.g_transaction.submit_tx(tx_file=tx_witnessed_file, txins=tx_raw_output.txins) - # deregister stake pool + # Deregister stake pool def _deregister(): depoch = 1 if cluster.time_to_epoch_end() >= DEREG_BUFFER_SEC else 2 with helpers.change_cwd(testfile_temp_dir): @@ -1649,7 +1649,7 @@ def _deregister(): request.addfinalizer(_deregister) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( 
cluster.g_query.get_address_balance(src_address) == src_init_balance - tx_raw_output.fee - cluster.g_query.get_pool_deposit() @@ -1657,7 +1657,7 @@ def _deregister(): cluster.wait_for_block(2) - # check that the pool was correctly registered on chain + # Check that the pool was correctly registered on chain stake_pool_id = cluster.g_stake_pool.get_stake_pool_id(node_cold.vkey_file) _check_pool( cluster_obj=cluster, @@ -1695,14 +1695,14 @@ def test_pool_registration_deregistration( pool_margin=0.01, ) - # create pool owners + # Create pool owners pool_owner = clusterlib_utils.create_pool_users( cluster_obj=cluster, name_template=temp_template, no_of_addr=1, )[0] - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( pool_owner.payment, cluster_obj=cluster, @@ -1718,7 +1718,7 @@ def test_pool_registration_deregistration( node_vrf = cluster.g_node.gen_vrf_key_pair(node_name=pool_data.pool_name) node_cold = cluster.g_node.gen_cold_key_pair_and_counter(node_name=pool_data.pool_name) - # create pool registration cert + # Create pool registration cert pool_reg_cert_file = cluster.g_stake_pool.gen_pool_registration_cert( pool_data=pool_data, vrf_vkey_file=node_vrf.vkey_file, @@ -1732,14 +1732,14 @@ def test_pool_registration_deregistration( ) dereg_epoch = cluster.g_query.get_epoch() - # create pool deregistration cert + # Create pool deregistration cert pool_dereg_cert_file = cluster.g_stake_pool.gen_pool_deregistration_cert( pool_name=pool_data.pool_name, cold_vkey_file=node_cold.vkey_file, epoch=cluster.g_query.get_epoch() + 1, ) - # register and deregister stake pool in single TX + # Register and deregister stake pool in single TX tx_files = clusterlib.TxFiles( certificate_files=[pool_reg_cert_file, pool_dereg_cert_file], signing_key_files=[ @@ -1754,7 +1754,7 @@ def test_pool_registration_deregistration( tx_files=tx_files, ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was 
correctly updated assert ( cluster.g_query.get_address_balance(pool_owner.payment.address) == src_init_balance - tx_raw_output.fee - cluster.g_query.get_pool_deposit() @@ -1843,7 +1843,7 @@ def test_stake_pool_low_cost( pool_margin=0.123, ) - # register pool, expect failure + # Register pool, expect failure with pytest.raises(clusterlib.CLIError) as excinfo: _create_register_pool( cluster_obj=cluster, @@ -1853,7 +1853,7 @@ def test_stake_pool_low_cost( pool_data=pool_data, ) - # check that it failed in an expected way + # Check that it failed in an expected way err = str(excinfo.value) if pool_cost < 0: assert "--pool-cost: Failed reading" in err or "expecting digit" in err @@ -1935,7 +1935,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -1974,9 +1974,9 @@ def gen_pool_registration_cert_data( ) pool_metadata_hash = cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file) - # create node VRF key pair + # Create node VRF key pair node_vrf = cluster.g_node.gen_vrf_key_pair(node_name=pool_name) - # create node cold key pair and counter + # Create node cold key pair and counter node_cold = cluster.g_node.gen_cold_key_pair_and_counter(node_name=pool_name) return pool_name, pool_metadata_hash, node_vrf, node_cold @@ -2091,7 +2091,7 @@ def test_pool_registration_missing_cold_skey( certificate_files=[pool_reg_cert_file], signing_key_files=[ pool_users[0].payment.skey_file, - # missing node_cold.skey_file + # Missing node_cold.skey_file ], ) @@ -2131,7 +2131,7 @@ def test_pool_registration_missing_payment_skey( tx_files = clusterlib.TxFiles( certificate_files=[pool_reg_cert_file], signing_key_files=[ - # missing payment skey file + # Missing payment skey file node_cold.skey_file, ], ) @@ -2465,7 +2465,7 @@ def test_stake_pool_long_metadata_url( pool_metadata_hash=pool_metadata_hash, ) - # create stake pool registration cert + # 
Create stake pool registration cert with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_stake_pool.gen_pool_registration_cert( pool_data=pool_data, diff --git a/cardano_node_tests/tests/test_scripts.py b/cardano_node_tests/tests/test_scripts.py index 7e21a567e..b05950ca9 100644 --- a/cardano_node_tests/tests/test_scripts.py +++ b/cardano_node_tests/tests/test_scripts.py @@ -55,9 +55,9 @@ def multisig_tx( ) -> clusterlib.TxRawOutput: """Build and submit multisig transaction.""" # pylint: disable=too-many-arguments - # create TX body + # Create TX body script_txins = ( - # empty `txins` means Tx inputs will be selected automatically by ClusterLib magic + # Empty `txins` means Tx inputs will be selected automatically by ClusterLib magic [clusterlib.ScriptTxIn(txins=[], script_file=multisig_script)] if multisig_script else [] ) destinations = [clusterlib.TxOut(address=dst_address, amount=amount)] @@ -94,7 +94,7 @@ def multisig_tx( invalid_before=invalid_before, ) - # create witness file for each key + # Create witness file for each key witness_files = [ cluster_obj.g_transaction.witness_tx( tx_body_file=tx_raw_output.out_file, @@ -104,14 +104,14 @@ def multisig_tx( for idx, skey in enumerate(payment_skey_files) ] - # sign TX using witness files + # Sign TX using witness files tx_witnessed_file = cluster_obj.g_transaction.assemble_tx( tx_body_file=tx_raw_output.out_file, witness_files=witness_files, tx_name=temp_template, ) - # submit signed TX + # Submit signed TX submit_utils.submit_tx( submit_method=submit_method, cluster_obj=cluster_obj, @@ -119,7 +119,7 @@ def multisig_tx( txins=tx_raw_output.txins, ) - # check final balances + # Check final balances out_utxos = cluster_obj.g_query.get_utxo(tx_raw_output=tx_raw_output) assert ( clusterlib.filter_utxos(utxos=out_utxos, address=src_address)[0].amount @@ -154,7 +154,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( 
addrs[0], cluster_obj=cluster, @@ -178,19 +178,19 @@ def test_script_addr_length( payment_vkey_files = [p.vkey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # check script address length + # Check script address length assert len(script_address) == len(payment_addrs[0].address) @allure.link(helpers.get_vcs_link()) @@ -212,19 +212,19 @@ def test_multisig_all( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -236,7 +236,7 @@ def test_multisig_all( submit_method=submit_method, ) - # send funds from script address + # Send funds from script address tx_out_from = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_from", @@ -278,21 +278,21 @@ def test_multisig_any( skeys_len = len(payment_skey_files) - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ANY, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create 
script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) tx_raw_outputs = [] - # send funds to script address + # Send funds to script address tx_raw_outputs.append( multisig_tx( cluster_obj=cluster, @@ -306,7 +306,7 @@ def test_multisig_any( ) ) - # send funds from script address using single witness + # Send funds from script address using single witness expected_fee = 204_969 for i in range(5): tx_raw_outputs.append( @@ -323,12 +323,12 @@ def test_multisig_any( ) ) - # check expected fees + # Check expected fees assert helpers.is_in_interval( tx_raw_outputs[-1].fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" - # send funds from script address using multiple witnesses + # Send funds from script address using multiple witnesses for i in range(5): num_of_skeys = random.randrange(2, skeys_len) tx_raw_outputs.append( @@ -370,7 +370,7 @@ def test_multisig_atleast( skeys_len = len(payment_skey_files) required = skeys_len - 4 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.AT_LEAST, @@ -378,14 +378,14 @@ def test_multisig_atleast( required=required, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) tx_raw_outputs = [] - # send funds to script address + # Send funds to script address tx_raw_outputs.append( multisig_tx( cluster_obj=cluster, @@ -399,7 +399,7 @@ def test_multisig_atleast( ) ) - # send funds from script address + # Send funds from script address for i in range(5): num_of_skeys = random.randrange(required, skeys_len) tx_raw_outputs.append( @@ -437,23 +437,23 @@ def test_normal_tx_to_script_addr( src_address = payment_addrs[0].address amount = 2_000_000 - # create multisig script + # Create multisig script 
multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, payment_vkey_files=[p.vkey_file for p in payment_addrs], ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # record initial balances + # Record initial balances src_init_balance = cluster.g_query.get_address_balance(src_address) dst_init_balance = cluster.g_query.get_address_balance(script_address) - # send funds to script address + # Send funds to script address destinations = [clusterlib.TxOut(address=script_address, amount=amount)] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) @@ -467,7 +467,7 @@ def test_normal_tx_to_script_addr( tx_files=tx_files, ) - # check final balances + # Check final balances assert ( cluster.g_query.get_address_balance(src_address) == src_init_balance - amount - tx_raw_output.fee @@ -500,19 +500,19 @@ def test_normal_tx_from_script_addr( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ANY, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -524,16 +524,16 @@ def test_normal_tx_from_script_addr( submit_method=submit_method, ) - # record initial balances + # Record initial balances src_init_balance = cluster.g_query.get_address_balance(script_address) dst_init_balance = 
cluster.g_query.get_address_balance(dst_addr.address) - # send funds from script address + # Send funds from script address destinations = [clusterlib.TxOut(address=dst_addr.address, amount=amount)] tx_files = clusterlib.TxFiles( signing_key_files=[dst_addr.skey_file], ) - # empty `txins` means Tx inputs will be selected automatically by ClusterLib magic + # Empty `txins` means Tx inputs will be selected automatically by ClusterLib magic script_txins = [clusterlib.ScriptTxIn(txins=[], script_file=multisig_script)] tx_out_from = clusterlib_utils.build_and_submit_tx( @@ -547,7 +547,7 @@ def test_normal_tx_from_script_addr( tx_files=tx_files, ) - # check final balances + # Check final balances assert ( cluster.g_query.get_address_balance(script_address) == src_init_balance - amount - tx_out_from.fee @@ -572,19 +572,19 @@ def test_multisig_empty_all( payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, payment_vkey_files=(), ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -594,7 +594,7 @@ def test_multisig_empty_all( payment_skey_files=[payment_skey_files[0]], ) - # send funds from script address + # Send funds from script address tx_out_from = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_from", @@ -605,7 +605,7 @@ def test_multisig_empty_all( multisig_script=multisig_script, ) - # check expected fees + # Check expected fees expected_fee = 176_809 assert helpers.is_in_interval( tx_out_from.fee, expected_fee, frac=0.15 @@ -627,7 +627,7 @@ def test_multisig_no_required_atleast( 
payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.AT_LEAST, @@ -635,12 +635,12 @@ def test_multisig_no_required_atleast( required=0, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -650,7 +650,7 @@ def test_multisig_no_required_atleast( payment_skey_files=[payment_skey_files[0]], ) - # send funds from script address + # Send funds from script address try: tx_out_from = multisig_tx( cluster_obj=cluster, @@ -667,7 +667,7 @@ def test_multisig_no_required_atleast( return raise - # check expected fees + # Check expected fees expected_fee = 176_765 assert helpers.is_in_interval( tx_out_from.fee, expected_fee, frac=0.15 @@ -700,7 +700,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -725,19 +725,19 @@ def test_multisig_all_missing_skey( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_raw_output = 
multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -747,7 +747,7 @@ def test_multisig_all_missing_skey( payment_skey_files=[payment_skey_files[0]], ) - # send funds from script address, omit one skey + # Send funds from script address, omit one skey with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -779,19 +779,19 @@ def test_multisig_any_unlisted_skey( payment_vkey_files = [p.vkey_file for p in payment_addrs[:-1]] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ANY, payment_vkey_files=payment_vkey_files, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_raw_output = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -801,7 +801,7 @@ def test_multisig_any_unlisted_skey( payment_skey_files=[payment_skey_files[0]], ) - # send funds from script address, use skey that is not listed in the script + # Send funds from script address, use skey that is not listed in the script with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -836,7 +836,7 @@ def test_multisig_atleast_low_num_of_skeys( skeys_len = len(payment_skey_files) required = skeys_len - 4 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.AT_LEAST, @@ -844,12 +844,12 @@ def test_multisig_atleast_low_num_of_skeys( required=required, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, 
payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_raw_output = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -859,7 +859,7 @@ def test_multisig_atleast_low_num_of_skeys( payment_skey_files=[payment_skey_files[0]], ) - # send funds from script address, use lower number of skeys then required + # Send funds from script address, use lower number of skeys then required for num_of_skeys in range(1, required): with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( @@ -897,7 +897,7 @@ def _fund_script_time_locking( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster_obj.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -906,12 +906,12 @@ def _fund_script_time_locking( slot_type_arg=slot_type_arg, ) - # create script address + # Create script address script_address = cluster_obj.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_output = multisig_tx( cluster_obj=cluster_obj, temp_template=f"{temp_template}_to", @@ -944,7 +944,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1074,7 +1074,7 @@ def test_script_after( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1083,12 +1083,12 @@ def test_script_after( slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create script address + 
# Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -1099,7 +1099,7 @@ def test_script_after( use_build_cmd=use_build_cmd, ) - # send funds from script address + # Send funds from script address invalid_hereafter = cluster.g_query.get_slot_no() + 1_000 if use_tx_validity else None tx_out_from = multisig_tx( cluster_obj=cluster, @@ -1114,13 +1114,13 @@ def test_script_after( use_build_cmd=use_build_cmd, ) - # check expected fees + # Check expected fees expected_fee = 280_693 if use_build_cmd else 323_857 assert helpers.is_in_interval( tx_out_from.fee, expected_fee, frac=0.15 ), "TX fee doesn't fit the expected interval" - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_out_from) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_out_to) @@ -1149,7 +1149,7 @@ def test_script_before( before_slot = cluster.g_query.get_slot_no() + 10_000 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1158,12 +1158,12 @@ def test_script_before( slot_type_arg=clusterlib.MultiSlotTypeArgs.BEFORE, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -1174,7 +1174,7 @@ def test_script_before( use_build_cmd=use_build_cmd, ) - # send funds from script address + # Send funds from script address tx_out_from = multisig_tx( cluster_obj=cluster, 
temp_template=f"{temp_template}_from", @@ -1188,7 +1188,7 @@ def test_script_before( use_build_cmd=use_build_cmd, ) - # check expected fees + # Check expected fees expected_fee = 279_241 if use_build_cmd else 323_989 assert helpers.is_in_interval( tx_out_from.fee, expected_fee, frac=0.15 @@ -1225,7 +1225,7 @@ def test_tx_missing_validity( slot_num = 100 slot_type_arg = clusterlib.MultiSlotTypeArgs.AFTER - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1234,12 +1234,12 @@ def test_tx_missing_validity( slot_type_arg=slot_type_arg, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -1250,7 +1250,7 @@ def test_tx_missing_validity( use_build_cmd=use_build_cmd, ) - # send funds from script address - missing required validity interval + # Send funds from script address - missing required validity interval with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1283,7 +1283,7 @@ def test_tx_negative_validity( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1292,12 +1292,12 @@ def test_tx_negative_validity( slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script 
address multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -1308,7 +1308,7 @@ def test_tx_negative_validity( use_build_cmd=use_build_cmd, ) - # send funds from script address - negative validity interval + # Send funds from script address - negative validity interval with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1374,7 +1374,7 @@ def test_before_past( payment_skey_files = [p.skey_file for p in payment_addrs] - # send funds from script address - valid range, slot is already in the past + # Send funds from script address - valid range, slot is already in the past with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1391,7 +1391,7 @@ def test_before_past( err_str = str(excinfo.value) assert "OutsideValidityIntervalUTxO" in err_str, err_str - # send funds from script address - invalid range, slot is already in the past + # Send funds from script address - invalid range, slot is already in the past with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1441,7 +1441,7 @@ def test_before_future( payment_skey_files = [p.skey_file for p in payment_addrs] - # send funds from script address - invalid range, slot is in the future + # Send funds from script address - invalid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1493,7 +1493,7 @@ def test_after_future( payment_skey_files = [p.skey_file for p in payment_addrs] - # send funds from script address - valid range, slot is in the future + # Send funds from script address - valid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1510,7 +1510,7 @@ def test_after_future( err_str = str(excinfo.value) assert "OutsideValidityIntervalUTxO" in err_str, err_str - # send funds from script address - invalid range, slot is in the future + # Send funds from script 
address - invalid range, slot is in the future with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( cluster_obj=cluster, @@ -1562,7 +1562,7 @@ def test_after_past( payment_skey_files = [p.skey_file for p in payment_addrs] - # send funds from script address - valid slot, + # Send funds from script address - valid slot, # invalid range - `invalid_hereafter` is in the past with pytest.raises(clusterlib.CLIError) as excinfo: multisig_tx( @@ -1610,7 +1610,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1640,7 +1640,7 @@ def test_tx_script_metadata_json( payment_vkey_files = [p.vkey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1697,7 +1697,7 @@ def test_tx_script_metadata_cbor( payment_vkey_files = [p.vkey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.AT_LEAST, @@ -1755,7 +1755,7 @@ def test_tx_script_no_metadata( payment_vkey_files = [p.vkey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ANY, @@ -1801,7 +1801,7 @@ def test_tx_script_invalid( tx_files = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], - # not valid script file + # Not valid script file auxiliary_script_files=[JSON_METADATA_FILE], ) @@ -1844,7 +1844,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1886,7 +1886,7 
@@ def test_incremental_signing( before_slot = cluster.g_query.get_slot_no() + 10_000 - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -1895,12 +1895,12 @@ def test_incremental_signing( slot_type_arg=clusterlib.MultiSlotTypeArgs.BEFORE, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -1912,18 +1912,18 @@ def test_incremental_signing( submit_method=submit_method, ) - # record initial balances + # Record initial balances src_init_balance = cluster.g_query.get_address_balance(script_address) dst_init_balance = cluster.g_query.get_address_balance(dst_addr.address) - # send funds from script address + # Send funds from script address destinations = [clusterlib.TxOut(address=dst_addr.address, amount=amount)] tx_files = clusterlib.TxFiles( metadata_json_files=[JSON_METADATA_FILE], metadata_cbor_files=[CBOR_METADATA_FILE], signing_key_files=payment_skey_files, ) - # empty `txins` means Tx inputs will be selected automatically by ClusterLib magic + # Empty `txins` means Tx inputs will be selected automatically by ClusterLib magic script_txins = [clusterlib.ScriptTxIn(txins=[], script_file=multisig_script)] invalid_hereafter = cluster.g_query.get_slot_no() + 1_000 @@ -1962,7 +1962,7 @@ def test_incremental_signing( invalid_before=100, ) - # sign or witness Tx body with first 2 skey and thus create Tx file that will be used for + # Sign or witness Tx body with first 2 skey and thus create Tx file that will be used for # incremental signing if tx_is == "signed": tx_signed = cluster.g_transaction.sign_tx( @@ -1971,7 +1971,7 @@ def test_incremental_signing( 
tx_name=f"{temp_template}_from0", ) else: - # sign Tx body using witness files + # Sign Tx body using witness files witness_files = [ cluster.g_transaction.witness_tx( tx_body_file=tx_out_from.out_file, @@ -1986,9 +1986,9 @@ def test_incremental_signing( tx_name=f"{temp_template}_from0", ) - # incrementally sign the already signed Tx with rest of required skeys + # Incrementally sign the already signed Tx with rest of required skeys for idx, skey in enumerate(payment_skey_files[2:], start=1): - # sign multiple times with the same skey to see that it doesn't affect Tx fee + # Sign multiple times with the same skey to see that it doesn't affect Tx fee for r in range(5): tx_signed = cluster.g_transaction.sign_tx( tx_file=tx_signed, @@ -2003,7 +2003,7 @@ def test_incremental_signing( txins=tx_out_from.txins, ) - # check final balances + # Check final balances assert ( cluster.g_query.get_address_balance(script_address) == src_init_balance - amount - tx_out_from.fee @@ -2044,7 +2044,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -2075,7 +2075,7 @@ def test_script_utxo_datum( src_addr = payment_addrs[0] dst_addr = payment_addrs[1] - # create multisig script + # Create multisig script if script_version == "simple_v1": multisig_script = pl.Path(f"{temp_template}_multisig.script") script_content = { @@ -2095,7 +2095,7 @@ def test_script_utxo_datum( slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) @@ -2153,7 +2153,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -2286,14 +2286,14 @@ def test_script_reference_utxo( 
common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert cluster.g_query.get_utxo(utxo=reference_utxo), "Reference input was spent" dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_out_reference) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_out_to) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_out_from) - # check expected script type + # Check expected script type if ( script_type_str == "SimpleScriptV1" and reference_utxo.reference_script["script"]["type"] == "SimpleScriptV2" @@ -2336,12 +2336,12 @@ def test_spend_reference_script( reference_addr = payment_addrs[1] if address_type == "byron": - # create reference UTxO on Byron address + # Create reference UTxO on Byron address reference_addr = clusterlib_utils.gen_byron_addr( cluster_obj=cluster, name_template=temp_template ) - # create multisig script + # Create multisig script if script_version == "simple_v1": multisig_script = pl.Path(f"{temp_template}_multisig.script") script_content = { @@ -2361,7 +2361,7 @@ def test_spend_reference_script( slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create reference UTxO + # Create reference UTxO reference_utxo, tx_out_reference = clusterlib_utils.create_reference_utxo( temp_template=temp_template, cluster_obj=cluster, @@ -2372,7 +2372,7 @@ def test_spend_reference_script( ) assert reference_utxo.reference_script - # spend the reference UTxO + # Spend the reference UTxO destinations = [clusterlib.TxOut(address=payment_addr.address, amount=amount)] tx_files = clusterlib.TxFiles( signing_key_files=[reference_addr.skey_file], @@ -2392,7 +2392,7 @@ def test_spend_reference_script( witness_count_add=0 if use_build_cmd else 2, ) - # check that the reference UTxO was spent + # Check that the reference UTxO was spent assert not cluster.g_query.get_utxo( utxo=reference_utxo ), f"Reference script UTxO was NOT spent: 
'{reference_utxo}`" @@ -2428,7 +2428,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -2461,7 +2461,7 @@ def test_nested_script( dst_addr2 = payment_addrs[2] dst_addr3 = payment_addrs[3] - # create multisig script + # Create multisig script multisig_script = pl.Path(f"{temp_template}_multisig.script") script_content = { "type": type_top, @@ -2495,12 +2495,12 @@ def test_nested_script( with open(multisig_script, "w", encoding="utf-8") as fp_out: json.dump(script_content, fp_out, indent=4) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -2512,7 +2512,7 @@ def test_nested_script( submit_method=submit_method, ) - # we don't need to include any signatures for the nested "any" case, meeting the slot range + # We don't need to include any signatures for the nested "any" case, meeting the slot range # is enough payment_skey_files = [] if type_nested == "all": @@ -2524,7 +2524,7 @@ def test_nested_script( if not payment_skey_files: payment_skey_files.append(dst_addr2.skey_file) - # fund script address + # Fund script address invalid_hereafter = cluster.g_query.get_slot_no() + 1_000 tx_out_from = multisig_tx( cluster_obj=cluster, @@ -2561,7 +2561,7 @@ def test_nested_optional_all( dst_addr1 = payment_addrs[1] - # create multisig script + # Create multisig script multisig_script = pl.Path(f"{temp_template}_multisig.script") script_content = { "type": "any", @@ -2592,12 +2592,12 @@ def test_nested_optional_all( with open(multisig_script, "w", encoding="utf-8") as fp_out: json.dump(script_content, fp_out, indent=4) - # create script address + # Create script address script_address = 
cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address tx_out_to = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -2609,7 +2609,7 @@ def test_nested_optional_all( submit_method=submit_method, ) - # fund script address + # Fund script address tx_out_from = multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_from", @@ -2666,7 +2666,7 @@ def test_invalid( # noqa: C901 type_top = "all" type_nested = "any" payment_skey_files = [dst_addr1.skey_file, dst_addr2.skey_file] - # valid interval is in the future + # Valid interval is in the future invalid_hereafter = last_slot_no + 1_000 invalid_before = invalid_hereafter - 100 script_top = [] @@ -2676,7 +2676,7 @@ def test_invalid( # noqa: C901 type_top = "all" type_nested = "any" payment_skey_files = [dst_addr1.skey_file, dst_addr2.skey_file] - # valid interval is in the past + # Valid interval is in the past invalid_hereafter = last_slot_no - 10 invalid_before = 10 script_top = [] @@ -2698,7 +2698,7 @@ def test_invalid( # noqa: C901 payment_skey_files = [dst_addr1.skey_file, dst_addr2.skey_file] invalid_before = 10 invalid_hereafter = last_slot_no + 1_000 - # conflicting intervals + # Conflicting intervals script_top = [{"type": "before", "slot": invalid_before + 10}] script_nested = [{"type": "after", "slot": invalid_before + 11}] expected_err = "ScriptWitnessNotValidatingUTXOW" @@ -2708,7 +2708,7 @@ def test_invalid( # noqa: C901 invalid_before = 10 invalid_hereafter = last_slot_no + 1_000 payment_skey_files = [dst_addr1.skey_file, dst_addr2.skey_file] - # valid interval is in the past + # Valid interval is in the past script_top = [{"type": "before", "slot": last_slot_no - 100}] script_nested = [] expected_err = "ScriptWitnessNotValidatingUTXOW" @@ -2717,7 +2717,7 @@ def test_invalid( # noqa: C901 type_nested = "all" invalid_before = 10 invalid_hereafter = last_slot_no + 
1_000 - # none of the "ANY" conditions are met: + # None of the "ANY" conditions are met: # `dst_addr1.skey_file` is missing # nested "ALL" condition is not met - `dst_addr3.skey_file` is missing payment_skey_files = [dst_addr2.skey_file] @@ -2728,7 +2728,7 @@ def test_invalid( # noqa: C901 type_top = "any" type_nested = "all" payment_skey_files = [dst_addr2.skey_file] - # valid interval is in the future + # Valid interval is in the future invalid_hereafter = last_slot_no + 1_000 invalid_before = last_slot_no + 200 script_top = [{"type": "after", "slot": invalid_before}] @@ -2740,7 +2740,7 @@ def test_invalid( # noqa: C901 invalid_before = 10 invalid_hereafter = last_slot_no + 1_000 payment_skey_files = [dst_addr2.skey_file, dst_addr3.skey_file] - # none of the "ANY" conditions are met: + # None of the "ANY" conditions are met: # `dst_addr1.skey_file` is missing # nested "ALL" condition is not met - the valid interval is in the past script_top = [] @@ -2751,7 +2751,7 @@ def test_invalid( # noqa: C901 type_nested = "all" invalid_before = 10 invalid_hereafter = last_slot_no + 1_000 - # none of the "ANY" conditions are met: + # None of the "ANY" conditions are met: # `dst_addr1.skey_file` is missing # valid interval is in the past # nested "ALL" condition is not met - `dst_addr3.skey_file` is missing @@ -2763,7 +2763,7 @@ def test_invalid( # noqa: C901 msg = f"Unknown scenario: {scenario}" raise AssertionError(msg) - # create multisig script + # Create multisig script multisig_script = pl.Path(f"{temp_template}_multisig.script") script_content = { "type": type_top, @@ -2798,12 +2798,12 @@ def test_invalid( # noqa: C901 with open(multisig_script, "w", encoding="utf-8") as fp_out: json.dump(script_content, fp_out, indent=4) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # fund script address + # Fund script address tx_raw_output = multisig_tx( 
cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -2855,7 +2855,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -2884,7 +2884,7 @@ def test_script_v2( payment_vkey_files = [p.vkey_file for p in payment_addrs] payment_skey_files = [p.skey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, @@ -2893,12 +2893,12 @@ def test_script_v2( slot_type_arg=clusterlib.MultiSlotTypeArgs.AFTER, ) - # create script address + # Create script address script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=multisig_script ) - # send funds to script address + # Send funds to script address multisig_tx( cluster_obj=cluster, temp_template=f"{temp_template}_to", @@ -2909,7 +2909,7 @@ def test_script_v2( submit_method=submit_method, ) - # try to send funds from script address + # Try to send funds from script address with pytest.raises((clusterlib.CLIError, submit_api.SubmitApiError)) as excinfo: multisig_tx( cluster_obj=cluster, @@ -2948,7 +2948,7 @@ def test_auxiliary_scripts( payment_vkey_files = [p.vkey_file for p in payment_addrs] - # create multisig script + # Create multisig script multisig_script = cluster.g_transaction.build_multisig_script( script_name=temp_template, script_type_arg=clusterlib.MultiSigTypeArgs.ALL, diff --git a/cardano_node_tests/tests/test_socket_path.py b/cardano_node_tests/tests/test_socket_path.py index 4adcb60ee..71ae36226 100644 --- a/cardano_node_tests/tests/test_socket_path.py +++ b/cardano_node_tests/tests/test_socket_path.py @@ -105,7 +105,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, 
diff --git a/cardano_node_tests/tests/test_staking_no_rewards.py b/cardano_node_tests/tests/test_staking_no_rewards.py index 8ad62db7f..6eb44a063 100644 --- a/cardano_node_tests/tests/test_staking_no_rewards.py +++ b/cardano_node_tests/tests/test_staking_no_rewards.py @@ -76,7 +76,7 @@ def test_no_reward_unmet_pledge1( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -94,13 +94,13 @@ def test_no_reward_unmet_pledge1( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_name}' hasn't received any rewards" - # make sure we have enough time to finish the pool update in one epoch + # Make sure we have enough time to finish the pool update in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) update_epoch = cluster.g_query.get_epoch() - # load and update original pool data + # Load and update original pool data loaded_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=f"changed_{pool_name}", pool_id=pool_id ) @@ -108,7 +108,7 @@ def test_no_reward_unmet_pledge1( loaded_data, pool_pledge=loaded_data.pool_pledge * 9 ) - # increase the needed pledge amount - update the pool parameters by resubmitting the pool + # Increase the needed pledge amount - update the pool parameters by resubmitting the pool # registration certificate cluster.g_stake_pool.register_stake_pool( pool_data=pool_data_updated, @@ -136,7 +136,7 @@ def test_no_reward_unmet_pledge1( return_orig_epoch = cluster.wait_for_epoch(epoch_no=no_rewards_epoch + 3, padding_seconds=5) with cluster_manager.respin_on_failure(): - # check that NO new rewards were received by those delegating to the pool + # Check that NO new rewards were received by those delegating to 
the pool assert ( orig_user_reward == cluster.g_query.get_stake_addr_info( @@ -144,7 +144,7 @@ def test_no_reward_unmet_pledge1( ).reward_account_balance ), "Received unexpected rewards" - # check that pool owner is also NOT receiving rewards + # Check that pool owner is also NOT receiving rewards assert ( orig_owner_reward == cluster.g_query.get_stake_addr_info( @@ -154,7 +154,7 @@ def test_no_reward_unmet_pledge1( # Return the pool to the original state - restore pledge settings. - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. # are deducted clusterlib_utils.fund_from_faucet( pool_owner, @@ -164,7 +164,7 @@ def test_no_reward_unmet_pledge1( force=True, ) - # update the pool to original parameters by resubmitting + # Update the pool to original parameters by resubmitting # the pool registration certificate cluster.g_stake_pool.register_stake_pool( pool_data=loaded_data, @@ -178,7 +178,7 @@ def test_no_reward_unmet_pledge1( cluster.wait_for_epoch(epoch_no=return_orig_epoch + 5, padding_seconds=30) - # check that new rewards were received by those delegating to the pool + # Check that new rewards were received by those delegating to the pool assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -186,7 +186,7 @@ def test_no_reward_unmet_pledge1( ).reward_account_balance ), "New reward was not received by stake address" - # check that pool owner is also receiving rewards + # Check that pool owner is also receiving rewards assert ( orig_owner_reward < cluster.g_query.get_stake_addr_info( @@ -194,7 +194,7 @@ def test_no_reward_unmet_pledge1( ).reward_account_balance ), "New reward was not received by pool reward address" - # check that pledge is still met after the owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool_data = 
clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool_name, pool_id=pool_id ) @@ -240,7 +240,7 @@ def test_no_reward_unmet_pledge2( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -258,14 +258,14 @@ def test_no_reward_unmet_pledge2( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_name}' hasn't received any rewards" - # load pool data + # Load pool data loaded_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=f"changed_{pool_name}", pool_id=pool_id ) pledge_amount = loaded_data.pool_pledge // 2 - # withdraw part of the pledge + # Withdraw part of the pledge destinations = [ clusterlib.TxOut(address=delegation_out.pool_user.payment.address, amount=pledge_amount) ] @@ -297,7 +297,7 @@ def test_no_reward_unmet_pledge2( return_orig_epoch = cluster.wait_for_epoch(epoch_no=no_rewards_epoch + 3, padding_seconds=5) with cluster_manager.respin_on_failure(): - # check that NO new rewards were received by those delegating to the pool + # Check that NO new rewards were received by those delegating to the pool assert ( orig_user_reward == cluster.g_query.get_stake_addr_info( @@ -305,7 +305,7 @@ def test_no_reward_unmet_pledge2( ).reward_account_balance ), "Received unexpected rewards" - # check that pool owner is also NOT receiving rewards + # Check that pool owner is also NOT receiving rewards assert ( orig_owner_reward == cluster.g_query.get_stake_addr_info( @@ -315,7 +315,7 @@ def test_no_reward_unmet_pledge2( # Return the pool to the original state - restore pledge funds. - # fund user address so it has enough funds for fees etc. + # Fund user address so it has enough funds for fees etc. 
clusterlib_utils.fund_from_faucet( delegation_out.pool_user, cluster_obj=cluster, @@ -324,7 +324,7 @@ def test_no_reward_unmet_pledge2( force=True, ) - # return pledge + # Return pledge destinations = [ clusterlib.TxOut( address=pool_owner.payment.address, amount=pledge_amount + 100_000_000 @@ -350,7 +350,7 @@ def test_no_reward_unmet_pledge2( cluster.wait_for_epoch(epoch_no=return_orig_epoch + 5, padding_seconds=30) - # check that new rewards were received by those delegating to the pool + # Check that new rewards were received by those delegating to the pool assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -358,7 +358,7 @@ def test_no_reward_unmet_pledge2( ).reward_account_balance ), "New reward was not received by stake address" - # check that pool owner is also receiving rewards + # Check that pool owner is also receiving rewards assert ( orig_owner_reward < cluster.g_query.get_stake_addr_info( @@ -405,7 +405,7 @@ def test_no_reward_deregistered_stake_addr( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -423,7 +423,7 @@ def test_no_reward_deregistered_stake_addr( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_name}' hasn't received any rewards" - # withdraw rewards from owner's stake address if there are any + # Withdraw rewards from owner's stake address if there are any if cluster.g_query.get_stake_addr_info(pool_owner.stake.address).reward_account_balance: cluster.g_stake_address.withdraw_reward( stake_addr_record=pool_owner.stake, @@ -431,7 +431,7 @@ def test_no_reward_deregistered_stake_addr( tx_name=temp_template, ) - # deregister stake address - owner's stake is lower than pledge + # Deregister stake address - owner's stake is lower than pledge stake_addr_dereg_cert = 
cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -451,7 +451,7 @@ def test_no_reward_deregistered_stake_addr( ) with cluster_manager.respin_on_failure(): - # check that the key deposit was returned + # Check that the key deposit was returned assert ( cluster.g_query.get_address_balance(pool_owner.payment.address) == src_init_balance @@ -459,7 +459,7 @@ def test_no_reward_deregistered_stake_addr( + cluster.g_query.get_address_deposit() ), f"Incorrect balance for source address `{pool_owner.payment.address}`" - # check that the stake address is no longer delegated + # Check that the stake address is no longer delegated assert not cluster.g_query.get_stake_addr_info( pool_owner.stake.address ), "Stake address still delegated" @@ -477,7 +477,7 @@ def test_no_reward_deregistered_stake_addr( epoch_no=no_rewards_epoch + 3, padding_seconds=5 ) - # check that NO new rewards were received by those delegating to the pool + # Check that NO new rewards were received by those delegating to the pool assert ( orig_user_reward == cluster.g_query.get_stake_addr_info( @@ -485,7 +485,7 @@ def test_no_reward_deregistered_stake_addr( ).reward_account_balance ), "Received unexpected rewards" - # check that pool owner is also NOT receiving rewards + # Check that pool owner is also NOT receiving rewards assert ( orig_owner_reward == cluster.g_query.get_stake_addr_info( @@ -496,7 +496,7 @@ def test_no_reward_deregistered_stake_addr( # Return the pool to the original state - reregister stake address and # delegate it to the pool. - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. 
# are deducted clusterlib_utils.fund_from_faucet( pool_owner, @@ -508,7 +508,7 @@ def test_no_reward_deregistered_stake_addr( src_updated_balance = cluster.g_query.get_address_balance(pool_owner.payment.address) - # reregister stake address and delegate it to pool + # Reregister stake address and delegate it to pool tx_files = clusterlib.TxFiles( certificate_files=[ pool_rec["stake_addr_registration_cert"], @@ -522,7 +522,7 @@ def test_no_reward_deregistered_stake_addr( tx_files=tx_files, ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster.g_query.get_address_balance(pool_owner.payment.address) == src_updated_balance - tx_raw_output.fee - cluster.g_query.get_address_deposit() @@ -530,7 +530,7 @@ def test_no_reward_deregistered_stake_addr( cluster.wait_for_epoch(epoch_no=return_orig_epoch + 4, padding_seconds=30) - # check that the stake address was delegated + # Check that the stake address was delegated stake_addr_info = cluster.g_query.get_stake_addr_info(pool_owner.stake.address) assert ( stake_addr_info.delegation @@ -538,7 +538,7 @@ def test_no_reward_deregistered_stake_addr( assert pool_id == stake_addr_info.delegation, "Stake address delegated to wrong pool" - # check that new rewards were received by those delegating to the pool + # Check that new rewards were received by those delegating to the pool assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -546,7 +546,7 @@ def test_no_reward_deregistered_stake_addr( ).reward_account_balance ), "New reward was not received by stake address" - # check that pool owner is also receiving rewards + # Check that pool owner is also receiving rewards assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -554,7 +554,7 @@ def test_no_reward_deregistered_stake_addr( ).reward_account_balance ), "New reward was not received by pool reward address" - # check that pledge is still met after the 
owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool_name, pool_id=pool_id ) @@ -601,7 +601,7 @@ def test_no_reward_deregistered_reward_addr( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -619,7 +619,7 @@ def test_no_reward_deregistered_reward_addr( delegation_out.pool_user.stake.address ).reward_account_balance, f"User of pool '{pool_name}' hasn't received any rewards" - # withdraw pool rewards to payment address + # Withdraw pool rewards to payment address # use `transaction build` if possible if common.BUILD_UNUSABLE: cluster.g_stake_address.withdraw_reward( @@ -635,7 +635,7 @@ def test_no_reward_deregistered_reward_addr( tx_name=temp_template, ) - # deregister the pool reward address + # Deregister the pool reward address stake_addr_dereg_cert = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -655,7 +655,7 @@ def test_no_reward_deregistered_reward_addr( ) with cluster_manager.respin_on_failure(): - # check that the key deposit was returned + # Check that the key deposit was returned assert ( cluster.g_query.get_address_balance(pool_reward.payment.address) == src_init_balance @@ -663,7 +663,7 @@ def test_no_reward_deregistered_reward_addr( + cluster.g_query.get_address_deposit() ), f"Incorrect balance for source address `{pool_reward.payment.address}`" - # check that the reward address is no longer delegated + # Check that the reward address is no longer delegated assert not cluster.g_query.get_stake_addr_info( pool_reward.stake.address ), "Stake address still 
delegated" @@ -674,7 +674,7 @@ def test_no_reward_deregistered_reward_addr( return_orig_epoch = cluster.wait_for_epoch(epoch_no=dereg_epoch + 3, padding_seconds=10) - # check that pool owner is NOT receiving rewards + # Check that pool owner is NOT receiving rewards assert ( cluster.g_query.get_stake_addr_info( pool_reward.stake.address @@ -682,7 +682,7 @@ def test_no_reward_deregistered_reward_addr( == 0 ), "Pool owner received unexpected rewards" - # check that new rewards are received by those delegating to the pool + # Check that new rewards are received by those delegating to the pool assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -692,7 +692,7 @@ def test_no_reward_deregistered_reward_addr( # Return the pool to the original state - reregister reward address. - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. # are deducted clusterlib_utils.fund_from_faucet( pool_reward, @@ -704,7 +704,7 @@ def test_no_reward_deregistered_reward_addr( src_updated_balance = cluster.g_query.get_address_balance(pool_reward.payment.address) - # reregister reward address + # Reregister reward address tx_files = clusterlib.TxFiles( certificate_files=[ pool_rec["reward_addr_registration_cert"], @@ -717,7 +717,7 @@ def test_no_reward_deregistered_reward_addr( tx_files=tx_files, ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert ( cluster.g_query.get_address_balance(pool_reward.payment.address) == src_updated_balance - tx_raw_output.fee - cluster.g_query.get_address_deposit() @@ -725,7 +725,7 @@ def test_no_reward_deregistered_reward_addr( cluster.wait_for_epoch(epoch_no=return_orig_epoch + 4, padding_seconds=30) - # check that new rewards were received by those delegating to the pool + # Check that new rewards were received by those delegating to 
the pool assert ( orig_user_reward < cluster.g_query.get_stake_addr_info( @@ -733,7 +733,7 @@ def test_no_reward_deregistered_reward_addr( ).reward_account_balance ), "New reward was not received by stake address" - # check that pool owner is also receiving rewards + # Check that pool owner is also receiving rewards assert ( cluster.g_query.get_stake_addr_info( pool_reward.stake.address @@ -741,7 +741,7 @@ def test_no_reward_deregistered_reward_addr( > 0 ), "New reward was not received by pool reward address" - # check that pledge is still met after the owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool_name, pool_id=pool_id ) @@ -801,20 +801,20 @@ def test_deregister_reward_addr_retire_pool( else: pytest.xfail(f"Pool '{pool_name}' hasn't received any rewards, cannot continue.") - # make sure we have enough time to finish reward address deregistration in one epoch + # Make sure we have enough time to finish reward address deregistration in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) dereg_reward_epoch = cluster.g_query.get_epoch() - # withdraw pool rewards to payment address + # Withdraw pool rewards to payment address cluster.g_stake_address.withdraw_reward( stake_addr_record=pool_reward.stake, dst_addr_record=pool_reward.payment, tx_name=temp_template, ) - # deregister the pool reward address + # Deregister the pool reward address stake_addr_dereg_cert = cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -834,7 +834,7 @@ def test_deregister_reward_addr_retire_pool( ) with cluster_manager.respin_on_failure(): - # check that the key deposit was returned + # Check that the key deposit was returned assert ( 
cluster.g_query.get_address_balance(pool_reward.payment.address) == src_init_balance @@ -842,7 +842,7 @@ def test_deregister_reward_addr_retire_pool( + cluster.g_query.get_address_deposit() ), f"Incorrect balance for source address `{pool_reward.payment.address}`" - # check that the reward address is no longer delegated + # Check that the reward address is no longer delegated assert not cluster.g_query.get_stake_addr_info( pool_reward.stake.address ), "Stake address still delegated" @@ -851,7 +851,7 @@ def test_deregister_reward_addr_retire_pool( epoch_no=dereg_reward_epoch + 3, padding_seconds=5 ) - # check that pool owner is NOT receiving rewards + # Check that pool owner is NOT receiving rewards assert ( cluster.g_query.get_stake_addr_info( pool_reward.stake.address @@ -859,7 +859,7 @@ def test_deregister_reward_addr_retire_pool( == 0 ), "Pool owner received unexpected rewards" - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. 
# are deducted clusterlib_utils.fund_from_faucet( pool_owner, @@ -880,7 +880,7 @@ def test_deregister_reward_addr_retire_pool( node_cold = pool_rec["cold_key_pair"] pool_id = cluster.g_stake_pool.get_stake_pool_id(node_cold.vkey_file) - # deregister stake pool + # Deregister stake pool depoch = cluster.g_query.get_epoch() + 1 __, tx_raw_output = cluster.g_stake_pool.deregister_stake_pool( pool_owners=[pool_owner], @@ -893,12 +893,12 @@ def test_deregister_reward_addr_retire_pool( cluster.wait_for_epoch(epoch_no=dereg_pool_epoch + 1, padding_seconds=5) - # check that the pool was deregistered + # Check that the pool was deregistered assert not cluster.g_query.get_pool_state( stake_pool_id=pool_id ).pool_params, f"The pool {pool_id} was not deregistered" - # check command kes-period-info case: de-register pool + # Check command kes-period-info case: de-register pool kes_period_info = cluster.g_query.get_kes_period_info(pool_opcert_file) kes_period_info_errors_list.extend( kes.check_kes_period_info_result( @@ -910,12 +910,12 @@ def test_deregister_reward_addr_retire_pool( ) ) - # check that the balance for source address was correctly updated + # Check that the balance for source address was correctly updated assert src_dereg_balance - tx_raw_output.fee == cluster.g_query.get_address_balance( pool_owner.payment.address ) - # check that the pool deposit was NOT returned to reward or stake address + # Check that the pool deposit was NOT returned to reward or stake address assert ( cluster.g_query.get_stake_addr_info(pool_owner.stake.address).reward_account_balance == stake_acount_balance @@ -932,7 +932,7 @@ def test_deregister_reward_addr_retire_pool( src_updated_balance = cluster.g_query.get_address_balance(pool_reward.payment.address) - # reregister the pool by resubmitting the pool registration certificate, + # Reregister the pool by resubmitting the pool registration certificate, # delegate stake address to pool again, reregister reward address tx_files = 
clusterlib.TxFiles( certificate_files=[ @@ -953,7 +953,7 @@ def test_deregister_reward_addr_retire_pool( tx_files=tx_files, ) - # check command kes-period-info case: re-register pool, check without + # Check command kes-period-info case: re-register pool, check without # waiting to take effect kes_period_info = cluster.g_query.get_kes_period_info(pool_opcert_file) kes_period_info_errors_list.extend( @@ -966,7 +966,7 @@ def test_deregister_reward_addr_retire_pool( ) ) - # check that the balance for source address was correctly updated and that the + # Check that the balance for source address was correctly updated and that the # pool deposit was needed assert ( cluster.g_query.get_address_balance(pool_reward.payment.address) @@ -985,7 +985,7 @@ def test_deregister_reward_addr_retire_pool( msg = f"Stake pool `{pool_id}` not registered even after 5 epochs." raise AssertionError(msg) - # check command kes-period-info case: re-register pool + # Check command kes-period-info case: re-register pool kes_period_info = cluster.g_query.get_kes_period_info(pool_opcert_file) kes_period_info_errors_list.extend( kes.check_kes_period_info_result( @@ -997,10 +997,10 @@ def test_deregister_reward_addr_retire_pool( ) ) - # wait before checking delegation and rewards + # Wait before checking delegation and rewards cluster.wait_for_epoch(epoch_no=rereg_epoch + 3, padding_seconds=30) - # check that the stake address was delegated + # Check that the stake address was delegated stake_addr_info = cluster.g_query.get_stake_addr_info(pool_owner.stake.address) assert ( stake_addr_info.delegation @@ -1008,12 +1008,12 @@ def test_deregister_reward_addr_retire_pool( assert pool_id == stake_addr_info.delegation, "Stake address delegated to wrong pool" - # check that pool owner is receiving rewards + # Check that pool owner is receiving rewards assert cluster.g_query.get_stake_addr_info( pool_reward.stake.address ).reward_account_balance, "New reward was not received by pool reward address" - # 
check that pledge is still met after the owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool_name, pool_id=pool_id ) diff --git a/cardano_node_tests/tests/test_staking_rewards.py b/cardano_node_tests/tests/test_staking_rewards.py index 44e358787..30c951cb5 100644 --- a/cardano_node_tests/tests/test_staking_rewards.py +++ b/cardano_node_tests/tests/test_staking_rewards.py @@ -128,10 +128,10 @@ def _check_member_pool_ids( """Check that in each epoch member rewards were received from the expected pool.""" epoch_to = rewards_by_idx[max(rewards_by_idx)].epoch_no - # reward records obtained from TX + # Reward records obtained from TX pool_ids_dict = {} for r_tx in rewards_by_idx.values(): - # rewards are received from pool to which the address was delegated 4 epochs ago + # Rewards are received from pool to which the address was delegated 4 epochs ago pool_epoch = r_tx.epoch_no - 4 rec_for_epoch_tx = rewards_by_idx.get(pool_epoch) if ( @@ -147,7 +147,7 @@ def _check_member_pool_ids( pool_first_epoch = min(pool_ids_dict) - # reward records obtained from db-sync + # Reward records obtained from db-sync db_pool_ids_dict = {} for r_db in reward_db_record.rewards: if ( @@ -167,10 +167,10 @@ def _check_leader_pool_ids( """Check that in each epoch leader rewards were received from the expected pool.""" epoch_to = rewards_by_idx[max(rewards_by_idx)].epoch_no - # reward records obtained from TX + # Reward records obtained from TX pool_ids_dict = {} for r_tx in rewards_by_idx.values(): - # rewards are received on address that was set as pool reward address 4 epochs ago + # Rewards are received on address that was set as pool reward address 4 epochs ago pool_epoch = r_tx.epoch_no - 4 rec_for_epoch_tx = rewards_by_idx.get(pool_epoch) if ( @@ -186,7 +186,7 @@ def _check_leader_pool_ids( pool_first_epoch = min(pool_ids_dict) - 
# reward records obtained from db-sync + # Reward records obtained from db-sync db_pool_ids_dict: dict = {} for r_db in reward_db_record.rewards: if ( @@ -212,7 +212,7 @@ def _dbsync_check_rewards( epoch_from = rewards[1].epoch_no epoch_to = rewards[-1].epoch_no - # when dealing with spendable epochs, last "spendable epoch" is last "earned epoch" + 2 + # When dealing with spendable epochs, last "spendable epoch" is last "earned epoch" + 2 reward_db_record = dbsync_utils.check_address_reward( address=stake_address, epoch_from=epoch_from, epoch_to=epoch_to + 2 ) @@ -220,11 +220,11 @@ def _dbsync_check_rewards( rewards_by_idx = {r.epoch_no: r for r in rewards} - # check that in each epoch rewards were received from the expected pool + # Check that in each epoch rewards were received from the expected pool _check_member_pool_ids(rewards_by_idx=rewards_by_idx, reward_db_record=reward_db_record) _check_leader_pool_ids(rewards_by_idx=rewards_by_idx, reward_db_record=reward_db_record) - # compare reward amounts with db-sync + # Compare reward amounts with db-sync user_rewards_dict = {r.epoch_no: r.reward_per_epoch for r in rewards if r.reward_per_epoch} user_db_rewards_dict = _add_spendable(rewards=reward_db_record.rewards, max_epoch=epoch_to) assert user_rewards_dict == user_db_rewards_dict @@ -281,11 +281,11 @@ def test_reward_simple( f"(epoch length: {cluster.epoch_length_sec / 60 / 60} hours)" ) - # make sure we have enough time to finish the registration/delegation in one epoch + # Make sure we have enough time to finish the registration/delegation in one epoch clusterlib_utils.wait_for_epoch_interval(cluster_obj=cluster, start=10, stop=-300) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -338,7 +338,7 @@ def test_reward_amount( # noqa: C901 __: 
tp.Any # mypy workaround cluster, pool_name = cluster_use_pool_and_rewards - # make sure there are rewards already available + # Make sure there are rewards already available clusterlib_utils.wait_for_rewards(cluster_obj=cluster) temp_template = common.get_test_id(cluster) @@ -351,7 +351,7 @@ def test_reward_amount( # noqa: C901 token_rand = clusterlib.get_rand_str(5) token_amount = 1_000_000 - # create two payment addresses that share single stake address (just to test that + # Create two payment addresses that share single stake address (just to test that # delegation works as expected even under such circumstances) stake_addr_rec = clusterlib_utils.create_stake_addr_records( f"{temp_template}_addr0", cluster_obj=cluster @@ -363,7 +363,7 @@ def test_reward_amount( # noqa: C901 stake_vkey_file=stake_addr_rec.vkey_file, ) - # fund payment address + # Fund payment address clusterlib_utils.fund_from_faucet( *payment_addr_recs, cluster_obj=cluster, @@ -372,14 +372,14 @@ def test_reward_amount( # noqa: C901 pool_user = clusterlib.PoolUser(payment=payment_addr_recs[1], stake=stake_addr_rec) - # make sure we have enough time to finish the registration/delegation in one epoch + # Make sure we have enough time to finish the registration/delegation in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool pool_id = delegation.get_pool_id( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, pool_name=pool_name ) @@ -393,7 +393,7 @@ def test_reward_amount( # noqa: C901 native_tokens: list[clusterlib_utils.TokenRecord] = [] if VERSIONS.transaction_era >= VERSIONS.MARY: - # create native tokens UTxOs for pool user + # Create native tokens UTxOs for pool user native_tokens = clusterlib_utils.new_tokens( *[f"couttscoin{token_rand}{i}".encode().hex() 
for i in range(5)], cluster_obj=cluster, @@ -403,14 +403,14 @@ def test_reward_amount( # noqa: C901 amount=token_amount, ) - # make sure we managed to finish registration in the expected epoch + # Make sure we managed to finish registration in the expected epoch assert ( cluster.g_query.get_epoch() == init_epoch ), "Delegation took longer than expected and would affect other checks" user_stake_addr_dec = helpers.decode_bech32(delegation_out.pool_user.stake.address)[2:] - # balance for both payment addresses associated with the single stake address + # Balance for both payment addresses associated with the single stake address user_payment_balance = cluster.g_query.get_address_balance( payment_addr_recs[0].address ) + cluster.g_query.get_address_balance(payment_addr_recs[1].address) @@ -435,7 +435,7 @@ def test_reward_amount( # noqa: C901 ) ] - # ledger state db + # Ledger state db rs_records: dict = {init_epoch: None} def _check_ledger_state( @@ -485,7 +485,7 @@ def _check_ledger_state( assert user_stake_addr_dec not in pstake_set assert user_stake_addr_dec not in pstake_go - # make sure ledger state and actual stake correspond + # Make sure ledger state and actual stake correspond assert pstake_mark[user_stake_addr_dec] == user_rewards[-1].stake_total if this_epoch == init_epoch + 2: @@ -512,7 +512,7 @@ def _check_ledger_state( LOGGER.info("Checking rewards for 9 epochs.") for __ in range(9): - # reward balance in previous epoch + # Reward balance in previous epoch prev_user_reward = user_rewards[-1].reward_total prev_owner_rec = owner_rewards[-1] prev_owner_epoch = prev_owner_rec.epoch_no @@ -520,7 +520,7 @@ def _check_ledger_state( this_epoch = cluster.wait_for_epoch(epoch_no=prev_owner_epoch + 1, future_is_ok=False) - # sleep till the end of epoch + # Sleep till the end of epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=common.EPOCH_START_SEC_LEDGER_STATE, @@ -528,7 +528,7 @@ def _check_ledger_state( force_epoch=True, ) - # current 
reward balance + # Current reward balance user_reward = cluster.g_query.get_stake_addr_info( delegation_out.pool_user.stake.address ).reward_account_balance @@ -536,11 +536,11 @@ def _check_ledger_state( pool_reward.stake.address ).reward_account_balance - # total reward amounts received this epoch + # Total reward amounts received this epoch user_reward_epoch = user_reward - prev_user_reward owner_reward_epoch = owner_reward - prev_owner_reward - # store collected rewards info + # Store collected rewards info user_rewards.append( RewardRecord( epoch_no=this_epoch, @@ -559,7 +559,7 @@ def _check_ledger_state( ) ) - # wait 4 epochs for first rewards + # Wait 4 epochs for first rewards if this_epoch >= init_epoch + 4: assert owner_reward > prev_owner_reward, "New reward was NOT received by pool owner" assert ( @@ -578,7 +578,7 @@ def _check_ledger_state( ) if native_tokens: - # burn native tokens + # Burn native tokens tokens_to_burn = [dataclasses.replace(t, amount=-token_amount) for t in native_tokens] clusterlib_utils.mint_or_burn_sign( cluster_obj=cluster, @@ -586,7 +586,7 @@ def _check_ledger_state( temp_template=f"{temp_template}_burn", ) - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=withdraw_out) tx_db_record = dbsync_utils.check_tx( @@ -610,7 +610,7 @@ def _check_ledger_state( rewards=owner_rewards, ) - # check in db-sync that both payment addresses share single stake address + # Check in db-sync that both payment addresses share single stake address assert ( dbsync_utils.get_utxo(address=payment_addr_recs[0].address).stake_address == stake_addr_rec.address @@ -656,7 +656,7 @@ def test_reward_addr_delegation( # noqa: C901 __: tp.Any # mypy workaround cluster, pool_name = cluster_lock_pool_and_pots - # make sure there are rewards already available + # Make sure there are rewards already available clusterlib_utils.wait_for_rewards(cluster_obj=cluster) # MIR rewards doesn't work 
on Conway+ @@ -668,7 +668,7 @@ def test_reward_addr_delegation( # noqa: C901 pool_reward = clusterlib.PoolUser(payment=pool_rec["payment"], stake=pool_rec["reward"]) reward_addr_dec = helpers.decode_bech32(pool_reward.stake.address)[2:] - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. # are deducted clusterlib_utils.fund_from_faucet( pool_owner, @@ -682,16 +682,16 @@ def test_reward_addr_delegation( # noqa: C901 cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, pool_name=pool_name ) - # make sure we have enough time to finish delegation in one epoch + # Make sure we have enough time to finish delegation in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) init_epoch = cluster.g_query.get_epoch() - # rewards each epoch + # Rewards each epoch reward_records: list[RewardRecord] = [] - # ledger state db + # Ledger state db rs_records: dict = {init_epoch: None} def _check_ledger_state( @@ -737,7 +737,7 @@ def _check_ledger_state( assert reward_addr_dec not in pstake_set assert reward_addr_dec not in pstake_go - # make sure ledger state and actual stake correspond + # Make sure ledger state and actual stake correspond assert pstake_mark[reward_addr_dec] == reward_records[-1].reward_total if this_epoch == init_epoch + 2: @@ -777,7 +777,7 @@ def _check_ledger_state( assert reward_addr_dec not in pstake_set assert reward_addr_dec not in pstake_go - # check that rewards are coming from multiple sources where expected + # Check that rewards are coming from multiple sources where expected # ("LeaderReward" and "MemberReward") if init_epoch + 3 <= this_epoch <= init_epoch + 7: assert _get_rew_type_for_cred_hash(reward_addr_dec, rs_record) == [ @@ -815,7 +815,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: return mir_tx_raw_output - # delegate pool rewards 
address to pool + # Delegate pool rewards address to pool node_cold = pool_rec["cold_key_pair"] reward_addr_deleg_cert_file = cluster.g_stake_address.gen_stake_and_vote_delegation_cert( addr_name=f"{temp_template}_addr0", @@ -840,7 +840,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: ) with cluster_manager.respin_on_failure(): - # make sure we managed to finish delegation in the expected epoch + # Make sure we managed to finish delegation in the expected epoch assert ( cluster.g_query.get_epoch() == init_epoch ), "Delegation took longer than expected and would affect other checks" @@ -859,7 +859,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: LOGGER.info("Checking rewards for 8 epochs.") withdrawal_past_epoch = False for __ in range(8): - # reward balance in previous epoch + # Reward balance in previous epoch prev_reward_rec = reward_records[-1] prev_epoch = prev_reward_rec.epoch_no prev_reward_total = prev_reward_rec.reward_total @@ -868,19 +868,19 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: epoch_no=prev_epoch + 1, padding_seconds=10, future_is_ok=False ) - # current reward balance + # Current reward balance reward_total = cluster.g_query.get_stake_addr_info( pool_reward.stake.address ).reward_account_balance - # total reward amount received this epoch + # Total reward amount received this epoch if withdrawal_past_epoch: reward_per_epoch = reward_total else: reward_per_epoch = reward_total - prev_reward_total withdrawal_past_epoch = False - # store collected rewards info + # Store collected rewards info reward_records.append( RewardRecord( epoch_no=this_epoch, @@ -902,10 +902,10 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: if mir_reward and this_epoch == init_epoch + 4: assert reward_per_epoch > mir_reward - # undelegate rewards address + # Undelegate rewards address if this_epoch == init_epoch + 5: address_deposit = common.get_conway_address_deposit(cluster_obj=cluster) - # create stake address deregistration cert + 
# Create stake address deregistration cert reward_addr_dereg_cert_file = ( cluster.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"{temp_template}_reward", @@ -914,7 +914,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: ) ) - # create stake address registration cert + # Create stake address registration cert reward_addr_reg_cert_file = ( cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_reward", @@ -923,7 +923,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: ) ) - # withdraw rewards; deregister and register stake address in single TX + # Withdraw rewards; deregister and register stake address in single TX tx_files = clusterlib.TxFiles( certificate_files=[reward_addr_dereg_cert_file, reward_addr_reg_cert_file], signing_key_files=[ @@ -947,7 +947,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: assert reward_stake_info.address, "Reward address is not registered" assert not reward_stake_info.delegation, "Reward address is still delegated" - # sleep till the end of epoch + # Sleep till the end of epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=common.EPOCH_START_SEC_LEDGER_STATE, @@ -957,7 +957,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: _check_ledger_state(this_epoch=this_epoch) - # check that pledge is still met after the owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool_name, pool_id=pool_id ) @@ -966,7 +966,7 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: owner_payment_balance >= pool_data.pool_pledge ), f"Pledge is not met for pool '{pool_name}'!" 
- # check TX records in db-sync + # Check TX records in db-sync assert dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_deleg) assert dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_undeleg) assert not mir_tx_raw_reserves or dbsync_utils.check_tx( @@ -976,17 +976,17 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput: cluster_obj=cluster, tx_raw_output=mir_tx_raw_treasury ) - # check pool records in db-sync + # Check pool records in db-sync pool_params: dict = cluster.g_query.get_pool_state(stake_pool_id=pool_id).pool_params dbsync_utils.check_pool_data(ledger_pool_data=pool_params, pool_id=pool_id) - # check rewards in db-sync + # Check rewards in db-sync reward_db_record = _dbsync_check_rewards( stake_address=pool_reward.stake.address, rewards=reward_records, ) - # in db-sync check that there were rewards of multiple different types + # In db-sync check that there were rewards of multiple different types # ("leader", "member", "treasury", "reserves") reward_types: dict[int, list[str]] = {} for rec in reward_db_record.rewards: @@ -1049,7 +1049,7 @@ def test_decreasing_reward_transferred_funds( ) init_epoch = cluster.g_query.get_epoch() - # submit registration certificate and delegate to pool + # Submit registration certificate and delegate to pool pool_id = delegation.get_pool_id( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, pool_name=pool_name ) @@ -1071,19 +1071,19 @@ def test_decreasing_reward_transferred_funds( ).reward_account_balance: pytest.skip(f"User of pool '{pool_name}' hasn't received any rewards, cannot continue.") - # create destination address for rewards withdrawal + # Create destination address for rewards withdrawal dst_addr_record = clusterlib_utils.create_payment_addr_records( f"{temp_template}_dst_addr", cluster_obj=cluster )[0] - # fund destination address + # Fund destination address clusterlib_utils.fund_from_faucet( dst_addr_record, cluster_obj=cluster, 
all_faucets=cluster_manager.cache.addrs_data, ) - # transfer all funds from payment address back to faucet, so no funds are staked + # Transfer all funds from payment address back to faucet, so no funds are staked faucet.return_funds_to_faucet( delegation_out.pool_user.payment, cluster_obj=cluster, @@ -1096,7 +1096,7 @@ def test_decreasing_reward_transferred_funds( rewards_rec = [] - # keep withdrawing new rewards so reward balance is 0 + # Keep withdrawing new rewards so reward balance is 0 def _withdraw(): rewards = cluster.g_query.get_stake_addr_info( delegation_out.pool_user.stake.address @@ -1112,7 +1112,7 @@ def _withdraw(): clusterlib_utils.save_ledger_state( cluster_obj=cluster, state_name=f"{temp_template}_{epoch}" ) - # withdraw rewards to destination address + # Withdraw rewards to destination address cluster.g_stake_address.withdraw_reward( stake_addr_record=delegation_out.pool_user.stake, dst_addr_record=dst_addr_record, @@ -1171,7 +1171,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 pool2_node_cold = pool2_rec["cold_key_pair"] pool2_id = cluster.g_stake_pool.get_stake_pool_id(pool2_node_cold.vkey_file) - # load pool data + # Load pool data loaded_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=f"changed_{pool2_name}", pool_id=pool2_id ) @@ -1194,7 +1194,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 else: pytest.xfail("Pools haven't received any rewards, cannot continue.") - # fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. + # Fund pool owner's addresses so balance keeps higher than pool pledge after fees etc. 
# are deducted clusterlib_utils.fund_from_faucet( pool2_owner, @@ -1204,13 +1204,13 @@ def test_2_pools_same_reward_addr( # noqa: C901 force=True, ) - # make sure we have enough time to submit pool registration cert in one epoch + # Make sure we have enough time to submit pool registration cert in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) init_epoch = cluster.g_query.get_epoch() - # set pool2 reward address to the reward address of pool1 by resubmitting the pool + # Set pool2 reward address to the reward address of pool1 by resubmitting the pool # registration certificate pool_reg_cert_file = cluster.g_stake_pool.gen_pool_registration_cert( pool_data=loaded_data, @@ -1234,7 +1234,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 deposit=0, # no additional deposit, the pool is already registered ) - # pool configuration changed, respin needed + # Pool configuration changed, respin needed cluster_manager.set_needs_respin() assert ( @@ -1242,11 +1242,11 @@ def test_2_pools_same_reward_addr( # noqa: C901 ), "Pool setup took longer than expected and would affect other checks" this_epoch = init_epoch - # rewards each epoch + # Rewards each epoch rewards_ledger_pool1: list[RewardRecord] = [] rewards_ledger_pool2: list[RewardRecord] = [] - # check rewards + # Check rewards for ep in range(6): if ep > 0: # Check that we are in the expected epoch @@ -1275,20 +1275,20 @@ def test_2_pools_same_reward_addr( # noqa: C901 leader_ids_pool1 = [pool1_id] leader_ids_pool2 = [pool2_id] - # pool re-registration took affect in `init_epoch` + 1 + # Pool re-registration took affect in `init_epoch` + 1 if this_epoch >= init_epoch + 1: leader_ids_pool1 = [pool1_id, pool2_id] leader_ids_pool2 = [] - # pool2 starts receiving leader rewards on pool1 address in `init_epoch` + 5 + # Pool2 starts receiving leader rewards on pool1 address in `init_epoch` + 5 # (re-registration epoch + 4) if this_epoch >= init_epoch + 5: 
- # check that the original reward address for pool2 is NOT receiving rewards + # Check that the original reward address for pool2 is NOT receiving rewards assert ( reward_for_epoch_pool2 == 0 ), "Original reward address of 'pool2' received unexpected rewards" - # rewards each epoch + # Rewards each epoch rewards_ledger_pool1.append( RewardRecord( epoch_no=this_epoch, @@ -1324,7 +1324,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 rewards_ledger_pool2[-1].reward_per_epoch == 0 ), "Original reward address of 'pool2' received unexpected rewards" - # check that pledge is still met after the owner address was used to pay for Txs + # Check that pledge is still met after the owner address was used to pay for Txs pool2_data = clusterlib_utils.load_registered_pool_data( cluster_obj=cluster, pool_name=pool2_name, pool_id=pool2_id ) @@ -1333,16 +1333,16 @@ def test_2_pools_same_reward_addr( # noqa: C901 owner_payment_balance >= pool2_data.pool_pledge ), f"Pledge is not met for pool '{pool2_name}'!" 
- # check TX records in db-sync + # Check TX records in db-sync assert dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_update_pool) - # check pool records in db-sync + # Check pool records in db-sync pool1_params: dict = cluster.g_query.get_pool_state(stake_pool_id=pool1_id).pool_params dbsync_utils.check_pool_data(ledger_pool_data=pool1_params, pool_id=pool1_id) pool2_params: dict = cluster.g_query.get_pool_state(stake_pool_id=pool2_id).pool_params dbsync_utils.check_pool_data(ledger_pool_data=pool2_params, pool_id=pool2_id) - # check rewards in db-sync + # Check rewards in db-sync rewards_db_pool1 = _dbsync_check_rewards( stake_address=pool1_reward.stake.address, rewards=rewards_ledger_pool1, @@ -1352,7 +1352,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 rewards=rewards_ledger_pool2, ) - # in db-sync check that pool1 reward address is used as reward address for pool1, and + # In db-sync check that pool1 reward address is used as reward address for pool1, and # in the expected epochs also for pool2 reward_types_pool1: dict[int, list[str]] = {} for rec in rewards_db_pool1.rewards: @@ -1368,7 +1368,7 @@ def test_2_pools_same_reward_addr( # noqa: C901 else: assert rtypes == ["leader", "leader"] - # in db-sync check that pool2 reward address is NOT used for receiving rewards anymore + # In db-sync check that pool2 reward address is NOT used for receiving rewards anymore # in the expected epochs reward_types_pool2: dict[int, list[str]] = {} for rec in rewards_db_pool2.rewards: @@ -1423,13 +1423,13 @@ def test_redelegation( # noqa: C901 cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, pool_name=pool2_name ) - # make sure we have enough time to finish the registration/delegation in one epoch + # Make sure we have enough time to finish the registration/delegation in one epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=5, stop=common.EPOCH_STOP_SEC_BUFFER ) init_epoch = cluster.g_query.get_epoch() - # submit 
registration certificate and delegate to pool1 + # Submit registration certificate and delegate to pool1 delegation_out = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -1437,7 +1437,7 @@ def test_redelegation( # noqa: C901 pool_id=pool1_id, ) - # make sure we managed to finish registration in the expected epoch + # Make sure we managed to finish registration in the expected epoch assert ( cluster.g_query.get_epoch() == init_epoch ), "Delegation took longer than expected and would affect other checks" @@ -1456,7 +1456,7 @@ def test_redelegation( # noqa: C901 stake_addr_dec = helpers.decode_bech32(delegation_out.pool_user.stake.address)[2:] - # ledger state db + # Ledger state db rs_records: dict = {init_epoch: None} def _check_ledger_state( @@ -1498,7 +1498,7 @@ def _check_ledger_state( assert stake_addr_dec not in pstake_set assert stake_addr_dec not in pstake_go - # make sure ledger state and actual stake correspond + # Make sure ledger state and actual stake correspond assert pstake_mark[stake_addr_dec] == reward_records[-1].stake_total if this_epoch == init_epoch + 2: @@ -1521,7 +1521,7 @@ def _check_ledger_state( LOGGER.info("Checking rewards for 8 epochs.") withdrawal_past_epoch = False for __ in range(8): - # reward balance in previous epoch + # Reward balance in previous epoch prev_reward_rec = reward_records[-1] prev_epoch = prev_reward_rec.epoch_no prev_reward_total = prev_reward_rec.reward_total @@ -1530,28 +1530,28 @@ def _check_ledger_state( epoch_no=prev_epoch + 1, padding_seconds=10, future_is_ok=False ) - # current reward balance + # Current reward balance reward_total = cluster.g_query.get_stake_addr_info( delegation_out.pool_user.stake.address ).reward_account_balance - # total reward amount received this epoch + # Total reward amount received this epoch if withdrawal_past_epoch: reward_per_epoch = reward_total else: reward_per_epoch = reward_total - prev_reward_total withdrawal_past_epoch = 
False - # current payment balance + # Current payment balance payment_balance = cluster.g_query.get_address_balance( delegation_out.pool_user.payment.address ) - # stake amount this epoch + # Stake amount this epoch stake_total = payment_balance + reward_total if this_epoch == init_epoch + 2: - # re-delegate to pool2 + # Re-delegate to pool2 delegation_out_ep2 = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -1561,7 +1561,7 @@ def _check_ledger_state( ) if this_epoch == init_epoch + 3: - # deregister stake address + # Deregister stake address clusterlib_utils.deregister_stake_address( cluster_obj=cluster, pool_user=delegation_out.pool_user, @@ -1570,7 +1570,7 @@ def _check_ledger_state( ) withdrawal_past_epoch = True - # re-register, delegate to pool1 + # Re-register, delegate to pool1 delegation_out_ep3 = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -1584,7 +1584,7 @@ def _check_ledger_state( reward_total > prev_reward_total ), "New reward was NOT received by stake address" - # deregister stake address + # Deregister stake address clusterlib_utils.deregister_stake_address( cluster_obj=cluster, pool_user=delegation_out.pool_user, @@ -1593,7 +1593,7 @@ def _check_ledger_state( ) withdrawal_past_epoch = True - # wait for start of reward calculation, which is at 4k/f slot + # Wait for start of reward calculation, which is at 4k/f slot start_reward_calc_sec = ( 4 * cluster.genesis["securityParam"] @@ -1607,7 +1607,7 @@ def _check_ledger_state( stop=wait_for_sec, force_epoch=True, ) - # re-register, delegate to pool1 + # Re-register, delegate to pool1 delegation_out_ep4 = delegation.delegate_stake_addr( cluster_obj=cluster, addrs_data=cluster_manager.cache.addrs_data, @@ -1617,7 +1617,7 @@ def _check_ledger_state( ) if this_epoch == init_epoch + 5: - # rewards should be received even when the stake credential was + # Rewards should be received even when the 
stake credential was # re-registered after reward calculation have already started assert reward_total > 0, "Reward was NOT received by stake address" @@ -1630,7 +1630,7 @@ def _check_ledger_state( cluster.g_query.get_epoch() == this_epoch ), "Failed to finish actions in single epoch, it would affect other checks" - # sleep till the end of epoch + # Sleep till the end of epoch clusterlib_utils.wait_for_epoch_interval( cluster_obj=cluster, start=common.EPOCH_START_SEC_LEDGER_STATE, @@ -1638,7 +1638,7 @@ def _check_ledger_state( force_epoch=True, ) - # store collected rewards info + # Store collected rewards info reward_records.append( RewardRecord( epoch_no=this_epoch, @@ -1653,7 +1653,7 @@ def _check_ledger_state( _check_ledger_state(this_epoch=this_epoch) - # check records in db-sync + # Check records in db-sync tx_db_record_init = dbsync_utils.check_tx( cluster_obj=cluster, tx_raw_output=delegation_out.tx_raw_output ) @@ -1722,7 +1722,7 @@ def pool_users( pool_owner = clusterlib.PoolUser(payment=pool_rec["payment"], stake=pool_rec["stake"]) pool_reward = clusterlib.PoolUser(payment=pool_rec["payment"], stake=pool_rec["reward"]) - # make sure there are rewards already available + # Make sure there are rewards already available clusterlib_utils.wait_for_rewards(cluster_obj=cluster) return pool_owner, pool_reward @@ -1731,7 +1731,7 @@ def pool_users( @hypothesis.given( amount=st.integers( min_value=1, - # don't set to `MAX_UINT64` as change value of balanced Tx would exceed that value + # Don't set to `MAX_UINT64` as change value of balanced Tx would exceed that value max_value=common.MAX_UINT64 // 2, ), ) diff --git a/cardano_node_tests/tests/test_tx_basic.py b/cardano_node_tests/tests/test_tx_basic.py index b418ac713..ca8439d3b 100644 --- a/cardano_node_tests/tests/test_tx_basic.py +++ b/cardano_node_tests/tests/test_tx_basic.py @@ -48,7 +48,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses 
clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, @@ -76,7 +76,7 @@ def byron_addrs( ] fixture_cache.value = new_byron_addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *new_byron_addrs, cluster_obj=cluster, @@ -99,7 +99,7 @@ def payment_addrs_disposable( cluster_obj=cluster, ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster, @@ -121,7 +121,7 @@ def payment_addrs_no_change( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -379,7 +379,7 @@ def test_transfer_all_funds( src_address = payment_addrs_disposable[1].address dst_address = payment_addrs_disposable[0].address - # amount value -1 means all available funds + # Amount value -1 means all available funds destinations = [clusterlib.TxOut(address=dst_address, amount=-1)] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs_disposable[1].skey_file]) @@ -420,7 +420,7 @@ def test_transfer_all_funds( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check `transaction view` command + # Check `transaction view` command tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) @@ -1175,7 +1175,7 @@ def test_default_tx_era( tx_files=tx_files, ) - # check `transaction view` command, this will check if the tx era is the expected + # Check `transaction view` command, this will check if the tx era is the expected tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output) @@ -1202,7 +1202,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1225,9 +1225,9 @@ def _from_to_transactions( ): """Test 1 tx from `from_num` payment addresses to `to_num` payment addresses.""" 
src_address = payment_addrs[0].address - # addr1..addr + # Addr1..addr from_addr_recs = payment_addrs[1 : from_num + 1] - # addr..addr + # Addr..addr dst_addresses = [ payment_addrs[i].address for i in range(from_num + 1, from_num + to_num + 1) ] @@ -1503,7 +1503,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -1609,7 +1609,7 @@ def test_incremental_signing( # Incrementally sign the already signed Tx with rest of the skeys, excluding the # required skey for idx, skey in enumerate(payment_skey_files[1:5], start=1): - # sign multiple times with the same skey to see that it doesn't affect Tx fee + # Sign multiple times with the same skey to see that it doesn't affect Tx fee for r in range(5): tx_signed = cluster.g_transaction.sign_tx( tx_file=tx_signed, diff --git a/cardano_node_tests/tests/test_tx_fees.py b/cardano_node_tests/tests/test_tx_fees.py index 813c186f1..31ffdcc55 100644 --- a/cardano_node_tests/tests/test_tx_fees.py +++ b/cardano_node_tests/tests/test_tx_fees.py @@ -58,7 +58,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -214,7 +214,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *created_users[:10], cluster_obj=cluster, @@ -231,12 +231,12 @@ def _create_pool_certificates( pool_data: clusterlib.PoolData, ) -> tuple[str, clusterlib.TxFiles]: """Create certificates for registering a stake pool, delegating stake address.""" - # create node VRF key pair + # Create node VRF key pair node_vrf = cluster_obj.g_node.gen_vrf_key_pair(node_name=pool_data.pool_name) - # create node cold key pair and counter + # Create node cold key pair and counter node_cold = 
cluster_obj.g_node.gen_cold_key_pair_and_counter(node_name=pool_data.pool_name) - # create stake address registration certs + # Create stake address registration certs stake_addr_reg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr{i}", @@ -246,7 +246,7 @@ def _create_pool_certificates( for i, p in enumerate(pool_owners) ] - # create stake address delegation cert + # Create stake address delegation cert stake_addr_deleg_cert_files = [ cluster_obj.g_stake_address.gen_stake_addr_delegation_cert( addr_name=f"{temp_template}_addr{i}", @@ -256,7 +256,7 @@ def _create_pool_certificates( for i, p in enumerate(pool_owners) ] - # create stake pool registration cert + # Create stake pool registration cert pool_reg_cert_file = cluster_obj.g_stake_pool.gen_pool_registration_cert( pool_data=pool_data, vrf_vkey_file=node_vrf.vkey_file, @@ -266,7 +266,7 @@ def _create_pool_certificates( src_address = pool_owners[0].payment.address - # register and delegate stake address, create and register pool + # Register and delegate stake address, create and register pool tx_files = clusterlib.TxFiles( certificate_files=[ pool_reg_cert_file, @@ -295,21 +295,21 @@ def _from_to_transactions( amount, expected_fee = amount_expected src_address = pool_users[0].payment.address - # addr1..addr + # Addr1..addr from_addr_recs = [p.payment for p in pool_users[1 : from_num + 1]] - # addr..addr + # Addr..addr dst_addresses = [ pool_users[i].payment.address for i in range(from_num + 1, from_num + to_num + 1) ] - # create TX data + # Create TX data _txins = [cluster_obj.g_query.get_utxo(address=r.address) for r in from_addr_recs] - # flatten the list of lists that is _txins + # Flatten the list of lists that is _txins txins = list(itertools.chain.from_iterable(_txins)) txouts = [clusterlib.TxOut(address=addr, amount=amount) for addr in dst_addresses] tx_files = clusterlib.TxFiles(signing_key_files=[r.skey_file for r in from_addr_recs]) - # 
calculate TX fee + # Calculate TX fee tx_fee = cluster_obj.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=tx_name, txins=txins, txouts=txouts, tx_files=tx_files ) @@ -351,10 +351,10 @@ def test_pool_registration_fees( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners selected_owners = pool_users[:no_of_addr] - # create certificates + # Create certificates src_address, tx_files = self._create_pool_certificates( cluster_obj=cluster, pool_owners=selected_owners, @@ -362,7 +362,7 @@ def test_pool_registration_fees( pool_data=pool_data, ) - # calculate TX fee + # Calculate TX fee tx_fee = cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, tx_files=tx_files ) @@ -405,13 +405,13 @@ def test_pool_deregistration_fees( pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file), ) - # create pool owners + # Create pool owners selected_owners = pool_users[:no_of_addr] - # create node cold key pair and counter + # Create node cold key pair and counter node_cold = cluster.g_node.gen_cold_key_pair_and_counter(node_name=pool_data.pool_name) - # create deregistration certificate + # Create deregistration certificate pool_dereg_cert_file = cluster.g_stake_pool.gen_pool_deregistration_cert( pool_name=pool_data.pool_name, cold_vkey_file=node_cold.vkey_file, @@ -427,7 +427,7 @@ def test_pool_deregistration_fees( ], ) - # calculate TX fee + # Calculate TX fee tx_fee = cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, tx_files=tx_files ) @@ -459,7 +459,7 @@ def test_addr_registration_fees( for i, p in enumerate(selected_users) ] - # create TX data + # Create TX data tx_files = clusterlib.TxFiles( certificate_files=[*stake_addr_reg_certs], signing_key_files=[ @@ -468,7 +468,7 @@ def test_addr_registration_fees( ], ) - # calculate TX fee + # Calculate TX fee tx_fee = 
cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, tx_files=tx_files ) @@ -500,7 +500,7 @@ def test_addr_deregistration_fees( for i, p in enumerate(selected_users) ] - # create TX data + # Create TX data tx_files = clusterlib.TxFiles( certificate_files=[*stake_addr_dereg_certs], signing_key_files=[ @@ -509,7 +509,7 @@ def test_addr_deregistration_fees( ], ) - # calculate TX fee + # Calculate TX fee tx_fee = cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, tx_files=tx_files ) diff --git a/cardano_node_tests/tests/test_tx_many_utxos.py b/cardano_node_tests/tests/test_tx_many_utxos.py index dadf14bb1..23de61738 100644 --- a/cardano_node_tests/tests/test_tx_many_utxos.py +++ b/cardano_node_tests/tests/test_tx_many_utxos.py @@ -69,11 +69,11 @@ def _from_to_transactions( src_address = payment_addr.address dst_addresses = [rec.address for rec in out_addrs] - # create TX data + # Create TX data txouts = [clusterlib.TxOut(address=addr, amount=amount) for addr in dst_addresses] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addr.skey_file]) - # send TX + # Send TX cluster_obj.g_transaction.send_tx( src_address=src_address, # change is returned to `src_address` tx_name=tx_name, @@ -214,7 +214,7 @@ def _subtest(amount: int) -> None: txins_optimized.append(popped_txin) break - # build, sign and submit the transaction + # Build, sign and submit the transaction data_for_build = clusterlib.collect_data_for_build( clusterlib_obj=cluster, src_address=src_address, diff --git a/cardano_node_tests/tests/test_tx_mempool.py b/cardano_node_tests/tests/test_tx_mempool.py index aacc6560f..268985ebc 100644 --- a/cardano_node_tests/tests/test_tx_mempool.py +++ b/cardano_node_tests/tests/test_tx_mempool.py @@ -32,7 +32,7 @@ def payment_addrs_locked( cluster_obj=cluster_singleton, ) - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *addrs, cluster_obj=cluster_singleton, 
diff --git a/cardano_node_tests/tests/test_tx_metadata.py b/cardano_node_tests/tests/test_tx_metadata.py index d59cab968..36baa3db8 100644 --- a/cardano_node_tests/tests/test_tx_metadata.py +++ b/cardano_node_tests/tests/test_tx_metadata.py @@ -47,7 +47,7 @@ def payment_addr( )[0] fixture_cache.value = addr - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, @@ -130,7 +130,7 @@ def test_tx_invalid_json_metadata( metadata_json_files=[self.JSON_METADATA_INVALID_FILE], ) - # it should NOT be possible to build a transaction using an invalid metadata JSON + # It should NOT be possible to build a transaction using an invalid metadata JSON with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.build_raw_tx( src_address=payment_addr.address, @@ -163,7 +163,7 @@ def test_build_tx_invalid_json_metadata( metadata_json_files=[self.JSON_METADATA_INVALID_FILE], ) - # it should NOT be possible to build a transaction using an invalid metadata JSON + # It should NOT be possible to build a transaction using an invalid metadata JSON with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.build_tx( src_address=payment_addr.address, @@ -258,7 +258,7 @@ def test_tx_metadata_json( assert tx_raw_output.fee, "Transaction had no fee" cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_raw_output.out_file) - # dump it as JSON, so keys are converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) with open(self.JSON_METADATA_FILE, encoding="utf-8") as metadata_fp: @@ -268,7 +268,7 @@ def test_tx_metadata_json( json_body_metadata == json_file_metadata ), "Metadata in TX body doesn't match the original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, 
tx_raw_output=tx_raw_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -311,7 +311,7 @@ def test_build_tx_metadata_json( cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_output.txins) cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_output.out_file) - # dump it as JSON, so keys are converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) with open(self.JSON_METADATA_FILE, encoding="utf-8") as metadata_fp: @@ -321,7 +321,7 @@ def test_build_tx_metadata_json( json_body_metadata == json_file_metadata ), "Metadata in TX body doesn't match the original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -360,7 +360,7 @@ def test_tx_metadata_cbor( cbor_body_metadata.metadata == cbor_file_metadata ), "Metadata in TX body doesn't match original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -412,7 +412,7 @@ def test_build_tx_metadata_cbor( cbor_body_metadata.metadata == cbor_file_metadata ), "Metadata in TX body doesn't match original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -444,7 +444,7 @@ def test_tx_metadata_both( assert tx_raw_output.fee, "Transaction had no fee" cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_raw_output.out_file) - # dump it as JSON, so keys are 
converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) with open(self.JSON_METADATA_FILE, encoding="utf-8") as metadata_fp_json: @@ -459,11 +459,11 @@ def test_tx_metadata_both( **cbor_file_metadata, }, "Metadata in TX body doesn't match original metadata" - # check `transaction view` command + # Check `transaction view` command tx_view_out = tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) assert json_body_metadata == tx_view_out["metadata"] - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -507,7 +507,7 @@ def test_build_tx_metadata_both( cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_output.txins) cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_output.out_file) - # dump it as JSON, so keys are converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) with open(self.JSON_METADATA_FILE, encoding="utf-8") as metadata_fp_json: @@ -522,11 +522,11 @@ def test_build_tx_metadata_both( **cbor_file_metadata, }, "Metadata in TX body doesn't match original metadata" - # check `transaction view` command + # Check `transaction view` command tx_view_out = tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output) assert json_body_metadata == tx_view_out["metadata"] - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -560,10 +560,10 @@ def test_tx_duplicate_metadata_keys( assert tx_raw_output.fee, "Transaction had no fee" 
cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_raw_output.out_file) - # dump it as JSON, so keys are converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) - # merge the input JSON files and alter the result so it matches the expected metadata + # Merge the input JSON files and alter the result so it matches the expected metadata with open(metadata_json_files[0], encoding="utf-8") as metadata_fp: json_file_metadata1 = json.load(metadata_fp) with open(metadata_json_files[1], encoding="utf-8") as metadata_fp: @@ -575,7 +575,7 @@ def test_tx_duplicate_metadata_keys( json_body_metadata == json_file_metadata ), "Metadata in TX body doesn't match the original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() @@ -622,7 +622,7 @@ def test_tx_metadata_no_txout( assert not tx_raw_output.txouts, "Transaction has unexpected txouts" cbor_body_metadata = clusterlib_utils.load_tx_metadata(tx_body_file=tx_raw_output.out_file) - # dump it as JSON, so keys are converted to strings + # Dump it as JSON, so keys are converted to strings json_body_metadata = json.loads(json.dumps(cbor_body_metadata.metadata)) with open(self.JSON_METADATA_FILE, encoding="utf-8") as metadata_fp: @@ -632,7 +632,7 @@ def test_tx_metadata_no_txout( json_body_metadata == json_file_metadata ), "Metadata in TX body doesn't match the original metadata" - # check TX and metadata in db-sync if available + # Check TX and metadata in db-sync if available tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) if tx_db_record: db_metadata = tx_db_record._convert_metadata() diff --git a/cardano_node_tests/tests/test_tx_negative.py 
b/cardano_node_tests/tests/test_tx_negative.py index 6084a7cfc..542f938ac 100644 --- a/cardano_node_tests/tests/test_tx_negative.py +++ b/cardano_node_tests/tests/test_tx_negative.py @@ -60,7 +60,7 @@ def cluster_wrong_tx_era( cluster: clusterlib.ClusterLib, # noqa: ARG002 ) -> clusterlib.ClusterLib: # pylint: disable=unused-argument - # the `cluster` argument (representing the `cluster` fixture) needs to be present + # The `cluster` argument (representing the `cluster` fixture) needs to be present # in order to have an actual cluster instance assigned at the time this fixture # is executed return cluster_nodes.get_cluster_type().get_cluster_obj( @@ -85,7 +85,7 @@ def pool_users( ) fixture_cache.value = created_users - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( *created_users, cluster_obj=cluster, @@ -106,7 +106,7 @@ def _send_funds_to_invalid_address( tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[0].payment.skey_file]) destinations = [clusterlib.TxOut(address=addr, amount=1_000_000)] - # it should NOT be possible to build a transaction using an invalid address + # It should NOT be possible to build a transaction using an invalid address with pytest.raises(clusterlib.CLIError) as excinfo: if use_build_cmd: cluster_obj.g_transaction.build_tx( @@ -140,7 +140,7 @@ def _send_funds_from_invalid_address( tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[0].payment.skey_file]) destinations = [clusterlib.TxOut(address=pool_users[1].payment.address, amount=1_000_000)] - # it should NOT be possible to build a transaction using an invalid address + # It should NOT be possible to build a transaction using an invalid address with pytest.raises(clusterlib.CLIError) as excinfo: if use_build_cmd: cluster_obj.g_transaction.build_tx( @@ -171,7 +171,7 @@ def _send_funds_invalid_change_address( tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[0].payment.skey_file]) destinations = 
[clusterlib.TxOut(address=pool_users[1].payment.address, amount=1_000_000)] - # it should NOT be possible to build a transaction using an invalid change address + # It should NOT be possible to build a transaction using an invalid change address with pytest.raises(clusterlib.CLIError) as excinfo: cluster_obj.g_transaction.build_tx( src_address=pool_users[0].payment.address, @@ -271,7 +271,7 @@ def _submit_wrong_validity( tx_name=temp_template, ) - # it should NOT be possible to submit a transaction with negative ttl + # It should NOT be possible to submit a transaction with negative ttl with pytest.raises(clusterlib.CLIError) as excinfo: cluster_obj.g_transaction.submit_tx_bare(out_file_signed) exc_val = str(excinfo.value) @@ -450,7 +450,7 @@ def test_before_too_high( __: tp.Any # mypy workaround temp_template = common.get_test_id(cluster) - # valid values are <= `common.MAX_INT64` + # Valid values are <= `common.MAX_INT64` before_value = common.MAX_INT64 + 5 __, err_str, *__ = self._submit_wrong_validity( @@ -504,7 +504,7 @@ def test_pbt_before_negative_overflow( # In node versions < 1.36.0 we were checking error from `cardano-cli transaction submit` assert slot_no is not None - # we cannot XFAIL in PBT, so we'll pass on the xfail condition and re-test using + # We cannot XFAIL in PBT, so we'll pass on the xfail condition and re-test using # a regular test `test_before_negative_overflow` assert slot_no > 0, f"SlotNo: {slot_no}, `before_value`: {before_value}" @@ -552,7 +552,7 @@ def test_pbt_before_positive_overflow( # In node versions < 1.36.0 we were checking error from `cardano-cli transaction submit` assert slot_no is not None - # we cannot XFAIL in PBT, so we'll pass on the xfail condition and re-test using + # We cannot XFAIL in PBT, so we'll pass on the xfail condition and re-test using # a regular test `test_before_positive_overflow` assert slot_no == before_value - 1, f"SlotNo: {slot_no}, `before_value`: {before_value}" @@ -615,7 +615,7 @@ def 
test_duplicated_tx( tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[0].payment.skey_file]) destinations = [clusterlib.TxOut(address=dst_address, amount=amount)] - # build and sign a transaction + # Build and sign a transaction fee = cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, @@ -635,7 +635,7 @@ def test_duplicated_tx( tx_name=temp_template, ) - # submit a transaction for the first time + # Submit a transaction for the first time cluster.g_transaction.submit_tx(tx_file=out_file_signed, txins=tx_raw_output.txins) out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_raw_output) @@ -647,7 +647,7 @@ def test_duplicated_tx( clusterlib.filter_utxos(utxos=out_utxos, address=dst_address)[0].amount == amount ), f"Incorrect balance for destination address `{dst_address}`" - # it should NOT be possible to submit a transaction twice + # It should NOT be possible to submit a transaction twice with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx_bare(out_file_signed) assert "ValueNotConservedUTxO" in str(excinfo.value) @@ -736,11 +736,11 @@ def test_wrong_signing_key( """ temp_template = common.get_test_id(cluster) - # use wrong signing key + # Use wrong signing key tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[1].payment.skey_file]) destinations = [clusterlib.TxOut(address=pool_users[1].payment.address, amount=1_500_000)] - # it should NOT be possible to submit a transaction with wrong signing key + # It should NOT be possible to submit a transaction with wrong signing key with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.send_tx( src_address=pool_users[0].payment.address, @@ -768,7 +768,7 @@ def test_wrong_tx_era( tx_files = clusterlib.TxFiles(signing_key_files=[pool_users[0].payment.skey_file]) destinations = [clusterlib.TxOut(address=pool_users[1].payment.address, amount=1_500_000)] - # it should NOT be possible to submit a transaction when TX era > 
network era + # It should NOT be possible to submit a transaction when TX era > network era with pytest.raises(clusterlib.CLIError) as excinfo: cluster_wrong_tx_era.g_transaction.send_tx( src_address=pool_users[0].payment.address, @@ -1252,7 +1252,7 @@ def test_nonexistent_utxo_ix( if use_build_cmd: assert ( "The UTxO is empty" in err - # in 1.35.3 and older + # In 1.35.3 and older or "The following tx input(s) were not present in the UTxO" in err ), err else: @@ -1287,7 +1287,7 @@ def test_nonexistent_utxo_hash( if use_build_cmd: assert ( "The UTxO is empty" in err - # in 1.35.3 and older + # In 1.35.3 and older or "The following tx input(s) were not present in the UTxO" in err ), err else: diff --git a/cardano_node_tests/tests/test_tx_unbalanced.py b/cardano_node_tests/tests/test_tx_unbalanced.py index 51658b2f2..5a8b6aaf2 100644 --- a/cardano_node_tests/tests/test_tx_unbalanced.py +++ b/cardano_node_tests/tests/test_tx_unbalanced.py @@ -81,7 +81,7 @@ def payment_addrs( ) fixture_cache.value = addrs - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -132,9 +132,9 @@ def test_negative_change( src_addr_highest_utxo = cluster.g_query.get_utxo_with_highest_amount(src_address) - # use only the UTxO with the highest amount + # Use only the UTxO with the highest amount txins = [src_addr_highest_utxo] - # try to transfer +1 Lovelace more than available and use a negative change (-1) + # Try to transfer +1 Lovelace more than available and use a negative change (-1) txouts = [ clusterlib.TxOut(address=dst_address, amount=src_addr_highest_utxo.amount - fee + 1), clusterlib.TxOut(address=src_address, amount=-1), @@ -187,10 +187,10 @@ def test_build_transfer_unavailable_funds( tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) - # use only the UTxO with the highest amount + # Use only the UTxO with the highest amount txins = [pbt_highest_utxo] amount = min(MAX_LOVELACE_AMOUNT, 
pbt_highest_utxo.amount + transfer_add) - # try to transfer whole balance + # Try to transfer whole balance txouts = [clusterlib.TxOut(address=dst_address, amount=amount)] with pytest.raises(clusterlib.CLIError) as excinfo: @@ -238,7 +238,7 @@ def test_wrong_balance( tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) ttl = cluster.g_transaction.calculate_tx_ttl() - # use only the UTxO with the highest amount + # Use only the UTxO with the highest amount txins = [pbt_highest_utxo] txouts = [ clusterlib.TxOut(address=dst_address, amount=transferred_amount), @@ -261,7 +261,7 @@ def test_wrong_balance( tx_name=temp_template, ) - # it should NOT be possible to submit an unbalanced transaction + # It should NOT be possible to submit an unbalanced transaction with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx_bare(out_file_signed) exc_val = str(excinfo.value) @@ -290,7 +290,7 @@ def test_out_of_bounds_amount( tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) ttl = cluster.g_transaction.calculate_tx_ttl() - # use only the UTxO with the highest amount + # Use only the UTxO with the highest amount txins = [pbt_highest_utxo] txouts = [ clusterlib.TxOut(address=payment_addrs[0].address, amount=change_amount), @@ -426,7 +426,7 @@ def test_transfer_amount_bellow_minimum( cluster.cli(build_args) - # create signed transaction + # Create signed transaction out_file_signed = cluster.g_transaction.sign_tx( tx_body_file=out_file, signing_key_files=[payment_addrs[0].skey_file], @@ -434,7 +434,7 @@ def test_transfer_amount_bellow_minimum( ) with pytest.raises(clusterlib.CLIError) as excinfo_build: - # submit the signed transaction + # Submit the signed transaction cluster.g_transaction.submit_tx(tx_file=out_file_signed, txins=[pbt_highest_utxo]) exc_val = str(excinfo_build.value) diff --git a/cardano_node_tests/tests/test_update_proposals.py b/cardano_node_tests/tests/test_update_proposals.py index 
150d4192d..35749b0dc 100644 --- a/cardano_node_tests/tests/test_update_proposals.py +++ b/cardano_node_tests/tests/test_update_proposals.py @@ -60,7 +60,7 @@ def payment_addr( )[0] fixture_cache.value = addr - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, @@ -109,7 +109,7 @@ def test_update_proposal( with open(f"{temp_template}_pparams_ep{this_epoch}.json", "w", encoding="utf-8") as fp_out: json.dump(protocol_params, fp_out, indent=4) - # update Alonzo+ specific parameters in separate update proposal + # Update Alonzo+ specific parameters in separate update proposal # TODO: On node >= 1.36.0 the cost models are lists. On older versions they are dicts. cost_proposal_file = ( @@ -195,11 +195,11 @@ def test_update_proposal( assert cost_model_v1 == cost_model_prop_content["PlutusV1"] assert cost_model_v2 == cost_model_prop_content["PlutusV2"] else: - # check only selected expected value as some key names don't necessarily match + # Check only selected expected value as some key names don't necessarily match assert cost_model_v1["verifyEd25519Signature-memory-arguments"] == 11 assert cost_model_v2["verifyEd25519Signature-memory-arguments"] == 11 - # check param proposal on dbsync + # Check param proposal on dbsync dbsync_utils.check_param_proposal(protocol_params=protocol_params) # Check that only one update proposal can be applied each epoch and that the last @@ -275,7 +275,7 @@ def test_update_proposal( ) time.sleep(2) - # the final update proposal + # The final update proposal decentralization = clusterlib_utils.UpdateProposal( arg="--decentralization-parameter", value=0.1, @@ -356,7 +356,7 @@ def test_update_proposal( update_proposals=update_proposals, protocol_params=protocol_params ) - # check param proposal on dbsync + # Check param proposal on dbsync dbsync_utils.check_param_proposal(protocol_params=protocol_params) assert protocol_params.get("decentralization") is None @@ -386,7 +386,7 @@ def 
payment_addr( )[0] fixture_cache.value = addr - # fund source addresses + # Fund source addresses clusterlib_utils.fund_from_faucet( addr, cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index 600d08858..35d9e250f 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -57,7 +57,7 @@ def payment_addrs_lg( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus/mint_build.py b/cardano_node_tests/tests/tests_plutus/mint_build.py index 54ba6d284..df890f7d2 100644 --- a/cardano_node_tests/tests/tests_plutus/mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/mint_build.py @@ -43,7 +43,7 @@ def _fund_issuer( tx_files=tx_files, txouts=txouts, fee_buffer=2_000_000, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) tx_signed = cluster_obj.g_transaction.sign_tx( diff --git a/cardano_node_tests/tests/tests_plutus/mint_raw.py b/cardano_node_tests/tests/tests_plutus/mint_raw.py index 21d299e9b..cc91be400 100644 --- a/cardano_node_tests/tests/tests_plutus/mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus/mint_raw.py @@ -6,7 +6,7 @@ LOGGER = logging.getLogger(__name__) -# approx. fee for Tx size +# Approx. 
fee for Tx size FEE_MINT_TXSIZE = 400_000 @@ -46,7 +46,7 @@ def _fund_issuer( tx_files=tx_files, # TODO: workaround for https://github.com/IntersectMBO/cardano-node/issues/1892 witness_count_add=2, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) diff --git a/cardano_node_tests/tests/tests_plutus/spend_build.py b/cardano_node_tests/tests/tests_plutus/spend_build.py index 7d1ee24b2..48edaaf81 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/spend_build.py @@ -45,7 +45,7 @@ def _build_fund_script( protocol_params=cluster_obj.g_query.get_protocol_params(), ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], @@ -60,7 +60,7 @@ def _build_fund_script( txouts = [ script_txout, - # for collateral + # For collateral clusterlib.TxOut(address=dst_addr.address, amount=redeem_cost.collateral), ] @@ -157,7 +157,7 @@ def _build_spend_locked_txin( # noqa: C901 # datum hash (datum hash is not provided for change that is handled by `build` command). 
script_change_rec = script_utxos[0] - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -185,7 +185,7 @@ def _build_spend_locked_txin( # noqa: C901 txouts.append( clusterlib.TxOut(address=dst_addr.address, amount=token.amount, coin=token.coin) ) - # append change + # Append change script_token_balance = clusterlib.calculate_utxos_balance( utxos=script_utxos, coin=token.coin ) @@ -199,7 +199,7 @@ def _build_spend_locked_txin( # noqa: C901 datum_hash=script_change_rec.datum_hash, ) ) - # add minimum (+ some) required Lovelace to change Tx output + # Add minimum (+ some) required Lovelace to change Tx output if lovelace_change_needed: txouts.append( clusterlib.TxOut( @@ -282,7 +282,7 @@ def _build_spend_locked_txin( # noqa: C901 return "", tx_output, [] - # calculate cost of Plutus script + # Calculate cost of Plutus script plutus_costs = cluster_obj.g_transaction.calculate_plutus_script_cost( src_address=payment_addr.address, tx_name=f"{temp_template}_step2", @@ -316,11 +316,11 @@ def _build_spend_locked_txin( # noqa: C901 utxo=u, coins=[token.coin] ), f"Token inputs were NOT spent for `{u.address}`" - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster_obj, tx_raw_output=tx_output) tx_db_record = dbsync_utils.check_tx(cluster_obj=cluster_obj, tx_raw_output=tx_output) - # compare cost of Plutus script with data from db-sync + # Compare cost of Plutus script with data from db-sync if tx_db_record: dbsync_utils.check_plutus_costs( redeemer_records=tx_db_record.redeemers, cost_records=plutus_costs diff --git a/cardano_node_tests/tests/tests_plutus/spend_raw.py b/cardano_node_tests/tests/tests_plutus/spend_raw.py index 471c5d585..edb39be0c 100644 --- a/cardano_node_tests/tests/tests_plutus/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/spend_raw.py @@ -13,7 +13,7 @@ LOGGER = logging.getLogger(__name__) -# approx. fee for Tx size +# Approx. 
fee for Tx size FEE_REDEEM_TXSIZE = 400_000 @@ -49,7 +49,7 @@ def _fund_script( collateral_fraction_offset=collateral_fraction_offset, ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], @@ -64,7 +64,7 @@ def _fund_script( txouts = [ script_txout, - # for collateral + # For collateral clusterlib.TxOut(address=dst_addr.address, amount=redeem_cost.collateral), ] @@ -147,7 +147,7 @@ def _spend_locked_txin( # noqa: C901 tx_files = tx_files or clusterlib.TxFiles() spent_tokens = tokens or () - # change will be returned to address of the first script + # Change will be returned to address of the first script change_rec = script_utxos[0] redeem_cost = plutus_common.compute_cost( @@ -160,7 +160,7 @@ def _spend_locked_txin( # noqa: C901 utxos=[*script_utxos_lovelace, *txins] ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -184,7 +184,7 @@ def _spend_locked_txin( # noqa: C901 txouts = [ clusterlib.TxOut(address=dst_addr.address, amount=amount), ] - # append change + # Append change if script_lovelace_balance > amount + redeem_cost.fee + fee_txsize: txouts.append( clusterlib.TxOut( @@ -198,7 +198,7 @@ def _spend_locked_txin( # noqa: C901 txouts.append( clusterlib.TxOut(address=dst_addr.address, amount=token.amount, coin=token.coin) ) - # append change + # Append change script_token_balance = clusterlib.calculate_utxos_balance( utxos=script_utxos, coin=token.coin ) @@ -299,7 +299,7 @@ def _spend_locked_txin( # noqa: C901 utxo=u, coins=[token.coin] ), f"Token inputs were NOT spent for `{u.address}`" - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster_obj, tx_raw_output=tx_raw_output) dbsync_utils.check_tx(cluster_obj=cluster_obj, tx_raw_output=tx_raw_output) diff --git a/cardano_node_tests/tests/tests_plutus/test_lobster.py 
b/cardano_node_tests/tests/tests_plutus/test_lobster.py index 99a69d82d..206dd64e4 100644 --- a/cardano_node_tests/tests/tests_plutus/test_lobster.py +++ b/cardano_node_tests/tests/tests_plutus/test_lobster.py @@ -46,7 +46,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -82,7 +82,7 @@ def _fund_issuer( tx_files=tx_files, txouts=txouts, fee_buffer=2_000_000, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) tx_signed = cluster_obj.g_transaction.sign_tx( @@ -167,7 +167,7 @@ def _mint_lobster_nft( utxos=out_utxos, address=issuer_addr.address, coin=lobster_nft_token ) - # check expected balances + # Check expected balances # Skip change UTxO. Change txout created by `transaction build` used to be UTxO with index 0, # now it is the last UTxO. @@ -237,7 +237,7 @@ def _deploy_lobster_nft( utxos=out_utxos, address=script_address, coin=lobster_nft_token ) - # check expected balances + # Check expected balances assert ( clusterlib.calculate_utxos_balance(lovelace_utxos) == lovelace_amount ), f"Incorrect Lovelace balance for token issuer address `{script_address}`" @@ -322,7 +322,7 @@ def test_lobster_name( # Step 4: prepare for voting - # there's 50 votes, each vote is int between 1 and 100 + # There's 50 votes, each vote is int between 1 and 100 votes = [random.randint(1, 100) for __ in range(votes_num)] _votes_sum = sum(votes) # Add "random" seed to the sum of all votes. 
Taking the remainder after @@ -348,13 +348,13 @@ def test_lobster_name( vote_counter = 0 utxo_counter_token: clusterlib.UTXOData | None = None for vote_num, vote_val in enumerate(votes, start=1): - # normal votes + # Normal votes if vote_num <= votes_num: vote_counter += vote_val mint_val = vote_val - # final IO vote + # Final IO vote else: - # set new counter value to `(seed + counter value) % number of names` + # Set new counter value to `(seed + counter value) % number of names` # and burn excessive LobsterCounter tokens mint_val = vote_val - vote_counter vote_counter = vote_val @@ -390,14 +390,14 @@ def test_lobster_name( ] mint_txouts = [ - # mint new LobsterCounter tokens + # Mint new LobsterCounter tokens clusterlib.TxOut( address=script_address, amount=mint_val, coin=counter_token, datum_hash=LOBSTER_DATUM_HASH, ), - # mint 1 new LobsterVotes token + # Mint 1 new LobsterVotes token clusterlib.TxOut( address=script_address, amount=1, @@ -451,13 +451,13 @@ def test_lobster_name( ) vote_utxos = clusterlib.filter_utxos(utxos=out_utxos_vote, utxo_ix=utxo_ix_offset) - # check expected balances + # Check expected balances utxo_counter_tokens = [u for u in vote_utxos if u.coin == counter_token] utxo_counter_token = None try: utxos_lovelace = next(u for u in vote_utxos if u.coin == clusterlib.DEFAULT_COIN) utxo_votes_token = next(u for u in vote_utxos if u.coin == votes_token) - # when `vote_counter` is not 0 (that can happen for final vote), there needs to be + # When `vote_counter` is not 0 (that can happen for final vote), there needs to be # a counter token if vote_counter: utxo_counter_token = utxo_counter_tokens[0] @@ -477,7 +477,7 @@ def test_lobster_name( utxo_counter_token is None or utxo_counter_token.amount == vote_counter ), f"Incorrect LobsterCounter token balance for script address `{script_address}`" - # final counter value can be 0 + # Final counter value can be 0 if expected_counter_val == 0: assert ( not utxo_counter_tokens @@ -487,6 +487,6 @@ def 
test_lobster_name( utxo_counter_token and utxo_counter_token.amount == expected_counter_val ), "Final balance of LobsterCounter token doesn't match the expected balance" - # check transactions in db-sync + # Check transactions in db-sync for tx_out_rec in tx_outputs_all: dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_out_rec) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_build.py index 8c4e6b87c..9c3ff5b1f 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_build.py @@ -45,7 +45,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -490,7 +490,7 @@ def test_time_range_minting( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check expected fees + # Check expected fees expected_fee_step1 = 167_349 assert helpers.is_in_interval(tx_output_step1.fee, expected_fee_step1, frac=0.15) @@ -502,7 +502,7 @@ def test_time_range_minting( expected_costs=[plutus_v_record.execution_cost], ) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_step2) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_step1) @@ -555,7 +555,7 @@ def test_two_scripts_minting( script_file1_v2 = plutus_common.MINTING_PLUTUS_V2 script_file1_v3 = plutus_common.MINTING_PLUTUS_V3 - # this is higher than `plutus_common.MINTING*_COST`, because the script context has changed + # This is higher than `plutus_common.MINTING*_COST`, because the script context has changed # to include more stuff minting_cost1_v1 = plutus_common.ExecutionCost( per_time=297_744_405, per_space=1_126_016, fixed_cost=86_439 @@ -601,7 +601,7 @@ def test_two_scripts_minting( ) txouts_step1 = [ clusterlib.TxOut(address=issuer_addr.address, amount=script_fund), - # for collaterals + # For 
collaterals clusterlib.TxOut(address=issuer_addr.address, amount=minting_cost1.collateral), clusterlib.TxOut(address=issuer_addr.address, amount=minting_cost2.collateral), ] @@ -611,7 +611,7 @@ def test_two_scripts_minting( tx_files=tx_files_step1, txouts=txouts_step1, fee_buffer=2_000_000, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) tx_signed_step1 = cluster.g_transaction.sign_tx( @@ -678,7 +678,7 @@ def test_two_scripts_minting( clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token2) ] - # mint the tokens + # Mint the tokens plutus_mint_data = [ clusterlib.Mint( txouts=mint_txouts1, @@ -758,7 +758,7 @@ def test_two_scripts_minting( token_utxo2 and token_utxo2[0].amount == token_amount ), "The 'timerange' token was not minted" - # check expected fees + # Check expected fees expected_fee_step1 = 168_977 assert helpers.is_in_interval(tx_output_step1.fee, expected_fee_step1, frac=0.15) @@ -770,10 +770,10 @@ def test_two_scripts_minting( expected_costs=[execution_cost1, minting_cost2_v2], ) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_step2) - # check transactions in db-sync + # Check transactions in db-sync dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_step1) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_step2) @@ -855,7 +855,7 @@ def test_minting_context_equivalence( *mint_txouts, ] - # generate a dummy redeemer in order to create a txbody from which + # Generate a dummy redeemer in order to create a txbody from which # we can generate a tx and then derive the correct redeemer redeemer_file_dummy = pl.Path(f"{temp_template}_dummy_script_context.redeemer") clusterlib_utils.create_script_context( @@ -885,7 +885,7 @@ def test_minting_context_equivalence( ) assert tx_output_dummy - # generate the "real" redeemer + # Generate the 
"real" redeemer redeemer_file = pl.Path(f"{temp_template}_script_context.redeemer") plutus_common.create_script_context_w_blockers( @@ -911,7 +911,7 @@ def test_minting_context_equivalence( invalid_hereafter=invalid_hereafter, ) - # calculate cost of Plutus script + # Calculate cost of Plutus script plutus_costs_step2 = cluster.g_transaction.calculate_plutus_script_cost( src_address=payment_addr.address, tx_name=f"{temp_template}_step2", @@ -955,14 +955,14 @@ def test_minting_context_equivalence( expected_costs=[plutus_common.MINTING_CONTEXT_EQUIVALENCE_COST], ) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_step2) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_step1) tx_db_record_step2 = dbsync_utils.check_tx( cluster_obj=cluster, tx_raw_output=tx_output_step2 ) - # compare cost of Plutus script with data from db-sync + # Compare cost of Plutus script with data from db-sync if tx_db_record_step2: dbsync_utils.check_plutus_costs( redeemer_records=tx_db_record_step2.redeemers, cost_records=plutus_costs_step2 @@ -1089,7 +1089,7 @@ def test_witness_redeemer( mint=plutus_mint_data, required_signers=[signing_key_golden], ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) tx_signed_step2 = cluster.g_transaction.sign_tx( tx_body_file=tx_output_step2.out_file, signing_key_files=[issuer_addr.skey_file], @@ -1121,7 +1121,7 @@ def test_witness_redeemer( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check expected fees + # Check expected fees expected_fee_step1 = 167_349 assert helpers.is_in_interval(tx_output_step1.fee, expected_fee_step1, frac=0.15) @@ -1202,15 +1202,15 @@ def test_ttl_horizon( *mint_txouts, ] - # calculate 3k/f + # Calculate 3k/f offset_3kf = round( 3 * cluster.genesis["securityParam"] / cluster.genesis["activeSlotsCoeff"] ) - # use 3k/f + `epoch_length` slots for ttl - this will not meet the 
`expect_pass` condition + # Use 3k/f + `epoch_length` slots for ttl - this will not meet the `expect_pass` condition if ttl_offset == -1: ttl_offset = offset_3kf + cluster.epoch_length - # use 3k/f - 100 slots for ttl - this will meet the `expect_pass` condition + # Use 3k/f - 100 slots for ttl - this will meet the `expect_pass` condition elif ttl_offset == -2: ttl_offset = offset_3kf - 100 @@ -1224,7 +1224,7 @@ def test_ttl_horizon( cluster_obj=cluster, slot_no=invalid_hereafter ) - # the TTL will pass if it's in epoch 'e' and the slot of the latest applied block + 3k/f + # The TTL will pass if it's in epoch 'e' and the slot of the latest applied block + 3k/f # is greater than the first slot of 'e' expect_pass = slot_no_3kf >= ttl_epoch_info.first_slot @@ -1245,7 +1245,7 @@ def test_ttl_horizon( last_slot_diff = cluster.g_query.get_slot_no() - last_slot_init expect_pass_finish = slot_no_3kf + last_slot_diff >= ttl_epoch_info.first_slot if expect_pass != expect_pass_finish: - # we have hit a boundary, and it is hard to say if the test should have passed or not + # We have hit a boundary, and it is hard to say if the test should have passed or not assert not err or "TimeTranslationPastHorizon" in err, err pytest.skip("Boundary hit, skipping") return @@ -1351,17 +1351,17 @@ def test_duplicated_collateral( altered_build_args = tx_output_step2.build_args[:] - # add a duplicate collateral + # Add a duplicate collateral collateral_idx = altered_build_args.index("--tx-in-collateral") + 1 altered_build_args.insert(collateral_idx + 1, "--tx-in-collateral") altered_build_args.insert(collateral_idx + 2, altered_build_args[collateral_idx]) - # change the output file + # Change the output file tx_body_step2 = pl.Path(f"{tx_output_step2.out_file.stem}_altered.body") out_file_idx = altered_build_args.index("--out-file") + 1 altered_build_args[out_file_idx] = str(tx_body_step2) - # build the transaction using altered arguments + # Build the transaction using altered arguments 
cluster.cli(altered_build_args) tx_signed_step2 = cluster.g_transaction.sign_tx( @@ -1390,7 +1390,7 @@ def test_duplicated_collateral( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check return collateral amount, this is only available on Babbage+ TX + # Check return collateral amount, this is only available on Babbage+ TX if VERSIONS.transaction_era >= VERSIONS.BABBAGE: tx_loaded = tx_view.load_tx_view(cluster_obj=cluster, tx_body_file=tx_body_step2) diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py index 02308cd2d..dd61e79fd 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_negative_build.py @@ -46,7 +46,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py index 657079510..1575844e0 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_negative_raw.py @@ -38,7 +38,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -74,7 +74,7 @@ def fund_execution_units_above_limit( execution_cost=plutus_common.ALWAYS_SUCCEEDS[plutus_version].execution_cost, ) - # for mypy + # For mypy assert plutus_op.execution_cost minting_cost = plutus_common.compute_cost( @@ -257,7 +257,7 @@ def test_low_budget( txouts=mint_txouts, script_file=plutus_v_record.script_file, collaterals=collateral_utxos, - # set execution units too low - to half of the expected values + # Set execution units too low - to half of the expected values execution_units=( 
plutus_v_record.execution_cost.per_time // 2, plutus_v_record.execution_cost.per_space // 2, @@ -367,7 +367,7 @@ def test_low_fee( fee_subtract = 300_000 txouts_step2 = [ - # add subtracted fee to the transferred Lovelace amount so the Tx remains balanced + # Add subtracted fee to the transferred Lovelace amount so the Tx remains balanced clusterlib.TxOut(address=issuer_addr.address, amount=lovelace_amount + fee_subtract), *mint_txouts, ] @@ -458,7 +458,7 @@ def test_execution_units_above_limit( protocol_params=cluster.g_query.get_protocol_params(), ) - # for mypy + # For mypy assert plutus_op.execution_cost policyid = cluster.g_transaction.get_policyid(plutus_op.script_file) @@ -716,7 +716,7 @@ def test_minting_with_invalid_collaterals( tx_name=f"{temp_template}_step2", ) - # it should NOT be possible to mint with an invalid collateral + # It should NOT be possible to mint with an invalid collateral with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) assert "NoCollateralInputs" in str(excinfo.value) @@ -752,7 +752,7 @@ def test_minting_with_insufficient_collateral( plutus_v_record = plutus_common.MINTING_PLUTUS[plutus_version] - # increase fixed cost so the required collateral is higher than minimum collateral of 2 ADA + # Increase fixed cost so the required collateral is higher than minimum collateral of 2 ADA execution_cost = dataclasses.replace(plutus_v_record.execution_cost, fixed_cost=2_000_000) minting_cost = plutus_common.compute_cost( @@ -820,7 +820,7 @@ def test_minting_with_insufficient_collateral( tx_name=f"{temp_template}_step2", ) - # it should NOT be possible to mint with a collateral with insufficient funds + # It should NOT be possible to mint with a collateral with insufficient funds with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) assert "InsufficientCollateral" in str(excinfo.value) diff --git 
a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py index d9c6a4a95..df4979c79 100644 --- a/cardano_node_tests/tests/tests_plutus/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_mint_raw.py @@ -41,7 +41,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -191,7 +191,7 @@ def test_minting_two_tokens( mint=plutus_mint_data, tx_files=tx_files_step2, fee=minting_cost.fee + fee_txsize, - # ttl is optional in this test + # Ttl is optional in this test invalid_hereafter=cluster.g_query.get_slot_no() + 200, ) tx_signed_step2 = cluster.g_transaction.sign_tx( @@ -224,7 +224,7 @@ def test_minting_two_tokens( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) @@ -342,7 +342,7 @@ def test_witness_redeemer( fee=minting_cost.fee + mint_raw.FEE_MINT_TXSIZE, required_signers=[signing_key_golden], ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) tx_signed_step2 = cluster.g_transaction.sign_tx( tx_body_file=tx_raw_output_step2.out_file, signing_key_files=[issuer_addr.skey_file], @@ -368,7 +368,7 @@ def test_witness_redeemer( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) @@ -496,7 +496,7 @@ def test_time_range_minting( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) 
dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) @@ -542,7 +542,7 @@ def test_two_scripts_minting( script_file1_v2 = plutus_common.MINTING_PLUTUS_V2 script_file1_v3 = plutus_common.MINTING_PLUTUS_V3 - # this is higher than `plutus_common.MINTING*_COST`, because the script context has changed + # This is higher than `plutus_common.MINTING*_COST`, because the script context has changed # to include more stuff minting_cost1_v1 = plutus_common.ExecutionCost( per_time=297_744_405, per_space=1_126_016, fixed_cost=86_439 @@ -592,7 +592,7 @@ def test_two_scripts_minting( ) txouts_step1 = [ clusterlib.TxOut(address=issuer_addr.address, amount=lovelace_amount + fee_step2_total), - # for collaterals + # For collaterals clusterlib.TxOut(address=issuer_addr.address, amount=minting_cost1.collateral), clusterlib.TxOut(address=issuer_addr.address, amount=minting_cost2.collateral), ] @@ -604,7 +604,7 @@ def test_two_scripts_minting( tx_files=tx_files_step1, # TODO: workaround for https://github.com/IntersectMBO/cardano-node/issues/1892 witness_count_add=2, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) @@ -652,7 +652,7 @@ def test_two_scripts_minting( clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token2) ] - # mint the tokens + # Mint the tokens plutus_mint_data = [ clusterlib.Mint( txouts=mint_txouts1, @@ -726,10 +726,10 @@ def test_two_scripts_minting( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) - # check transactions in db-sync + # Check transactions in db-sync dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) @@ -796,7 +796,7 @@ def test_minting_policy_executed_once1( 
plutus_common.MINTING_TOKENNAME_PLUTUS_V1 ) - # qacoinA + # QacoinA asset_name_a_dec = f"qacoinA{clusterlib.get_rand_str(4)}" asset_name_a = asset_name_a_dec.encode("utf-8").hex() token_a = f"{policyid_tokenname}.{asset_name_a}" @@ -804,7 +804,7 @@ def test_minting_policy_executed_once1( clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token_a) ] - # qacoinB + # QacoinB asset_name_b_dec = f"qacoinB{clusterlib.get_rand_str(4)}" asset_name_b = asset_name_b_dec.encode("utf-8").hex() token_b = f"{policyid_tokenname}.{asset_name_b}" @@ -812,7 +812,7 @@ def test_minting_policy_executed_once1( clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token_b) ] - # mint the tokens + # Mint the tokens plutus_mint_data = [ # First redeemer and first script are ignored when there are # multiple scripts for the same minting policy. Even though we @@ -822,7 +822,7 @@ def test_minting_policy_executed_once1( clusterlib.Mint( txouts=mint_txouts_a, script_file=plutus_common.MINTING_TOKENNAME_PLUTUS_V1, - # execution units are too low, but it doesn't matter as they get ignored anyway + # Execution units are too low, but it doesn't matter as they get ignored anyway execution_units=(1, 1), redeemer_value='"ignored_value"', ), @@ -884,10 +884,10 @@ def test_minting_policy_executed_once1( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx_view + # Check tx_view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) - # check transactions in db-sync + # Check transactions in db-sync dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) @@ -949,12 +949,12 @@ def test_minting_policy_executed_once2( policyid = cluster.g_transaction.get_policyid(plutus_common.MINTING_TOKENNAME_PLUTUS_V1) - # qacoinA + # QacoinA asset_name_a_dec = f"qacoinA{clusterlib.get_rand_str(4)}" asset_name_a = 
asset_name_a_dec.encode("utf-8").hex() token_a = f"{policyid}.{asset_name_a}" - # qacoinB + # QacoinB asset_name_b_dec = f"qacoinB{clusterlib.get_rand_str(4)}" asset_name_b = asset_name_b_dec.encode("utf-8").hex() token_b = f"{policyid}.{asset_name_b}" @@ -973,7 +973,7 @@ def test_minting_policy_executed_once2( plutus_common.MINTING_TOKENNAME_COST.per_time, plutus_common.MINTING_TOKENNAME_COST.per_space, ), - # both tokens will be minted even though the redeemer value + # Both tokens will be minted even though the redeemer value # matches the name of only the second one redeemer_value=f'"{asset_name_b_dec}"', ) @@ -1041,7 +1041,7 @@ def test_minting_policy_executed_once2( expected_costs=[plutus_common.MINTING_TOKENNAME_COST], ) - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output_step1) @@ -1116,7 +1116,7 @@ def test_minting_context_equivalence( *mint_txouts, ] - # generate a dummy redeemer in order to create a txbody from which + # Generate a dummy redeemer in order to create a txbody from which # we can generate a tx and then derive the correct redeemer redeemer_file_dummy = pl.Path(f"{temp_template}_dummy_script_context.redeemer") clusterlib_utils.create_script_context( @@ -1150,7 +1150,7 @@ def test_minting_context_equivalence( ) assert tx_output_dummy - # generate the "real" redeemer + # Generate the "real" redeemer redeemer_file = pl.Path(f"{temp_template}_script_context.redeemer") plutus_common.create_script_context_w_blockers( @@ -1284,15 +1284,15 @@ def test_ttl_horizon( *mint_txouts, ] - # calculate 3k/f + # Calculate 3k/f offset_3kf = round( 3 * cluster.genesis["securityParam"] / cluster.genesis["activeSlotsCoeff"] ) - # use 3k/f + `epoch_length` slots for ttl - this will not meet the `expect_pass` condition + # Use 3k/f + `epoch_length` slots for ttl - this will not meet the `expect_pass` condition if ttl_offset == -1: 
ttl_offset = offset_3kf + cluster.epoch_length - # use 3k/f - 100 slots for ttl - this will meet the `expect_pass` condition + # Use 3k/f - 100 slots for ttl - this will meet the `expect_pass` condition elif ttl_offset == -2: ttl_offset = offset_3kf - 100 @@ -1306,7 +1306,7 @@ def test_ttl_horizon( cluster_obj=cluster, slot_no=invalid_hereafter ) - # the TTL will pass if it's in epoch 'e' and the slot of the latest applied block + 3k/f + # The TTL will pass if it's in epoch 'e' and the slot of the latest applied block + 3k/f # is greater than the first slot of 'e' expect_pass = slot_no_3kf >= ttl_epoch_info.first_slot @@ -1334,7 +1334,7 @@ def test_ttl_horizon( last_slot_diff = cluster.g_query.get_slot_no() - last_slot_init expect_pass_finish = slot_no_3kf + last_slot_diff >= ttl_epoch_info.first_slot if expect_pass != expect_pass_finish: - # we have hit a boundary and it is hard to say if the test should have passed or not + # We have hit a boundary and it is hard to say if the test should have passed or not assert not err or "TimeTranslationPastHorizon" in err, err pytest.skip("Boundary hit, skipping") return @@ -1439,17 +1439,17 @@ def test_duplicated_collateral( altered_build_args = tx_raw_output_step2.build_args[:] - # add a duplicate collateral + # Add a duplicate collateral collateral_idx = altered_build_args.index("--tx-in-collateral") + 1 altered_build_args.insert(collateral_idx + 1, "--tx-in-collateral") altered_build_args.insert(collateral_idx + 2, altered_build_args[collateral_idx]) - # change the output file + # Change the output file tx_body_step2 = pl.Path(f"{tx_raw_output_step2.out_file.stem}_altered.body") out_file_idx = altered_build_args.index("--out-file") + 1 altered_build_args[out_file_idx] = str(tx_body_step2) - # build the transaction using altered arguments + # Build the transaction using altered arguments cluster.cli(altered_build_args) tx_signed_step2 = cluster.g_transaction.sign_tx( diff --git 
a/cardano_node_tests/tests/tests_plutus/test_spend_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_build.py index 9b4e7af6b..50e24d662 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_build.py @@ -41,7 +41,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -65,7 +65,7 @@ def pool_users( no_of_addr=2, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -132,7 +132,7 @@ def test_txout_locking( amount=2_000_000, ) - # check expected fees + # Check expected fees expected_fee_fund = 168_845 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -175,7 +175,7 @@ def test_context_equivalence( amount = 10_000_000 deposit_amount = cluster.g_query.get_address_deposit() - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr2", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -184,7 +184,7 @@ def test_context_equivalence( tx_files = clusterlib.TxFiles(certificate_files=[stake_addr_reg_cert_file]) - # generate a dummy redeemer in order to create a txbody from which + # Generate a dummy redeemer in order to create a txbody from which # we can generate a tx and then derive the correct redeemer redeemer_file_dummy = pl.Path(f"{temp_template}_dummy_script_context.redeemer") clusterlib_utils.create_script_context( @@ -198,7 +198,7 @@ def test_context_equivalence( execution_cost=plutus_common.CONTEXT_EQUIVALENCE_COST, ) - # fund the script address + # Fund the script address script_utxos, collateral_utxos, __ = spend_build._build_fund_script( temp_template=temp_template, cluster_obj=cluster, @@ -227,7 +227,7 @@ def 
test_context_equivalence( ) assert tx_output_dummy - # generate the "real" redeemer + # Generate the "real" redeemer redeemer_file = pl.Path(f"{temp_template}_script_context.redeemer") plutus_common.create_script_context_w_blockers( @@ -254,7 +254,7 @@ def test_context_equivalence( invalid_hereafter=invalid_hereafter, ) - # check expected fees + # Check expected fees if tx_output: expected_fee = 372_438 assert helpers.is_in_interval(tx_output.fee, expected_fee, frac=0.15) @@ -360,7 +360,7 @@ def test_guessing_game( amount=2_000_000, ) - # check expected fees + # Check expected fees expected_fee_fund = 168_845 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -410,7 +410,7 @@ def test_two_scripts_spending( script_file1_v1 = plutus_common.ALWAYS_SUCCEEDS_PLUTUS_V1 execution_cost1_v1 = plutus_common.ALWAYS_SUCCEEDS_COST script_file2_v1 = plutus_common.GUESSING_GAME_PLUTUS_V1 - # this is higher than `plutus_common.GUESSING_GAME_COST`, because the script + # This is higher than `plutus_common.GUESSING_GAME_COST`, because the script # context has changed to include more stuff execution_cost2_v1 = plutus_common.ExecutionCost( per_time=280_668_068, per_space=1_031_312, fixed_cost=79_743 @@ -491,7 +491,7 @@ def test_two_scripts_spending( script_data_file=plutus_op2.datum_file ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files_fund = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -507,7 +507,7 @@ def test_two_scripts_spending( amount=script_fund, datum_hash=datum_hash2, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] @@ -582,7 +582,7 @@ def test_two_scripts_spending( change_address=payment_addrs[0].address, ) - # calculate cost of Plutus script + # Calculate cost of Plutus 
script plutus_costs = cluster.g_transaction.calculate_plutus_script_cost( src_address=payment_addrs[0].address, tx_name=f"{temp_template}_step2", @@ -618,7 +618,7 @@ def test_two_scripts_spending( utxo=u, coins=[clusterlib.DEFAULT_COIN] ), f"Inputs were NOT spent for `{u.address}`" - # check expected fees + # Check expected fees assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) assert helpers.is_in_interval(tx_output_redeem.fee, expected_fee_redeem, frac=0.15) @@ -627,10 +627,10 @@ def test_two_scripts_spending( expected_costs=[execution_cost1, execution_cost2], ) - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) - # check transactions in db-sync + # Check transactions in db-sync tx_redeem_record = dbsync_utils.check_tx( cluster_obj=cluster, tx_raw_output=tx_output_redeem ) @@ -693,7 +693,7 @@ def test_always_fails( ) assert "The Plutus script evaluation failed" in err, err - # check expected fees + # Check expected fees expected_fee_fund = 168_845 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -738,7 +738,7 @@ def test_script_invalid( plutus_op=plutus_op, ) - # include any payment txin + # Include any payment txin txins = [ r for r in cluster.g_query.get_utxo( @@ -767,7 +767,7 @@ def test_script_invalid( raise issues.consensus_947.finish_test() - # check expected fees + # Check expected fees expected_fee_fund = 168_845 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -840,7 +840,7 @@ def test_txout_token_locking( tokens=tokens_rec, ) - # check expected fees + # Check expected fees expected_fee_fund = 173_597 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -928,7 +928,7 @@ def test_partial_spending( tokens=tokens_spend_rec, ) - # check that the expected amounts of Lovelace and native tokens were spent and change UTxOs + # Check that the expected amounts of Lovelace and 
native tokens were spent and change UTxOs # with appropriate datum hash were created assert tx_output_spend @@ -965,7 +965,7 @@ def test_partial_spending( assert u.amount == token_amount_exp assert u.datum_hash == script_utxos[0].datum_hash - # check expected fees + # Check expected fees expected_fee_fund = 173_597 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -1082,7 +1082,7 @@ def test_collateral_is_txin( utxo=u, coins=[clusterlib.DEFAULT_COIN] ), f"Inputs were NOT spent for `{script_address}`" - # check expected fees + # Check expected fees expected_fee_step1 = 168_845 assert helpers.is_in_interval(tx_output_step1.fee, expected_fee_step1, frac=0.15) diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py index b45ae84cd..9188c457e 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_build.py @@ -34,7 +34,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py index ef6b0e2b7..f71fab26a 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_compat_raw.py @@ -33,7 +33,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py index f7ad255c4..1a1ee3808 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_build.py @@ 
-41,7 +41,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -286,7 +286,7 @@ def test_no_datum_txout( or "points to a script hash that is not known" in err_str ), err_str - # check expected fees + # Check expected fees expected_fee_fund = 199_087 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -363,7 +363,7 @@ def test_unlock_tx_wrong_datum( plutus_op=plutus_op_1, ) - # use a wrong datum to try to unlock the funds + # Use a wrong datum to try to unlock the funds plutus_op_2 = plutus_common.PlutusOp( script_file=plutus_common.ALWAYS_SUCCEEDS[plutus_version].script_file, datum_file=plutus_common.DATUM_42, @@ -427,7 +427,7 @@ def test_unlock_non_script_utxo( execution_cost=plutus_common.ALWAYS_SUCCEEDS[plutus_version].execution_cost, ) - # create datum and collateral UTxOs + # Create datum and collateral UTxOs txouts = [ clusterlib.TxOut( @@ -466,7 +466,7 @@ def test_unlock_non_script_utxo( datum_utxo.datum_hash == datum_hash ), f"UTxO should have datum hash '{datum_hash}': {datum_utxo}" - # try to spend the "locked" UTxO + # Try to spend the "locked" UTxO with pytest.raises(clusterlib.CLIError) as excinfo: spend_build._build_spend_locked_txin( @@ -518,7 +518,7 @@ def test_too_big( script_address = pbt_script_addresses[plutus_version] - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py index 4d6dd0b37..218ec8d50 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_datum_raw.py @@ -38,7 +38,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address 
+ # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -295,7 +295,7 @@ def test_unlock_tx_wrong_datum( amount=amount, ) - # use a wrong datum to try to unlock the funds + # Use a wrong datum to try to unlock the funds plutus_op_2 = plutus_common.PlutusOp( script_file=plutus_common.ALWAYS_SUCCEEDS[plutus_version].script_file, datum_file=plutus_common.DATUM_42, @@ -354,7 +354,7 @@ def test_unlock_non_script_utxo( ) assert plutus_op.execution_cost # for mypy - # create datum and collateral UTxOs + # Create datum and collateral UTxOs txouts = [ clusterlib.TxOut( @@ -393,7 +393,7 @@ def test_unlock_non_script_utxo( signing_key_files=[payment_addr.skey_file, dst_addr.skey_file] ) - # try to spend the "locked" UTxO + # Try to spend the "locked" UTxO with pytest.raises(clusterlib.CLIError) as excinfo: spend_raw._spend_locked_txin( @@ -444,7 +444,7 @@ def test_too_big( script_address = pbt_script_addresses[plutus_version] - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py index a7985401f..f7eec8bcf 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_build.py @@ -41,7 +41,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -237,7 +237,7 @@ def test_collateral_w_tokens( else: assert "CollateralContainsNonADA" in exc_str, exc_str - # check expected fees + # Check expected fees expected_fee_fund = 173597 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -299,11 +299,11 @@ def test_same_collateral_txin( 
"expected to be key witnessed but are actually script witnessed: " f'["{script_utxos[0].utxo_hash}#{script_utxos[0].utxo_ix}"]' in err_str - # in 1.35.3 and older + # In 1.35.3 and older or "Expected key witnessed collateral" in err_str ), err_str - # check expected fees + # Check expected fees expected_fee_fund = 168_845 assert helpers.is_in_interval(tx_output_fund.fee, expected_fee_fund, frac=0.15) @@ -449,7 +449,7 @@ def test_two_scripts_spending_one_fail( script_data_file=plutus_op2.datum_file ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files_fund = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -465,7 +465,7 @@ def test_two_scripts_spending_one_fail( amount=script_fund, datum_hash=datum_hash2, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py index 036e4aacb..962686e9a 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_negative_raw.py @@ -46,7 +46,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -425,7 +425,7 @@ def test_collateral_percent( temp_template = common.get_test_id(cluster) amount = 2_000_000 - # increase fixed cost so the required collateral is higher than minimum collateral of 2 ADA + # Increase fixed cost so the required collateral is higher than minimum collateral of 2 ADA execution_cost = plutus_common.ALWAYS_SUCCEEDS[plutus_version].execution_cost execution_cost_increased = dataclasses.replace(execution_cost, fixed_cost=2_000_000) plutus_op = 
plutus_common.PlutusOp( @@ -520,7 +520,7 @@ def test_two_scripts_spending_one_fail( script_data_file=plutus_op2.datum_file ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files_fund = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -536,7 +536,7 @@ def test_two_scripts_spending_one_fail( amount=amount + redeem_cost2.fee + spend_raw.FEE_REDEEM_TXSIZE, datum_hash=datum_hash2, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] @@ -714,7 +714,7 @@ def _fund_script_guessing_game( cluster_obj=cluster_obj, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( payment_addrs[0], cluster_obj=cluster_obj, @@ -949,7 +949,7 @@ def test_wrong_value_inside_range( per_time = plutus_common.GUESSING_GAME_UNTYPED[plutus_version].execution_cost.per_time per_space = plutus_common.GUESSING_GAME_UNTYPED[plutus_version].execution_cost.per_space - # try to spend the "locked" UTxO + # Try to spend the "locked" UTxO fee_redeem = ( round(per_time * cost_per_unit.per_time + per_space * cost_per_unit.per_space) @@ -1115,7 +1115,7 @@ def test_wrong_type( with open(redeemer_file, "w", encoding="utf-8") as outfile: json.dump({"bytes": redeemer_value.hex()}, outfile) - # try to spend the "locked" UTxO + # Try to spend the "locked" UTxO per_time = plutus_common.GUESSING_GAME_UNTYPED[plutus_version].execution_cost.per_time per_space = plutus_common.GUESSING_GAME_UNTYPED[plutus_version].execution_cost.per_space @@ -1194,7 +1194,7 @@ def test_too_big( {"constructor": 0, "fields": [{"bytes": redeemer_value.hex()}]} ) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO redeemer_file = f"{temp_template}.redeemer" with open(redeemer_file, "w", encoding="utf-8") 
as outfile: @@ -1270,7 +1270,7 @@ def test_json_schema_typed_int_bytes_declared( redeemer_content = json.dumps({"constructor": 0, "fields": [{"int": redeemer_value.hex()}]}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1311,7 +1311,7 @@ def test_json_schema_untyped_int_bytes_declared( script_utxos, collateral_utxos, payment_addrs = fund_script_guessing_game redeemer_content = json.dumps({"int": redeemer_value.hex()}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1352,7 +1352,7 @@ def test_json_schema_typed_bytes_int_declared( script_utxos, collateral_utxos, payment_addrs = fund_script_guessing_game redeemer_content = json.dumps({"constructor": 0, "fields": [{"bytes": redeemer_value}]}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1393,7 +1393,7 @@ def test_json_schema_untyped_bytes_int_declared( script_utxos, collateral_utxos, payment_addrs = fund_script_guessing_game redeemer_content = json.dumps({"bytes": redeemer_value}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1434,7 +1434,7 @@ def test_invalid_json( script_utxos, collateral_utxos, payment_addrs = fund_script_guessing_game redeemer_content = f'{{"{redeemer_value}"}}' - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1475,7 +1475,7 @@ def test_json_schema_typed_invalid_type( script_utxos, collateral_utxos, 
payment_addrs = fund_script_guessing_game redeemer_content = json.dumps({redeemer_type: 42}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, @@ -1522,7 +1522,7 @@ def test_json_schema_untyped_invalid_type( script_utxos, collateral_utxos, payment_addrs = fund_script_guessing_game redeemer_content = json.dumps({redeemer_type: 42}) - # try to build a Tx for spending the "locked" UTxO + # Try to build a Tx for spending the "locked" UTxO err = self._failed_tx_build( cluster_obj=cluster, temp_template=temp_template, diff --git a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py index 836214681..2c43cd0fb 100644 --- a/cardano_node_tests/tests/tests_plutus/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus/test_spend_raw.py @@ -45,7 +45,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -69,7 +69,7 @@ def pool_users( no_of_addr=2, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( created_users[0], cluster_obj=cluster, @@ -215,7 +215,7 @@ def test_context_equivalence( amount = 10_000_000 deposit_amount = cluster.g_query.get_address_deposit() - # create stake address registration cert + # Create stake address registration cert stake_addr_reg_cert_file = cluster.g_stake_address.gen_stake_addr_registration_cert( addr_name=f"{temp_template}_addr0", deposit_amt=common.get_conway_address_deposit(cluster_obj=cluster), @@ -224,7 +224,7 @@ def test_context_equivalence( tx_files = clusterlib.TxFiles(certificate_files=[stake_addr_reg_cert_file]) - # generate a dummy redeemer in order to create a txbody from which + # Generate a dummy redeemer in order to create a txbody from which # we can generate a tx and then derive the correct 
redeemer redeemer_file_dummy = pl.Path(f"{temp_template}_dummy_script_context.redeemer") clusterlib_utils.create_script_context( @@ -238,7 +238,7 @@ def test_context_equivalence( execution_cost=plutus_common.CONTEXT_EQUIVALENCE_COST, ) - # fund the script address + # Fund the script address script_utxos, collateral_utxos, __ = spend_raw._fund_script( temp_template=temp_template, cluster_obj=cluster, @@ -267,7 +267,7 @@ def test_context_equivalence( ) assert tx_output_dummy - # generate the "real" redeemer + # Generate the "real" redeemer redeemer_file = pl.Path(f"{temp_template}_script_context.redeemer") plutus_common.create_script_context_w_blockers( @@ -432,7 +432,7 @@ def test_two_scripts_spending( script_file1_v1 = plutus_common.ALWAYS_SUCCEEDS_PLUTUS_V1 execution_cost1_v1 = plutus_common.ALWAYS_SUCCEEDS_COST script_file2_v1 = plutus_common.GUESSING_GAME_PLUTUS_V1 - # this is higher than `plutus_common.GUESSING_GAME_COST`, because the script + # This is higher than `plutus_common.GUESSING_GAME_COST`, because the script # context has changed to include more stuff execution_cost2_v1 = plutus_common.ExecutionCost( per_time=280_668_068, per_space=1_031_312, fixed_cost=79_743 @@ -508,7 +508,7 @@ def test_two_scripts_spending( script_data_file=plutus_op2.datum_file ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files_fund = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -524,7 +524,7 @@ def test_two_scripts_spending( amount=amount + redeem_cost2.fee + spend_raw.FEE_REDEEM_TXSIZE, datum_hash=datum_hash2, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] @@ -561,7 +561,7 @@ def test_two_scripts_spending( # Step 2: spend the "locked" UTxOs - # for mypy + # For mypy assert plutus_op1.execution_cost and 
plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file @@ -629,7 +629,7 @@ def test_two_scripts_spending( utxo=u, coins=[clusterlib.DEFAULT_COIN] ), f"Inputs were NOT spent for `{u.address}`" - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_redeem) @@ -732,7 +732,7 @@ def test_script_invalid( amount=amount, ) - # include any payment txin + # Include any payment txin txins = [ r for r in cluster.g_query.get_utxo( @@ -853,7 +853,7 @@ def test_partial_spending( token_amount_fund = 100 token_amount_spend = 20 - # add extra fee for tokens + # Add extra fee for tokens fee_redeem_txsize = spend_raw.FEE_REDEEM_TXSIZE + 5_000 plutus_op = plutus_common.PlutusOp( @@ -903,7 +903,7 @@ def test_partial_spending( txid_spend = cluster.g_transaction.get_txid(tx_body_file=tx_output_spend.out_file) change_utxos = cluster.g_query.get_utxo(txin=f"{txid_spend}#1") - # check that the expected amounts of Lovelace and native tokens were spent and change UTxOs + # Check that the expected amounts of Lovelace and native tokens were spent and change UTxOs # with appropriate datum hash were created token_amount_exp = token_amount_fund - token_amount_spend assert len(change_utxos) == len(tokens_spend_rec) + 1 @@ -986,7 +986,7 @@ def test_collaterals( ) if collateral_num: - # instead of using the collateral UTxO created by `_fund_script`, create multiple new + # Instead of using the collateral UTxO created by `_fund_script`, create multiple new # collateral UTxOs with the combined amount matching the original UTxO collateral_amount_part = int(fund_collateral_utxos[0].amount // collateral_num) + 1 txouts_collaterals = [ diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py index 1f96bf738..9532d6cc0 100644 
--- a/cardano_node_tests/tests/tests_plutus_v2/mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_build.py @@ -33,14 +33,14 @@ def _fund_issuer( address=issuer_addr.address, amount=amount, ), - # for collateral + # For collateral clusterlib.TxOut(address=issuer_addr.address, amount=minting_cost.collateral), ] reference_amount = 0 if reference_script: reference_amount = 20_000_000 - # for reference UTxO + # For reference UTxO txouts.append( clusterlib.TxOut( address=issuer_addr.address, @@ -56,7 +56,7 @@ def _fund_issuer( tx_files=tx_files, txouts=txouts, fee_buffer=2_000_000, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) tx_signed = cluster_obj.g_transaction.sign_tx( diff --git a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py index 032c517e8..bb93ab96f 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/mint_raw.py @@ -8,7 +8,7 @@ LOGGER = logging.getLogger(__name__) -# approx. fee for Tx size +# Approx. 
fee for Tx size FEE_MINT_TXSIZE = 400_000 @@ -41,7 +41,7 @@ def _fund_issuer( signing_key_files=[payment_addr.skey_file], ) - # for reference script + # For reference script reference_amount = 0 txouts_reference = [] if reference_script: @@ -75,7 +75,7 @@ def _fund_issuer( tx_files=tx_files, # TODO: workaround for https://github.com/IntersectMBO/cardano-node/issues/1892 witness_count_add=2, - # don't join 'change' and 'collateral' txouts, we need separate UTxOs + # Don't join 'change' and 'collateral' txouts, we need separate UTxOs join_txouts=False, ) diff --git a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py index 0a9e53382..77199bde5 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_build.py @@ -64,7 +64,7 @@ def _build_fund_script( """ # pylint: disable=too-many-arguments - # for mypy + # For mypy assert plutus_op.execution_cost script_fund = 200_000_000 @@ -78,7 +78,7 @@ def _build_fund_script( protocol_params=cluster.g_query.get_protocol_params(), ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], @@ -101,7 +101,7 @@ def _build_fund_script( plutus_op.datum_value if plutus_op.datum_value and not use_inline_datum else "" ), ), - # for collateral + # For collateral clusterlib.TxOut( address=dst_addr.address, amount=collateral_amount or redeem_cost.collateral ), @@ -158,7 +158,7 @@ def _build_fund_script( if VERSIONS.transaction_era >= VERSIONS.BABBAGE: dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output) - # check if inline datum is returned by 'query utxo' + # Check if inline datum is returned by 'query utxo' if use_inline_datum: if plutus_op.datum_file: with open(plutus_op.datum_file, encoding="utf-8") as json_datum: @@ -170,7 +170,7 @@ def 
_build_fund_script( script_utxos[0].inline_datum == expected_datum ), "The inline datum returned by 'query utxo' is different than the expected" - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output) return script_utxos, collateral_utxos, reference_utxo, tx_output diff --git a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py index c77555b2f..3a2dd9f29 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/spend_raw.py @@ -10,7 +10,7 @@ LOGGER = logging.getLogger(__name__) -# approx. fee for Tx size +# Approx. fee for Tx size FEE_REDEEM_TXSIZE = 400_000 PLUTUS_OP_ALWAYS_SUCCEEDS = plutus_common.PlutusOp( @@ -61,7 +61,7 @@ def _fund_script( addr_name=temp_template, payment_script_file=plutus_op.script_file ) - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addr.skey_file], @@ -92,13 +92,13 @@ def _fund_script( else "" ), ), - # for collateral + # For collateral clusterlib.TxOut( address=dst_addr.address, amount=collateral_amount or redeem_cost.collateral ), ] - # for reference script + # For reference script if use_reference_script: txouts.append( clusterlib.TxOut( @@ -143,7 +143,7 @@ def _fund_script( if VERSIONS.transaction_era >= VERSIONS.BABBAGE: dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) - # check if inline datum is returned by 'query utxo' + # Check if inline datum is returned by 'query utxo' if use_inline_datum: expected_datum = None if plutus_op.datum_file: @@ -156,7 +156,7 @@ def _fund_script( expected_datum is None or script_utxos[0].inline_datum == expected_datum ), "The inline datum returned by 'query utxo' is different than the expected" - # check "transaction view" + # Check "transaction view" 
tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) return script_utxos, collateral_utxos, reference_utxo, tx_raw_output diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py index c527dd5c3..4827cd748 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_build.py @@ -37,7 +37,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -222,7 +222,7 @@ def test_minting_ref_one_token( utxo=reference_utxo ), "Reference UTxO was spent" - # check expected fees + # Check expected fees expected_fees: dict = { "v2": { "fee_1": 252_929, @@ -401,10 +401,10 @@ def test_reference_inputs_visibility( reference_script=plutus_common.MINTING_CHECK_REF_INPUTS_PLUTUS_V2, ) - # for mypy + # For mypy assert reference_utxo - # the redeemer file will be composed by the UTxO of the reference input + # The redeemer file will be composed by the UTxO of the reference input redeemer_file = f"{temp_template}.redeemer" with open(redeemer_file, "w", encoding="utf-8") as outfile: json.dump( @@ -454,7 +454,7 @@ def test_reference_inputs_visibility( *mint_txouts, ] - # if the redeemer is not the expected, script evaluation will fail and should show + # If the redeemer is not the expected, script evaluation will fail and should show # the expected error message defined by the plutus script if not valid_redeemer: with pytest.raises(clusterlib.CLIError) as excinfo: @@ -485,11 +485,11 @@ def test_reference_inputs_visibility( tx_name=f"{temp_template}_step2", ) - # the plutus script checks if the redeemer complies with the reference inputs provided + # The plutus script checks if the redeemer complies with the reference inputs provided # so a successful submit of the tx proves that the script can see the reference inputs 
cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) - # check that the token was minted + # Check that the token was minted out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_output_step2) token_utxo = clusterlib.filter_utxos( utxos=out_utxos, address=issuer_addr.address, coin=token @@ -498,7 +498,7 @@ def test_reference_inputs_visibility( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference UTxO was spent" @@ -558,7 +558,7 @@ def test_reference_scripts_visibility( clusterlib.TxOut(address=issuer_addr.address, amount=token_amount, coin=token) ] - # the redeemer file will be composed by the script hash + # The redeemer file will be composed by the script hash redeemer_file = f"{temp_template}.redeemer" with open(redeemer_file, "w", encoding="utf-8") as outfile: json.dump( @@ -583,7 +583,7 @@ def test_reference_scripts_visibility( *mint_txouts, ] - # if the redeemer is not the expected the script evaluation will fail and should show + # If the redeemer is not the expected the script evaluation will fail and should show # the expected error message defined by the plutus script if not valid_redeemer: with pytest.raises(clusterlib.CLIError) as excinfo: @@ -615,11 +615,11 @@ def test_reference_scripts_visibility( tx_name=f"{temp_template}_step2", ) - # the plutus script checks if the redeemer complies with the reference script provided + # The plutus script checks if the redeemer complies with the reference script provided # so a successful submit of the tx proves that the script can see the reference script cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) - # check that the token was minted + # Check that the token was minted out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_output_step2) token_utxo = clusterlib.filter_utxos( 
utxos=out_utxos, address=issuer_addr.address, coin=token @@ -628,7 +628,7 @@ def test_reference_scripts_visibility( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference UTxO was spent" @@ -681,7 +681,7 @@ def test_inline_datum_visibility( inline_datum=plutus_common.DATUM_42, ) - # to check inline datum on readonly reference input + # To check inline datum on readonly reference input with_reference_input = scenario != "reference_script" different_datum = scenario == "different_datum" datum_file = plutus_common.DATUM_43_TYPED if different_datum else plutus_common.DATUM_42 @@ -727,7 +727,7 @@ def test_inline_datum_visibility( *mint_txouts, ] - # the plutus script checks if all reference inputs have the same inline datum + # The plutus script checks if all reference inputs have the same inline datum # it will fail if the inline datums are not the same in all reference inputs and # succeed if all inline datums match if different_datum: @@ -763,7 +763,7 @@ def test_inline_datum_visibility( cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) - # check that the token was minted + # Check that the token was minted out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_output_step2) token_utxo = clusterlib.filter_utxos( utxos=out_utxos, address=issuer_addr.address, coin=token @@ -772,7 +772,7 @@ def test_inline_datum_visibility( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference UTxO was spent" diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py index 780fbc052..6f8c9bf7f 100644 
--- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_build.py @@ -34,7 +34,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -151,7 +151,7 @@ def test_minting_with_unbalanced_total_collateral( tx_name=f"{temp_template}_step2", ) - # it should NOT be possible to mint with an unbalanced total collateral + # It should NOT be possible to mint with an unbalanced total collateral with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) err_str = str(excinfo.value) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py index da8e43dea..9299306e1 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_negative_raw.py @@ -33,7 +33,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -130,7 +130,7 @@ def test_minting_with_limited_collateral( *mint_txouts, ] - # limit the amount of collateral that can be used and balance the return collateral txout + # Limit the amount of collateral that can be used and balance the return collateral txout total_collateral_amount = minting_cost.min_collateral // 2 return_collateral_txouts = [ clusterlib.TxOut( @@ -154,7 +154,7 @@ def test_minting_with_limited_collateral( tx_name=f"{temp_template}_step2", ) - # it should NOT be possible to mint with a collateral with insufficient funds + # It should NOT be possible to mint with a collateral with insufficient funds with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) err_str 
= str(excinfo.value) @@ -263,7 +263,7 @@ def test_minting_with_unbalanced_total_collateral( tx_name=f"{temp_template}_step2", ) - # it should NOT be possible to mint with an unbalanced total collateral + # It should NOT be possible to mint with an unbalanced total collateral with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx(tx_file=tx_signed_step2, txins=mint_utxos) err_str = str(excinfo.value) diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py index babf8d1d6..a6fc9775b 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_raw.py @@ -37,7 +37,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -193,7 +193,7 @@ def test_minting_two_tokens( mint=plutus_mint_data, tx_files=tx_files_step2, fee=minting_cost.fee + fee_txsize, - # ttl is optional in this test + # Ttl is optional in this test invalid_hereafter=cluster.g_query.get_slot_no() + 200, ) tx_signed_step2 = cluster.g_transaction.sign_tx( @@ -226,7 +226,7 @@ def test_minting_two_tokens( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check tx view + # Check tx view tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output_step2) @allure.link(helpers.get_vcs_link()) @@ -277,7 +277,7 @@ def test_datum_hash_visibility( datum_file=plutus_common.DATUM_42, ) - # to check datum hash on readonly reference input + # To check datum hash on readonly reference input with_reference_input = scenario != "reference_script" different_datum = scenario == "different_datum" datum_file = plutus_common.DATUM_43_TYPED if different_datum else plutus_common.DATUM_42 @@ -308,7 +308,7 @@ def test_datum_hash_visibility( script_data_file=plutus_common.DATUM_42 ) - # the redeemer file will be composed by the datum 
hash + # The redeemer file will be composed by the datum hash redeemer_file = f"{temp_template}.redeemer" with open(redeemer_file, "w", encoding="utf-8") as outfile: json.dump({"bytes": datum_hash}, outfile) @@ -336,7 +336,7 @@ def test_datum_hash_visibility( *mint_txouts, ] - # the plutus script checks if all reference inputs have the same datum hash + # The plutus script checks if all reference inputs have the same datum hash # it will fail if the datums hash are not the same in all reference inputs and # succeed if all datums hash match if different_datum: @@ -372,7 +372,7 @@ def test_datum_hash_visibility( readonly_reference_txins=reference_input, ) - # check that the token was minted + # Check that the token was minted out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_raw_output) token_utxo = clusterlib.filter_utxos( utxos=out_utxos, address=issuer_addr.address, coin=token @@ -381,7 +381,7 @@ def test_datum_hash_visibility( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference UTxO was spent" diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py index 6f05faf83..cfccb736b 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_build.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -177,11 +177,11 @@ def test_use_secp_builtin_functions( except clusterlib.CLIError as err: before_pv8 = cluster.g_query.get_protocol_params()["protocolVersion"]["major"] < 8 - # the SECP256k1 functions should work from protocol version 8 + # The SECP256k1 functions should 
work from protocol version 8 if not before_pv8: raise - # before protocol version 8 the SECP256k1 is blocked or limited by high cost model + # Before protocol version 8 the SECP256k1 is blocked or limited by high cost model err_msg = str(err) is_forbidden = ( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py index 56e943085..51e1bccc3 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_mint_secp256k1_raw.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -261,4 +261,4 @@ def test_negative_secp_builtin_functions( assert is_forbidden or is_overspending, err_msg else: assert re.search(expected_error_messages[test_vector], err_msg), err_msg - # assert expected_error_messages[test_vector] in err_msg, err_msg + # Assert expected_error_messages[test_vector] in err_msg, err_msg diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py index 1f06700e8..9356c4166 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_build.py @@ -37,7 +37,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -100,12 +100,12 @@ def test_txout_locking( ), ) - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address ( script_utxos, @@ -123,7 +123,7 @@ def test_txout_locking( ) assert reference_utxo or not use_reference_script, "No 
reference script UTxO" - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -174,17 +174,17 @@ def test_txout_locking( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that script address UTxO was spent + # Check that script address UTxO was spent assert not cluster.g_query.get_utxo( utxo=script_utxos[0] ), f"Script address UTxO was NOT spent `{script_utxos}`" - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference input was spent" - # check expected fees + # Check expected fees if use_reference_script: expected_fee_fund = 258_913 expected_fee_redeem = 233_889 @@ -244,14 +244,14 @@ def test_min_required_utxo( plutus_op = spend_build.PLUTUS_OP_GUESSING_GAME - # for mypy + # For mypy assert plutus_op.datum_file script_address = cluster.g_address.gen_payment_addr( addr_name=temp_template, payment_script_file=plutus_op.script_file ) - # create a Tx outputs + # Create a Tx outputs tx_files = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -268,7 +268,7 @@ def test_min_required_utxo( ] if use_token: - # create the token + # Create the token token_rand = clusterlib.get_rand_str(5) token = clusterlib_utils.new_tokens( *[f"qacoin{token_rand}".encode().hex()], diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py index 1ce1d4e51..36e997a0b 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_build.py @@ -38,7 +38,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -64,7 +64,7 @@ def _build_spend_locked_txin( 
total_collateral_amount: int | None = None, return_collateral_txouts: clusterlib.OptionalTxOuts = (), ) -> clusterlib.TxRawOutput: - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.redeemer_cbor_file @@ -88,7 +88,7 @@ def _build_spend_locked_txin( txouts_redeem = [ clusterlib.TxOut(address=dst_addr.address, amount=2_000_000), ] - # include any payment txin + # Include any payment txin txins = [ r for r in cluster.g_query.get_utxo( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py index e639aefe0..bbf4e5c10 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_collateral_raw.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py index c9b90b5b6..767431d58 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_build.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py index 232009a13..583546e92 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_compat_raw.py @@ -35,7 +35,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, diff --git 
a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py index 1a482358c..44124bc92 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_build.py @@ -40,7 +40,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -73,7 +73,7 @@ def test_check_inline_datum_cost(self, cluster: clusterlib.ClusterLib): addr_name=temp_template, payment_script_file=plutus_op.script_file ) - # small datum + # Small datum txouts_with_small_inline_datum = [ clusterlib.TxOut( @@ -113,7 +113,7 @@ def test_check_inline_datum_cost(self, cluster: clusterlib.ClusterLib): min_utxo_small_datum_hash, expected_min_small_datum_hash, frac=0.15 ) - # big datum + # Big datum txouts_with_big_inline_datum = [ clusterlib.TxOut( @@ -153,7 +153,7 @@ def test_check_inline_datum_cost(self, cluster: clusterlib.ClusterLib): min_utxo_big_datum_hash, expected_min_big_datum_hash, frac=0.15 ) - # check that the min UTxO value with an inline datum depends on the size of the datum + # Check that the min UTxO value with an inline datum depends on the size of the datum assert ( min_utxo_small_inline_datum < min_utxo_small_datum_hash @@ -210,7 +210,7 @@ def test_lock_tx_invalid_datum( execution_cost=plutus_common.ALWAYS_SUCCEEDS_V2_COST, ) - # create a Tx output with an invalid inline datum at the script address + # Create a Tx output with an invalid inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: spend_build._build_fund_script( @@ -246,11 +246,11 @@ def test_lock_tx_v1_script( execution_cost=plutus_common.ALWAYS_SUCCEEDS_COST, ) - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx 
output with an inline datum at the script address script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, cluster=cluster, @@ -259,7 +259,7 @@ def test_lock_tx_v1_script( plutus_op=plutus_op, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -364,12 +364,12 @@ def test_lock_tx_datum_as_witness( plutus_op = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, cluster=cluster, diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py index 49b9a40cb..410d24937 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_datum_raw.py @@ -39,7 +39,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -111,7 +111,7 @@ def test_lock_tx_invalid_datum( execution_cost=plutus_common.ALWAYS_SUCCEEDS_COST, ) - # for mypy + # For mypy assert plutus_op.execution_cost redeem_cost = plutus_common.compute_cost( @@ -119,7 +119,7 @@ def test_lock_tx_invalid_datum( protocol_params=cluster.g_query.get_protocol_params(), ) - # create a Tx output with an invalid inline datum at the script address + # Create a Tx output with an invalid inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: spend_raw._fund_script( @@ -159,7 +159,7 @@ def test_lock_tx_v1_script( execution_cost=plutus_common.ALWAYS_SUCCEEDS_COST, ) - # for mypy + # For mypy assert 
plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -252,7 +252,7 @@ def test_lock_tx_big_datum( ) assert plutus_op.execution_cost # for mypy - # create a Tx output with a datum hash at the script address + # Create a Tx output with a datum hash at the script address tx_files = clusterlib.TxFiles( signing_key_files=[payment_addrs[0].skey_file], @@ -298,7 +298,7 @@ def test_lock_tx_datum_as_witness( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py index add13ae37..a68027458 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_raw.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -77,7 +77,7 @@ def test_txout_locking( plutus_op = spend_raw.PLUTUS_OP_GUESSING_GAME_UNTYPED - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -152,28 +152,28 @@ def test_txout_locking( == dst_init_balance + amount ), f"Incorrect balance for destination address `{payment_addrs[1].address}`" - # check that script address UTxO was spent + # Check that script address UTxO was spent assert not cluster.g_query.get_utxo( utxo=script_utxos[0] ), f"Script address UTxO was NOT spent `{script_utxos[0]}`" - # check that reference UTxO was NOT spent + # Check that reference UTxO was NOT spent assert not reference_utxo or cluster.g_query.get_utxo( utxo=reference_utxo ), "Reference input was spent" - # check expected fees + # Check expected fees expected_fee_redeem = 176_024 if use_reference_script else 179_764 fee = ( - # for tx size + # 
For tx size cluster.g_transaction.estimate_fee( txbody_file=tx_output_redeem.out_file, txin_count=len(tx_output_redeem.txins), txout_count=len(tx_output_redeem.txouts), witness_count=len(tx_files_redeem.signing_key_files), ) - # for script execution + # For script execution + redeem_cost.fee ) @@ -211,7 +211,7 @@ def test_datum_bytes_in_dbsync( protocol_params=cluster.g_query.get_protocol_params(), ) - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_utxos, *__ = spend_raw._fund_script( temp_template=temp_template, cluster=cluster, @@ -224,7 +224,7 @@ def test_datum_bytes_in_dbsync( ) script_utxo = script_utxos[0] - # double-check that the UTxO datum hash corresponds to the datum CBOR file + # Double-check that the UTxO datum hash corresponds to the datum CBOR file datum_hash = cluster.g_transaction.get_hash_script_data( script_data_cbor_file=plutus_common.DATUM_FINITE_TYPED_CBOR ) @@ -234,7 +234,7 @@ def test_datum_bytes_in_dbsync( dbsync_queries.query_datum(datum_hash=script_utxo.inline_datum_hash) ) - # check that datum from db-sync produces the original datum hash + # Check that datum from db-sync produces the original datum hash db_cbor_hex = datum_db_response[0].bytes.hex() db_cbor_bin = binascii.unhexlify(db_cbor_hex) db_cbor_file = f"{temp_template}_db_datum.cbor" @@ -247,12 +247,12 @@ def test_datum_bytes_in_dbsync( db_datum_hash == datum_hash ), "Datum hash of bytes in db-sync doesn't correspond to the original datum hash" - # check that datum bytes in db-sync corresponds to the original datum + # Check that datum bytes in db-sync corresponds to the original datum with open(plutus_common.DATUM_FINITE_TYPED_CBOR, "rb") as in_fp: orig_cbor_bin = in_fp.read() orig_cbor_hex = orig_cbor_bin.hex() - # see https://github.com/IntersectMBO/cardano-db-sync/issues/1214 + # See https://github.com/IntersectMBO/cardano-db-sync/issues/1214 # and 
https://github.com/IntersectMBO/cardano-node/issues/4433 if db_cbor_hex != orig_cbor_hex: issues.node_4433.finish_test() diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py index 9bc925fb2..5704b3944 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_build.py @@ -39,7 +39,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -78,12 +78,12 @@ def test_use_reference_input( reference_input_amount = 2_000_000 - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, @@ -101,7 +101,7 @@ def test_use_reference_input( amount=reference_input_amount, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -147,7 +147,7 @@ def test_use_reference_input( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that the reference input was not spent + # Check that the reference input was not spent assert cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was spent `{reference_input[0]}`" @@ -157,7 +157,7 @@ def test_use_reference_input( tx_output_redeem.fee, expected_redeem_fee, frac=0.15 ), "Expected fee doesn't match the actual fee" - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output_redeem) @@ -185,12 +185,12 @@ def test_same_input_as_reference_input( 
reference_input_amount = 2_000_000 - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, @@ -209,7 +209,7 @@ def test_same_input_as_reference_input( amount=reference_input_amount, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -259,12 +259,12 @@ def test_same_input_as_reference_input( raise return - # check that the input used also as reference was spent + # Check that the input used also as reference was spent assert not cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was NOT spent `{reference_input[0]}`" - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) @allure.link(helpers.get_vcs_link()) @@ -286,14 +286,14 @@ def test_use_same_reference_input_multiple_times( temp_template = common.get_test_id(cluster) amount = 2_000_000 - # fund payment address + # Fund payment address clusterlib_utils.fund_from_faucet( payment_addrs[1], cluster_obj=cluster, all_faucets=cluster_manager.cache.addrs_data, ) - # create the reference input + # Create the reference input reference_input = spend_build._build_reference_txin( temp_template=temp_template, @@ -302,7 +302,7 @@ def test_use_same_reference_input_multiple_times( amount=amount, ) - # build 2 tx using the same readonly reference input + # Build 2 tx using the same readonly reference input tx_address_combinations = [ {"payment_addr": payment_addrs[0], "dst_addr": payment_addrs[1]}, @@ -339,7 +339,7 @@ def test_use_same_reference_input_multiple_times( clusterlib_utils.check_txins_spent(cluster_obj=cluster, txins=txins) - # check that the reference input was not spent + # Check that the reference input was not spent assert 
cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was spent `{reference_input[0]}`" @@ -389,12 +389,12 @@ def test_reference_input_non_plutus( ) cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_output.txins) - # check that the reference input was not spent + # Check that the reference input was not spent assert cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was spent `{reference_input[0]}`" - # check expected balances + # Check expected balances out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_output) assert ( clusterlib.filter_utxos(utxos=out_utxos, address=src_addr.address)[0].amount @@ -406,7 +406,7 @@ def test_reference_input_non_plutus( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_output) @@ -435,12 +435,12 @@ def test_reference_spent_output( reference_input_amount = 2_000_000 - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, @@ -459,7 +459,7 @@ def test_reference_spent_output( amount=reference_input_amount, ) - # spend the output that will be used as reference input + # Spend the output that will be used as reference input tx_output_spend_reference_input = cluster.g_transaction.build_tx( src_address=payment_addrs[1].address, @@ -477,12 +477,12 @@ def test_reference_spent_output( tx_file=tx_signed, txins=tx_output_spend_reference_input.txins ) - # check that the input used also as reference was spent + # Check that the input used also as reference was spent assert not cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was NOT 
spent `{reference_input[0]}`" - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -550,12 +550,12 @@ def test_v1_script_with_reference_input( execution_cost=plutus_common.ALWAYS_SUCCEEDS_COST, ) - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_build._build_fund_script( temp_template=temp_template, @@ -566,7 +566,7 @@ def test_v1_script_with_reference_input( use_inline_datum=False, ) - # create the reference input + # Create the reference input reference_input = spend_build._build_reference_txin( temp_template=temp_template, @@ -575,7 +575,7 @@ def test_v1_script_with_reference_input( amount=2_000_000, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py index cc80c8d7c..7039a89b6 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_inputs_raw.py @@ -38,7 +38,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -76,7 +76,7 @@ def test_use_reference_input( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -86,7 +86,7 @@ def test_use_reference_input( protocol_params=cluster.g_query.get_protocol_params(), ) - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_raw._fund_script( temp_template=temp_template, @@ -105,7 +105,7 @@ def test_use_reference_input( 
amount=amount, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -144,12 +144,12 @@ def test_use_reference_input( script_txins=plutus_txins, ) - # check that the reference input was not spent + # Check that the reference input was not spent assert cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was spent `{reference_input[0]}`" - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) @allure.link(helpers.get_vcs_link()) @@ -175,7 +175,7 @@ def test_same_input_as_reference_input( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -185,7 +185,7 @@ def test_same_input_as_reference_input( protocol_params=cluster.g_query.get_protocol_params(), ) - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_raw._fund_script( temp_template=temp_template, @@ -205,7 +205,7 @@ def test_same_input_as_reference_input( amount=amount, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -247,12 +247,12 @@ def test_same_input_as_reference_input( raise return - # check that the reference input was spent + # Check that the reference input was spent assert not cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was NOT spent `{reference_input[0]}`" - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_output_redeem) @allure.link(helpers.get_vcs_link()) @@ -294,12 +294,12 @@ def test_reference_input_non_plutus( tx_files=tx_files, ) - # check that the reference input was not spent + # Check that the reference input was not spent assert cluster.g_query.get_utxo( utxo=reference_input[0] ), f"The reference input was spent 
`{reference_input[0]}`" - # check expected balances + # Check expected balances out_utxos = cluster.g_query.get_utxo(tx_raw_output=tx_raw_output) assert ( clusterlib.filter_utxos(utxos=out_utxos, address=src_addr.address)[0].amount @@ -311,7 +311,7 @@ def test_reference_input_non_plutus( common.check_missing_utxos(cluster_obj=cluster, utxos=out_utxos) - # check "transaction view" + # Check "transaction view" tx_view.check_tx_view(cluster_obj=cluster, tx_raw_output=tx_raw_output) dbsync_utils.check_tx(cluster_obj=cluster, tx_raw_output=tx_raw_output) @@ -340,7 +340,7 @@ def test_reference_spent_output( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -350,7 +350,7 @@ def test_reference_spent_output( protocol_params=cluster.g_query.get_protocol_params(), ) - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_raw._fund_script( temp_template=temp_template, @@ -372,7 +372,7 @@ def test_reference_spent_output( reference_utxo = f"{reference_input[0].utxo_hash}#{reference_input[0].utxo_ix}" - # spend the output that will be used as reference input + # Spend the output that will be used as reference input cluster.g_transaction.send_tx( src_address=payment_addrs[0].address, @@ -382,12 +382,12 @@ def test_reference_spent_output( tx_files=clusterlib.TxFiles(signing_key_files=[payment_addrs[1].skey_file]), ) - # check that the input used also as reference was spent + # Check that the input used also as reference was spent reference_input_utxo = cluster.g_query.get_utxo(txin=reference_utxo) assert not reference_input_utxo, f"The reference input was NOT spent `{reference_utxo}`" - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -450,7 +450,7 @@ def test_v1_script_with_reference_input( execution_cost=plutus_common.ALWAYS_SUCCEEDS_COST, ) - # for 
mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -460,7 +460,7 @@ def test_v1_script_with_reference_input( protocol_params=cluster.g_query.get_protocol_params(), ) - # create the necessary Tx outputs + # Create the necessary Tx outputs script_utxos, collateral_utxos, __, __ = spend_raw._fund_script( temp_template=temp_template, @@ -472,7 +472,7 @@ def test_v1_script_with_reference_input( redeem_cost=redeem_cost, ) - # create the reference input + # Create the reference input reference_input = spend_raw._build_reference_txin( temp_template=temp_template, cluster=cluster, @@ -480,7 +480,7 @@ def test_v1_script_with_reference_input( amount=amount, ) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py index 7ba0b9ce1..4ec06c788 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_build.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -79,12 +79,12 @@ def test_reference_multiple_script( else: plutus_op2 = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_fund = 100_000_000 @@ -121,7 +121,7 @@ def test_reference_multiple_script( amount=script_fund, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference 
script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, @@ -132,7 +132,7 @@ def test_reference_multiple_script( amount=10_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -163,7 +163,7 @@ def test_reference_multiple_script( collateral_utxos1 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 4) collateral_utxos2 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 5) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -216,14 +216,14 @@ def test_reference_multiple_script( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) ), f"Script address UTxOs were NOT spent - `{script_utxos1}` and `{script_utxos2}`" - # check min required UTxO with reference script + # Check min required UTxO with reference script min_required_utxo = cluster.g_transaction.calculate_min_req_utxo(txouts=[txouts[2]]).value @@ -249,12 +249,12 @@ def test_reference_same_script( plutus_op = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_fund = 200_000_000 @@ -282,13 +282,13 @@ def test_reference_same_script( clusterlib.TxOut( address=script_address_2, amount=script_fund, inline_datum_file=plutus_op.datum_file ), - # for reference script + # For reference script clusterlib.TxOut( 
address=payment_addrs[1].address, amount=2_000_000, reference_script_file=plutus_op.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost.collateral), ] @@ -318,7 +318,7 @@ def test_reference_same_script( collateral_utxos1 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 3) collateral_utxos2 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 4) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -371,7 +371,7 @@ def test_reference_same_script( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) @@ -397,12 +397,12 @@ def test_mix_reference_attached_script( plutus_op1 = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS plutus_op2 = spend_build.PLUTUS_OP_GUESSING_GAME_UNTYPED - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create the necessary UTxOs + # Create the necessary UTxOs script_fund = 100_000_000 @@ -439,13 +439,13 @@ def test_mix_reference_attached_script( amount=script_fund, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=2_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] @@ -475,7 +475,7 @@ def 
test_mix_reference_attached_script( collateral_utxos1 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 3) collateral_utxos2 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 4) - # spend the "locked" UTxOs + # Spend the "locked" UTxOs plutus_txins = [ clusterlib.ScriptTxIn( @@ -531,13 +531,13 @@ def test_mix_reference_attached_script( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) ), f"Script address UTxOs were NOT spent - `{script_utxos1}` and `{script_utxos2}`" - # check that the script hash is included for all scripts + # Check that the script hash is included for all scripts for script in plutus_costs: assert script.get( "scriptHash" @@ -570,12 +570,12 @@ def test_spend_reference_script( reference_addr = payment_addrs[1] if address_type == "byron": - # create reference UTxO on Byron address + # Create reference UTxO on Byron address reference_addr = clusterlib_utils.gen_byron_addr( cluster_obj=cluster, name_template=temp_template ) - # create a Tx output with the reference script + # Create a Tx output with the reference script reference_utxo, __ = clusterlib_utils.create_reference_utxo( temp_template=temp_template, cluster_obj=cluster, @@ -587,7 +587,7 @@ def test_spend_reference_script( assert reference_utxo.reference_script, "Reference script is missing" assert reference_utxo.amount == amount, "Incorrect amount transferred" - # spend the Tx output with the reference script + # Spend the Tx output with the reference script txouts = [clusterlib.TxOut(address=payment_addr.address, amount=-1)] tx_files = clusterlib.TxFiles(signing_key_files=[reference_addr.skey_file]) @@ -608,7 +608,7 @@ def test_spend_reference_script( cluster.g_transaction.submit_tx(tx_file=tx_signed, 
txins=tx_output.txins) - # check that reference script utxo was spent + # Check that reference script utxo was spent assert not cluster.g_query.get_utxo( utxo=reference_utxo ), f"Reference script UTxO was NOT spent: '{reference_utxo}`" @@ -731,7 +731,7 @@ def test_not_a_script( execution_cost=plutus_common.ALWAYS_SUCCEEDS_V2_COST, ) - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: spend_build._build_fund_script( @@ -762,12 +762,12 @@ def test_two_scripts_one_fail( plutus_op1 = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS plutus_op2 = spend_build.PLUTUS_OP_ALWAYS_FAILS - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_fund = 100_000_000 @@ -804,7 +804,7 @@ def test_two_scripts_one_fail( amount=script_fund, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=2_000_000, @@ -815,7 +815,7 @@ def test_two_scripts_one_fail( amount=10_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost2.collateral), ] @@ -846,7 +846,7 @@ def test_two_scripts_one_fail( collateral_utxos1 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 4) collateral_utxos2 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 5) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -915,12 +915,12 @@ def 
test_lock_tx_v1_reference_script( execution_cost=plutus_common.ALWAYS_SUCCEEDS["v1"].execution_cost, ) - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_utxos, collateral_utxos, reference_utxo, __ = spend_build._build_fund_script( temp_template=temp_template, @@ -932,7 +932,7 @@ def test_lock_tx_v1_reference_script( ) assert reference_utxo, "No reference script UTxO" - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -996,12 +996,12 @@ def test_v1_attached_v2_reference( plutus_op2 = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_fund = 200_000_000 @@ -1036,13 +1036,13 @@ def test_v1_attached_v2_reference( amount=script_fund, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=2_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -1072,7 +1072,7 @@ def test_v1_attached_v2_reference( collateral_utxos1 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 3) collateral_utxos2 = clusterlib.filter_utxos(utxos=fund_utxos, utxo_ix=utxo_ix_offset + 4) - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ 
-1135,12 +1135,12 @@ def test_lock_byron_reference_script( plutus_op = spend_build.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_utxos, collateral_utxos, *__ = spend_build._build_fund_script( temp_template=temp_template, @@ -1151,7 +1151,7 @@ def test_lock_byron_reference_script( use_reference_script=False, ) - # create reference UTxO on Byron address + # Create reference UTxO on Byron address byron_addr = clusterlib_utils.gen_byron_addr( cluster_obj=cluster, name_template=temp_template ) @@ -1166,7 +1166,7 @@ def test_lock_byron_reference_script( assert reference_utxo.address == byron_addr.address, "Incorrect address for reference UTxO" assert reference_utxo.reference_script, "Reference script is missing" - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py index 0c8d51398..f6a3e92e3 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_ref_scripts_raw.py @@ -36,7 +36,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -80,12 +80,12 @@ def test_reference_multiple_script( amount = 2_000_000 - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address 
script_address_1 = cluster.g_address.gen_payment_addr( addr_name=f"{temp_template}_addr1", payment_script_file=plutus_op1.script_file @@ -120,7 +120,7 @@ def test_reference_multiple_script( amount=amount + redeem_cost_2.fee + spend_raw.FEE_REDEEM_TXSIZE, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, @@ -131,7 +131,7 @@ def test_reference_multiple_script( amount=10_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -154,7 +154,7 @@ def test_reference_multiple_script( collateral_utxos1 = cluster.g_query.get_utxo(txin=f"{txid}#4") collateral_utxos2 = cluster.g_query.get_utxo(txin=f"{txid}#5") - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -208,7 +208,7 @@ def test_reference_multiple_script( txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins], ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) @@ -234,12 +234,12 @@ def test_reference_same_script( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_address_1 = cluster.g_address.gen_payment_addr( addr_name=f"{temp_template}_addr1", payment_script_file=plutus_op.script_file @@ -269,13 +269,13 @@ def test_reference_same_script( amount=amount + redeem_cost.fee + spend_raw.FEE_REDEEM_TXSIZE, 
inline_datum_file=plutus_op.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, reference_script_file=plutus_op.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost.collateral), ] @@ -297,7 +297,7 @@ def test_reference_same_script( collateral_utxos1 = cluster.g_query.get_utxo(txin=f"{txid}#3") collateral_utxos2 = cluster.g_query.get_utxo(txin=f"{txid}#4") - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -351,7 +351,7 @@ def test_reference_same_script( txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins], ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) @@ -378,12 +378,12 @@ def test_mix_reference_attached_script( plutus_op1 = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS plutus_op2 = spend_raw.PLUTUS_OP_GUESSING_GAME_UNTYPED - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_address_1 = cluster.g_address.gen_payment_addr( addr_name=f"{temp_template}_addr1", payment_script_file=plutus_op1.script_file @@ -418,13 +418,13 @@ def test_mix_reference_attached_script( amount=amount + redeem_cost_2.fee + spend_raw.FEE_REDEEM_TXSIZE, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, 
reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -446,7 +446,7 @@ def test_mix_reference_attached_script( collateral_utxos1 = cluster.g_query.get_utxo(txin=f"{txid}#3") collateral_utxos2 = cluster.g_query.get_utxo(txin=f"{txid}#4") - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -499,7 +499,7 @@ def test_mix_reference_attached_script( txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins], ) - # check that script address UTxOs were spent + # Check that script address UTxOs were spent assert not ( cluster.g_query.get_utxo(utxo=script_utxos1[0]) or cluster.g_query.get_utxo(utxo=script_utxos2[0]) @@ -532,12 +532,12 @@ def test_spend_reference_script( reference_addr = payment_addrs[1] if address_type == "byron": - # create reference UTxO on Byron address + # Create reference UTxO on Byron address reference_addr = clusterlib_utils.gen_byron_addr( cluster_obj=cluster, name_template=temp_template ) - # create a Tx output with the reference script + # Create a Tx output with the reference script reference_utxo, __ = clusterlib_utils.create_reference_utxo( temp_template=temp_template, cluster_obj=cluster, @@ -549,7 +549,7 @@ def test_spend_reference_script( assert reference_utxo.reference_script, "Reference script is missing" assert reference_utxo.amount == amount, "Incorrect amount transferred" - # spend the Tx output with the reference script + # Spend the Tx output with the reference script txouts = [clusterlib.TxOut(address=payment_addr.address, amount=-1)] tx_files = clusterlib.TxFiles(signing_key_files=[reference_addr.skey_file]) @@ -561,7 +561,7 @@ def test_spend_reference_script( tx_files=tx_files, ) - # check that reference script UTxO was spent + # Check that reference script UTxO was spent 
assert not cluster.g_query.get_utxo( utxo=reference_utxo ), f"Reference script UTxO was NOT spent: '{reference_utxo}`" @@ -664,7 +664,7 @@ def test_reference_script_byron_address( cluster_obj=cluster, name_template=temp_template ) - # create reference UTxO + # Create reference UTxO reference_utxo, __ = clusterlib_utils.create_reference_utxo( temp_template=temp_template, cluster_obj=cluster, @@ -710,7 +710,7 @@ def test_not_a_script( protocol_params=cluster.g_query.get_protocol_params(), ) - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: spend_raw._fund_script( @@ -743,12 +743,12 @@ def test_two_scripts_one_fail( plutus_op1 = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS plutus_op2 = spend_raw.PLUTUS_OP_ALWAYS_FAILS - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_address_1 = cluster.g_address.gen_payment_addr( addr_name=f"{temp_template}_addr1", payment_script_file=plutus_op1.script_file @@ -783,7 +783,7 @@ def test_two_scripts_one_fail( amount=amount + redeem_cost_2.fee + spend_raw.FEE_REDEEM_TXSIZE, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, @@ -794,7 +794,7 @@ def test_two_scripts_one_fail( amount=10_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -817,7 +817,7 @@ def test_two_scripts_one_fail( 
collateral_utxos1 = cluster.g_query.get_utxo(txin=f"{txid}#4") collateral_utxos2 = cluster.g_query.get_utxo(txin=f"{txid}#5") - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -906,7 +906,7 @@ def test_lock_tx_v1_reference_script( execution_cost=plutus_common.ALWAYS_SUCCEEDS["v1"].execution_cost, ) - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -965,7 +965,7 @@ def test_lock_tx_v1_reference_script( tx_name=f"{temp_template}_step2", ) - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx( @@ -1002,12 +1002,12 @@ def test_v1_attached_v2_reference( plutus_op2 = spend_raw.PLUTUS_OP_GUESSING_GAME_UNTYPED - # for mypy + # For mypy assert plutus_op1.execution_cost and plutus_op2.execution_cost assert plutus_op1.datum_file and plutus_op2.datum_file assert plutus_op1.redeemer_cbor_file and plutus_op2.redeemer_cbor_file - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address script_address_1 = cluster.g_address.gen_payment_addr( addr_name=f"{temp_template}_addr1", payment_script_file=plutus_op1.script_file @@ -1042,13 +1042,13 @@ def test_v1_attached_v2_reference( amount=amount + redeem_cost_2.fee + spend_raw.FEE_REDEEM_TXSIZE, inline_datum_file=plutus_op2.datum_file, ), - # for reference script + # For reference script clusterlib.TxOut( address=payment_addrs[1].address, amount=10_000_000, reference_script_file=plutus_op2.script_file, ), - # for collateral + # For collateral clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_1.collateral), clusterlib.TxOut(address=payment_addrs[1].address, amount=redeem_cost_2.collateral), ] @@ -1070,7 +1070,7 @@ def test_v1_attached_v2_reference( collateral_utxos1 
= cluster.g_query.get_utxo(txin=f"{txid}#3") collateral_utxos2 = cluster.g_query.get_utxo(txin=f"{txid}#4") - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -1148,7 +1148,7 @@ def test_lock_byron_reference_script( plutus_op = spend_raw.PLUTUS_OP_ALWAYS_SUCCEEDS - # for mypy + # For mypy assert plutus_op.execution_cost assert plutus_op.datum_file assert plutus_op.redeemer_cbor_file @@ -1172,7 +1172,7 @@ def test_lock_byron_reference_script( use_inline_datum=True, ) - # create reference UTxO on Byron address + # Create reference UTxO on Byron address byron_addr = clusterlib_utils.gen_byron_addr( cluster_obj=cluster, name_template=temp_template ) @@ -1222,7 +1222,7 @@ def test_lock_byron_reference_script( tx_name=f"{temp_template}_step2", ) - # create a Tx output with an inline datum at the script address + # Create a Tx output with an inline datum at the script address with pytest.raises(clusterlib.CLIError) as excinfo: cluster.g_transaction.submit_tx( diff --git a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py index 92008dddc..4eda95bbc 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_build.py @@ -38,7 +38,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -97,7 +97,7 @@ def build_fund_script_secp( amount=script_fund, inline_datum_file=plutus_common.DATUM_42_TYPED, ), - # for collateral + # For collateral clusterlib.TxOut(address=dst_addr.address, amount=redeem_cost.collateral), ] @@ -146,7 +146,7 @@ def test_use_secp_builtin_functions( * spend the locked UTxO * check that script address UTxO was spent """ - # create the necessary Tx outputs + # Create the necessary Tx outputs algorithm, script_utxos, 
collateral_utxos = build_fund_script_secp temp_template = common.get_test_id(cluster) @@ -170,12 +170,12 @@ def test_use_secp_builtin_functions( redeemer_file=redeemer_file, ) - # for mypy + # For mypy assert plutus_op.script_file assert plutus_op.datum_file assert plutus_op.redeemer_file - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -217,7 +217,7 @@ def test_use_secp_builtin_functions( tx_file=tx_signed, txins=[t.txins[0] for t in tx_output_redeem.script_txins if t.txins] ) - # check that script address UTxO was spent + # Check that script address UTxO was spent assert not cluster.g_query.get_utxo( utxo=script_utxos[0] ), f"Script address UTxO was NOT spent `{script_utxos}`" @@ -241,12 +241,12 @@ def test_overspending_execution_budget( * Expect failure. """ - # create the necessary Tx outputs + # Create the necessary Tx outputs algorithm, script_utxos, collateral_utxos = build_fund_script_secp temp_template = f"{common.get_test_id(cluster)}_{common.unique_time_str()}" - # the redeemer file will define the number of loops on the script + # The redeemer file will define the number of loops on the script redeemer_dir = ( plutus_common.SEPC256K1_ECDSA_DIR if algorithm == "ecdsa" @@ -255,7 +255,7 @@ def test_overspending_execution_budget( redeemer_file = redeemer_dir / "loop_script.redeemer" - # generate a dummy redeemer with a number of loops big enough + # Generate a dummy redeemer with a number of loops big enough # to make the script to overspending the budget redeemer_file_dummy = pl.Path(f"{temp_template}_dummy_script_context.redeemer") @@ -278,12 +278,12 @@ def test_overspending_execution_budget( redeemer_file=pl.Path(redeemer_file_dummy), ) - # for mypy + # For mypy assert plutus_op.script_file assert plutus_op.datum_file assert plutus_op.redeemer_file - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( diff --git 
a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py index 8204cf6a0..a77048125 100644 --- a/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py +++ b/cardano_node_tests/tests/tests_plutus_v2/test_spend_secp256k1_raw.py @@ -34,7 +34,7 @@ def payment_addrs( cluster_obj=cluster, ) - # fund source address + # Fund source address clusterlib_utils.fund_from_faucet( addrs[0], cluster_obj=cluster, @@ -93,7 +93,7 @@ def fund_script_secp( amount=amount + redeem_cost.fee + spend_raw.FEE_REDEEM_TXSIZE, inline_datum_file=plutus_common.DATUM_42_TYPED, ), - # for collateral + # For collateral clusterlib.TxOut(address=dst_addr.address, amount=redeem_cost.collateral), ] @@ -132,7 +132,7 @@ def test_use_secp_builtin_functions( """ amount = 2_000_000 - # create the necessary Tx outputs + # Create the necessary Tx outputs algorithm, script_utxos, collateral_utxos = fund_script_secp temp_template = common.get_test_id(cluster) @@ -163,12 +163,12 @@ def test_use_secp_builtin_functions( execution_cost=execution_units, ) - # for mypy + # For mypy assert plutus_op.script_file assert plutus_op.redeemer_file assert plutus_op.execution_cost - # spend the "locked" UTxO + # Spend the "locked" UTxO plutus_txins = [ clusterlib.ScriptTxIn( @@ -206,7 +206,7 @@ def test_use_secp_builtin_functions( ) raise - # check that script address UTxO was spent + # Check that script address UTxO was spent assert not cluster.g_query.get_utxo( utxo=script_utxos[0] ), f"Script address UTxO was NOT spent `{script_utxos}`" diff --git a/cardano_node_tests/tests/tx_common.py b/cardano_node_tests/tests/tx_common.py index 4fba6881e..ed77b3306 100644 --- a/cardano_node_tests/tests/tx_common.py +++ b/cardano_node_tests/tests/tx_common.py @@ -41,7 +41,7 @@ def get_raw_tx_values( src_addr_highest_utxo = cluster_obj.g_query.get_utxo_with_highest_amount(src_address) - # use only the UTxO with the highest amount + # Use 
only the UTxO with the highest amount txins = [src_addr_highest_utxo] txouts = [ clusterlib.TxOut( diff --git a/cardano_node_tests/utils/cluster_nodes.py b/cardano_node_tests/utils/cluster_nodes.py index 8d1fc16d3..809f30783 100644 --- a/cardano_node_tests/utils/cluster_nodes.py +++ b/cardano_node_tests/utils/cluster_nodes.py @@ -203,7 +203,7 @@ def __init__(self) -> None: cluster_scripts.TestnetScripts() ) - # cached values + # Cached values self._testnet_type = "" @property diff --git a/cardano_node_tests/utils/cluster_scripts.py b/cardano_node_tests/utils/cluster_scripts.py index d23ea6f22..17df99563 100644 --- a/cardano_node_tests/utils/cluster_scripts.py +++ b/cardano_node_tests/utils/cluster_scripts.py @@ -190,27 +190,27 @@ def _get_node_ports(num: int) -> NodePorts: metrics_submit_api=last_port - 1, submit_api=last_port - 2, supervisor=12001 + instance_num, - # relay1 + # Relay1 relay1=0, ekg_relay1=0, prometheus_relay1=0, - # bft1 + # Bft1 bft1=base, ekg_bft1=base + 1, prometheus_bft1=base + 2, - # pool1 + # Pool1 pool1=base + 5, ekg_pool1=base + 6, prometheus_pool1=base + 7, - # pool2 + # Pool2 pool2=base + 10, ekg_pool2=base + 11, prometheus_pool2=base + 12, - # pool3 + # Pool3 pool3=base + 15, ekg_pool3=base + 16, prometheus_pool3=base + 17, - # all nodes + # All nodes node_ports=node_ports, ) return ports @@ -250,23 +250,23 @@ def _replace_instance_files( ) -> str: """Replace instance variables in given content.""" content = infile.read_text() - # replace cluster instance number + # Replace cluster instance number new_content = content.replace("%%INSTANCE_NUM%%", str(instance_num)) - # replace number of pools + # Replace number of pools new_content = new_content.replace("%%NUM_POOLS%%", str(self.num_pools)) - # replace node port number strings + # Replace node port number strings new_content = new_content.replace("%%NODE_PORT_BASE%%", str(instance_ports.base)) - # replace number of reserved ports per node + # Replace number of reserved ports per node 
new_content = new_content.replace("%%PORTS_PER_NODE%%", str(ports_per_node)) - # reconfigure supervisord port + # Reconfigure supervisord port new_content = new_content.replace("%%SUPERVISOR_PORT%%", str(instance_ports.supervisor)) - # reconfigure submit-api port + # Reconfigure submit-api port new_content = new_content.replace("%%SUBMIT_API_PORT%%", str(instance_ports.submit_api)) - # reconfigure submit-api metrics port + # Reconfigure submit-api metrics port new_content = new_content.replace( "%%METRICS_SUBMIT_API_PORT%%", str(instance_ports.metrics_submit_api) ) - # reconfigure webserver port + # Reconfigure webserver port new_content = new_content.replace("%%WEBSERVER_PORT%%", str(instance_ports.webserver)) return new_content @@ -359,7 +359,7 @@ def _gen_topology_files( # P2P topology - # bft1 and first three pools + # Bft1 and first three pools fixed_ports = all_except[:4] p2p_topology = self._gen_p2p_topology( @@ -376,11 +376,11 @@ def _reconfigure_local(self, indir: pl.Path, destdir: pl.Path, instance_num: int ports_per_node = instance_ports.pool1 - instance_ports.bft1 addr = self._preselect_addr(instance_num=instance_num) - # reconfigure cluster instance files + # Reconfigure cluster instance files for infile in indir.glob("*"): fname = infile.name - # skip template files + # Skip template files if fname.startswith("template-"): continue @@ -393,11 +393,11 @@ def _reconfigure_local(self, indir: pl.Path, destdir: pl.Path, instance_num: int ) outfile.write_text(f"{dest_content}\n") - # make `*.sh` files and files without extension executable + # Make `*.sh` files and files without extension executable if "." 
not in fname or fname.endswith(".sh"): outfile.chmod(0o755) - # generate config and topology files from templates + # Generate config and topology files from templates for node_rec in instance_ports.node_ports: if node_rec.num != 0: supervisor_script = destdir / f"cardano-node-pool{node_rec.num}" @@ -602,21 +602,21 @@ def _reconfigure_testnet( with open(infile, encoding="utf-8") as in_fp: content = in_fp.read() - # replace cluster instance number + # Replace cluster instance number new_content = content.replace("%%INSTANCE_NUM%%", str(instance_num)) - # replace node port number strings + # Replace node port number strings new_content = new_content.replace("%%NODE_PORT_RELAY1%%", str(instance_ports.relay1)) - # reconfigure supervisord port + # Reconfigure supervisord port new_content = new_content.replace("%%SUPERVISOR_PORT%%", str(instance_ports.supervisor)) - # reconfigure submit-api port + # Reconfigure submit-api port new_content = new_content.replace("%%SUBMIT_API_PORT%%", str(instance_ports.submit_api)) - # reconfigure submit-api metrics port + # Reconfigure submit-api metrics port new_content = new_content.replace( "%%METRICS_SUBMIT_API_PORT%%", str(instance_ports.metrics_submit_api) ) - # reconfigure EKG metrics port + # Reconfigure EKG metrics port new_content = new_content.replace("%%EKG_PORT_RELAY1%%", str(instance_ports.ekg_relay1)) - # reconfigure prometheus metrics port + # Reconfigure prometheus metrics port new_content = new_content.replace( "%%PROMETHEUS_PORT_RELAY1%%", str(instance_ports.prometheus_relay1) ) @@ -624,7 +624,7 @@ def _reconfigure_testnet( with open(outfile, "w", encoding="utf-8") as out_fp: out_fp.write(new_content) - # make `*.sh` files and files without extension executable + # Make `*.sh` files and files without extension executable if "." 
not in fname or fname.endswith(".sh"): outfile.chmod(0o755) diff --git a/cardano_node_tests/utils/clusterlib_utils.py b/cardano_node_tests/utils/clusterlib_utils.py index 6c6661998..d5410ac8e 100644 --- a/cardano_node_tests/utils/clusterlib_utils.py +++ b/cardano_node_tests/utils/clusterlib_utils.py @@ -217,7 +217,7 @@ def register_stake_address( deposit_amt: int = -1, ) -> clusterlib.TxRawOutput: """Register stake address.""" - # files for registering stake address + # Files for registering stake address addr_reg_cert = cluster_obj.g_stake_address.gen_stake_addr_registration_cert( addr_name=name_template, deposit_amt=deposit_amt, @@ -332,7 +332,7 @@ def create_pool_users( """Create PoolUsers.""" pool_users = [] for i in range(no_of_addr): - # create key pairs and addresses + # Create key pairs and addresses stake_addr_rec = create_stake_addr_records( f"{name_template}_addr{i}", cluster_obj=cluster_obj, destination_dir=destination_dir )[0] @@ -342,7 +342,7 @@ def create_pool_users( stake_vkey_file=stake_addr_rec.vkey_file, destination_dir=destination_dir, )[0] - # create pool user struct + # Create pool user struct pool_user = clusterlib.PoolUser(payment=payment_addr_rec, stake=stake_addr_rec) pool_users.append(pool_user) @@ -525,7 +525,7 @@ def update_params_build( cli_args = list(itertools.chain.from_iterable(_cli_args)) temp_template = helpers.get_timestamped_rand_str() - # assumption is update proposals are submitted near beginning of epoch + # Assumption is update proposals are submitted near beginning of epoch epoch = cluster_obj.g_query.get_epoch() out_file = cluster_obj.g_governance.gen_update_proposal( @@ -576,7 +576,7 @@ def mint_or_burn_witness( token_mint_addr = new_tokens[0].token_mint_addr signing_key_files = list({*issuers_skey_files, token_mint_addr.skey_file}) - # create TX body + # Create TX body mint = [ clusterlib.Mint( txouts=[ @@ -594,7 +594,7 @@ def mint_or_burn_witness( ] txouts = [] if mint_txouts: - # meet the minimum required UTxO value + 
# Meet the minimum required UTxO value lovelace_amount = 2_000_000 + math.ceil(len(mint_txouts) / 8) * 1_000_000 txouts = [ clusterlib.TxOut(address=new_tokens[0].token_mint_addr.address, amount=lovelace_amount), @@ -631,21 +631,21 @@ def mint_or_burn_witness( invalid_before=invalid_before, ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) if sign_incrementally and len(signing_key_files) >= 1: - # create witness file for first required key + # Create witness file for first required key witness_file = cluster_obj.g_transaction.witness_tx( tx_body_file=tx_output.out_file, witness_name=f"{temp_template}_skey0", signing_key_files=signing_key_files[:1], ) - # sign Tx using witness file + # Sign Tx using witness file tx_witnessed_file = cluster_obj.g_transaction.assemble_tx( tx_body_file=tx_output.out_file, witness_files=[witness_file], tx_name=f"{temp_template}_sign0", ) - # incrementally sign the already signed Tx with rest of required skeys + # Incrementally sign the already signed Tx with rest of required skeys for idx, skey in enumerate(signing_key_files[1:], start=1): tx_witnessed_file = cluster_obj.g_transaction.sign_tx( tx_file=tx_witnessed_file, @@ -653,7 +653,7 @@ def mint_or_burn_witness( tx_name=f"{temp_template}_sign{idx}", ) else: - # create witness file for each required key + # Create witness file for each required key witness_files = [ cluster_obj.g_transaction.witness_tx( tx_body_file=tx_output.out_file, @@ -663,7 +663,7 @@ def mint_or_burn_witness( for idx, skey in enumerate(signing_key_files) ] - # sign Tx using witness files + # Sign Tx using witness files tx_witnessed_file = cluster_obj.g_transaction.assemble_tx( tx_body_file=tx_output.out_file, witness_files=witness_files, @@ -699,7 +699,7 @@ def mint_or_burn_sign( token_mint_addr = new_tokens[0].token_mint_addr signing_key_files = list({*issuers_skey_files, token_mint_addr.skey_file}) - # build and sign a transaction + # Build and sign a 
transaction mint = [ clusterlib.Mint( txouts=[ @@ -716,7 +716,7 @@ def mint_or_burn_sign( ] txouts = [] if mint_txouts: - # meet the minimum required UTxO value + # Meet the minimum required UTxO value lovelace_amount = 2_000_000 + math.ceil(len(mint_txouts) / 8) * 1_000_000 txouts = [ clusterlib.TxOut(address=new_tokens[0].token_mint_addr.address, amount=lovelace_amount), @@ -749,14 +749,14 @@ def mint_or_burn_sign( fee=fee, ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) if sign_incrementally and len(signing_key_files) >= 1: out_file_signed = cluster_obj.g_transaction.sign_tx( tx_body_file=tx_output.out_file, signing_key_files=signing_key_files[:1], tx_name=f"{temp_template}_sign0", ) - # incrementally sign the already signed Tx with rest of required skeys + # Incrementally sign the already signed Tx with rest of required skeys for idx, skey in enumerate(signing_key_files[1:], start=1): out_file_signed = cluster_obj.g_transaction.sign_tx( tx_file=out_file_signed, @@ -815,7 +815,7 @@ def withdraw_reward_w_build( witness_override=len(tx_files_withdrawal.signing_key_files), destination_dir=destination_dir, ) - # sign incrementally (just to check that it works) + # Sign incrementally (just to check that it works) tx_signed = cluster_obj.g_transaction.sign_tx( tx_body_file=tx_raw_withdrawal_output.out_file, signing_key_files=[dst_addr_record.skey_file], @@ -831,7 +831,7 @@ def withdraw_reward_w_build( if not verify: return tx_raw_withdrawal_output - # check that reward is 0 + # Check that reward is 0 if ( cluster_obj.g_query.get_stake_addr_info(stake_addr_record.address).reward_account_balance != 0 @@ -839,7 +839,7 @@ def withdraw_reward_w_build( msg = "Not all rewards were transferred." 
raise AssertionError(msg) - # check that rewards were transferred + # Check that rewards were transferred src_reward_balance = cluster_obj.g_query.get_address_balance(dst_address) if ( src_reward_balance @@ -862,7 +862,7 @@ def new_tokens( amount: int, ) -> list[TokenRecord]: """Mint new token, sign using skeys.""" - # create simple script + # Create simple script keyhash = cluster_obj.g_address.get_payment_vkey_hash(payment_vkey_file=issuer_addr.vkey_file) script_content = {"keyHash": keyhash, "type": "sig"} script = pl.Path(f"{temp_template}.script") @@ -889,7 +889,7 @@ def new_tokens( ) ) - # token minting + # Token minting mint_or_burn_sign( cluster_obj=cluster_obj, new_tokens=tokens_to_mint, @@ -1042,17 +1042,17 @@ def wait_for_epoch_interval( start_epoch = cluster_obj.g_query.get_epoch() - # wait for new block so we start counting with an up-to-date slot number + # Wait for new block so we start counting with an up-to-date slot number cluster_obj.wait_for_new_block() for __ in range(40): s_from_epoch_start = cluster_obj.time_from_epoch_start() - # return if we are in the required interval + # Return if we are in the required interval if start_abs <= s_from_epoch_start <= stop_abs: break - # if we are already after the required interval, wait for next epoch + # If we are already after the required interval, wait for next epoch if stop_abs < s_from_epoch_start: if force_epoch: msg = ( @@ -1068,13 +1068,13 @@ def wait_for_epoch_interval( cluster_obj.wait_for_new_epoch() continue - # sleep until `start_abs` + # Sleep until `start_abs` to_sleep = start_abs - s_from_epoch_start if to_sleep > 0: # `to_sleep` is float, wait for at least 1 second time.sleep(to_sleep if to_sleep > 1 else 1) - # we can finish if slot number of last minted block doesn't need + # We can finish if slot number of last minted block doesn't need # to match the time interval if not check_slot: break @@ -1105,7 +1105,7 @@ def load_tx_metadata(tx_body_file: pl.Path) -> TxMetadata: if not 
metadata_section: return TxMetadata(metadata={}, aux_data=[]) - # the `metadata_section` can be either list or `CBORTag`- check if it is `CBORTag` + # The `metadata_section` can be either list or `CBORTag`- check if it is `CBORTag` try: metadata_value = metadata_section.value except AttributeError: @@ -1115,7 +1115,7 @@ def load_tx_metadata(tx_body_file: pl.Path) -> TxMetadata: metadata=metadata_value.get(0) or {}, aux_data=metadata_value.get(1) or [] ) - # now we know the `metadata_section` is list + # Now we know the `metadata_section` is list try: metadata: dict = metadata_section[0] except KeyError: diff --git a/cardano_node_tests/utils/configuration.py b/cardano_node_tests/utils/configuration.py index eab6dca1f..da9259dd8 100644 --- a/cardano_node_tests/utils/configuration.py +++ b/cardano_node_tests/utils/configuration.py @@ -31,35 +31,35 @@ def _check_cardano_node_socket_path() -> None: DBSYNC_DB = "dbsync" IS_XDIST = bool(os.environ.get("PYTEST_XDIST_TESTRUNUID")) -# used also in startup scripts as `if [ -n "$VAR" ]...` +# Used also in startup scripts as `if [ -n "$VAR" ]...` ENABLE_LEGACY = (os.environ.get("ENABLE_LEGACY") or "") != "" -# used also in startup scripts as `if [ -n "$VAR" ]...` +# Used also in startup scripts as `if [ -n "$VAR" ]...` MIXED_P2P = (os.environ.get("MIXED_P2P") or "") != "" -# used also in startup scripts as `if [ -n "$VAR" ]...` +# Used also in startup scripts as `if [ -n "$VAR" ]...` HAS_CC = (os.environ.get("NO_CC") or "") == "" -# used also in startup scripts as `if [ -n "$VAR" ]...` +# Used also in startup scripts as `if [ -n "$VAR" ]...` PV10 = (os.environ.get("PV10") or "") == "" -# used also in startup scripts +# Used also in startup scripts UTXO_BACKEND = os.environ.get("UTXO_BACKEND") or "" if UTXO_BACKEND not in ("", "mem", "disk"): msg = f"Invalid UTXO_BACKEND: {UTXO_BACKEND}" raise RuntimeError(msg) -# resolve CARDANO_NODE_SOCKET_PATH +# Resolve CARDANO_NODE_SOCKET_PATH STARTUP_CARDANO_NODE_SOCKET_PATH = ( 
pl.Path(os.environ["CARDANO_NODE_SOCKET_PATH"]).expanduser().resolve() ) os.environ["CARDANO_NODE_SOCKET_PATH"] = str(STARTUP_CARDANO_NODE_SOCKET_PATH) -# resolve SCHEDULING_LOG +# Resolve SCHEDULING_LOG SCHEDULING_LOG: str | pl.Path = os.environ.get("SCHEDULING_LOG") or "" if SCHEDULING_LOG: SCHEDULING_LOG = pl.Path(SCHEDULING_LOG).expanduser().resolve() -# resolve BLOCK_PRODUCTION_DB +# Resolve BLOCK_PRODUCTION_DB BLOCK_PRODUCTION_DB: str | pl.Path = os.environ.get("BLOCK_PRODUCTION_DB") or "" if BLOCK_PRODUCTION_DB: BLOCK_PRODUCTION_DB = pl.Path(BLOCK_PRODUCTION_DB).expanduser().resolve() @@ -98,10 +98,10 @@ def _check_cardano_node_socket_path() -> None: DONT_OVERWRITE_OUTFILES = bool(os.environ.get("DONT_OVERWRITE_OUTFILES")) -# cluster instances are kept running after tests finish +# Cluster instances are kept running after tests finish KEEP_CLUSTERS_RUNNING = bool(os.environ.get("KEEP_CLUSTERS_RUNNING")) -# determine what scripts to use to start the cluster +# Determine what scripts to use to start the cluster SCRIPTS_DIRNAME = os.environ.get("SCRIPTS_DIRNAME") or "" if SCRIPTS_DIRNAME: pass diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index d7b8e6c3f..df33da250 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -93,7 +93,7 @@ def return_funds_to_faucet( for addr, amount_rec in zip(src_addrs, amount): fund_dst = [clusterlib.TxOut(address=faucet_addr, amount=amount_rec)] fund_tx_files = clusterlib.TxFiles(signing_key_files=[addr.skey_file]) - # try to return funds; don't mind if there's not enough funds for fees etc. + # Try to return funds; don't mind if there's not enough funds for fees etc. 
with contextlib.suppress(Exception): cluster_obj.g_transaction.send_funds( src_address=addr.address, diff --git a/cardano_node_tests/utils/locking.py b/cardano_node_tests/utils/locking.py index ea64dad03..0622a6ba8 100644 --- a/cardano_node_tests/utils/locking.py +++ b/cardano_node_tests/utils/locking.py @@ -10,7 +10,7 @@ if configuration.IS_XDIST: from filelock import FileLock - # suppress messages from filelock + # Suppress messages from filelock logging.getLogger("filelock").setLevel(logging.WARNING) FileLockIfXdist: tp.Any = FileLock diff --git a/cardano_node_tests/utils/model_ekg.py b/cardano_node_tests/utils/model_ekg.py index 694b5f91b..8cba99bd9 100644 --- a/cardano_node_tests/utils/model_ekg.py +++ b/cardano_node_tests/utils/model_ekg.py @@ -1,4 +1,4 @@ -# generated by datamodel-codegen: +# Generated by datamodel-codegen: # filename: ekg.json # timestamp: 2021-03-10T11:38:36+00:00 from __future__ import annotations diff --git a/cardano_node_tests/utils/testnet_cleanup.py b/cardano_node_tests/utils/testnet_cleanup.py index d6e2a5773..9dda2c8e3 100644 --- a/cardano_node_tests/utils/testnet_cleanup.py +++ b/cardano_node_tests/utils/testnet_cleanup.py @@ -57,7 +57,7 @@ def deregister_stake_addr( deposit_amt: int, ) -> None: """Deregister stake address.""" - # files for deregistering stake address + # Files for deregistering stake address stake_addr_dereg_cert = cluster_obj.g_stake_address.gen_stake_addr_deregistration_cert( addr_name=f"rf_{name_template}_addr0_dereg", deposit_amt=deposit_amt, @@ -120,7 +120,7 @@ def return_funds_to_faucet( ) -> None: """Send funds from `src_addr`s to `faucet_address`.""" tx_name = f"rf_{tx_name}" - # the amount of "-1" means all available funds. + # The amount of "-1" means all available funds. 
fund_dst = [clusterlib.TxOut(address=faucet_address, amount=-1)] fund_tx_files = clusterlib.TxFiles(signing_key_files=[f.skey_file for f in src_addrs]) @@ -128,11 +128,11 @@ def return_funds_to_faucet( txins = list(itertools.chain.from_iterable(txins_nested)) utxos_balance = functools.reduce(lambda x, y: x + y.amount, txins, 0) - # skip if there no (or too little) Lovelace + # Skip if there no (or too little) Lovelace if utxos_balance < 1000_000: return - # if the balance is too low, add a faucet UTxO so there's enough funds for fee + # If the balance is too low, add a faucet UTxO so there's enough funds for fee # and the total amount is higher than min ADA value if utxos_balance < 3000_000: faucet_utxos = cluster_obj.g_query.get_utxo( @@ -141,7 +141,7 @@ def return_funds_to_faucet( futxo = random.choice(faucet_utxos) txins.append(futxo) - # try to return funds; don't mind if there's not enough funds for fees etc. + # Try to return funds; don't mind if there's not enough funds for fees etc. 
try: cluster_obj.g_transaction.send_tx( src_address=src_addrs[0].address, @@ -196,9 +196,9 @@ def group_addr_files(file_paths: tp.Generator[pl.Path, None, None]) -> list[list path_groups: list[list[pl.Path]] = [curr_group] prev_basename = "" - # reverse-sort the list so stake address files are processes before payment address files + # Reverse-sort the list so stake address files are processes before payment address files for f in sorted(file_paths, reverse=True): - # skip the '*_pycurrent' symlinks to pytest temp dirs + # Skip the '*_pycurrent' symlinks to pytest temp dirs if "_pycurrent" in str(f): continue basename = f.name.replace("_stake.addr", "").replace(".addr", "") diff --git a/cardano_node_tests/utils/tx_view.py b/cardano_node_tests/utils/tx_view.py index c4191772d..dee556bd4 100644 --- a/cardano_node_tests/utils/tx_view.py +++ b/cardano_node_tests/utils/tx_view.py @@ -194,11 +194,11 @@ def _check_return_collateral(tx_raw_output: clusterlib.TxRawOutput, tx_loaded: d "total collateral" ), "Return collateral total collateral mismatch" - # automatic return collateral works only with `transaction build` + # Automatic return collateral works only with `transaction build` if not (tx_raw_output.return_collateral_txouts or tx_raw_output.change_address): return - # when total collateral amount is specified, it is necessary to specify also return + # When total collateral amount is specified, it is necessary to specify also return # collateral `TxOut` to get the change, otherwise all collaterals will be collected if tx_raw_output.total_collateral_amount and not tx_raw_output.return_collateral_txouts: return @@ -237,7 +237,7 @@ def check_tx_view( # noqa: C901 tx_loaded = load_tx_view(cluster_obj=cluster_obj, tx_body_file=tx_raw_output.out_file) - # check inputs + # Check inputs loaded_txins = set(tx_loaded.get("inputs") or []) _tx_raw_script_txins = list( itertools.chain.from_iterable(r.txins for r in tx_raw_output.script_txins) @@ -250,7 +250,7 @@ def 
check_tx_view( # noqa: C901 msg = f"txins: {tx_raw_txins} != {loaded_txins}" raise AssertionError(msg) - # check outputs + # Check outputs tx_loaded_outputs = tx_loaded.get("outputs") or [] loaded_txouts: set[tuple[str, int, str]] = set() for txout in tx_loaded_outputs: @@ -264,9 +264,9 @@ def check_tx_view( # noqa: C901 msg = f"txouts: {tx_raw_txouts} not in {loaded_txouts}" raise AssertionError(msg) - # check fee + # Check fee fee = int(tx_loaded.get("fee", "").split()[0] or 0) - # for `transaction build` the `tx_raw_output.fee` can be -1 + # For `transaction build` the `tx_raw_output.fee` can be -1 if tx_raw_output.fee not in ( -1, fee, @@ -274,7 +274,7 @@ def check_tx_view( # noqa: C901 msg = f"fee: {tx_raw_output.fee} != {fee}" raise AssertionError(msg) - # check validity intervals + # Check validity intervals validity_range = tx_loaded.get("validity range") or {} loaded_invalid_before = validity_range.get("lower bound") @@ -289,7 +289,7 @@ def check_tx_view( # noqa: C901 msg = f"invalid hereafter: {tx_raw_output.invalid_hereafter} != {loaded_invalid_hereafter}" raise AssertionError(msg) - # check minting and burning + # Check minting and burning loaded_mint = set(_load_assets(assets=tx_loaded.get("mint") or {})) mint_txouts = list(itertools.chain.from_iterable(m.txouts for m in tx_raw_output.mint)) tx_raw_mint = {(r.amount, r.coin) for r in mint_txouts} @@ -298,7 +298,7 @@ def check_tx_view( # noqa: C901 msg = f"mint: {tx_raw_mint} != {loaded_mint}" raise AssertionError(msg) - # check withdrawals + # Check withdrawals tx_loaded_withdrawals = tx_loaded.get("withdrawals") loaded_withdrawals = set() if tx_loaded_withdrawals: @@ -321,7 +321,7 @@ def check_tx_view( # noqa: C901 msg = f"withdrawals: {tx_raw_withdrawals} != {loaded_withdrawals}" raise AssertionError(msg) - # check certificates + # Check certificates tx_raw_len_certs = len(tx_raw_output.tx_files.certificate_files) + len( tx_raw_output.complex_certs ) @@ -344,7 +344,7 @@ def check_tx_view( # noqa: 
C901 ) raise AssertionError(msg) - # load and check transaction era + # Load and check transaction era loaded_tx_era: str = tx_loaded["era"] loaded_tx_version = getattr(VERSIONS, loaded_tx_era.upper()) @@ -358,19 +358,19 @@ def check_tx_view( # noqa: C901 msg = f"Unexpected transaction era: {loaded_tx_version} != {output_tx_version}" raise AssertionError(msg) - # check collateral inputs, this is only available on Alonzo+ TX + # Check collateral inputs, this is only available on Alonzo+ TX if loaded_tx_version >= VERSIONS.ALONZO: _check_collateral_inputs(tx_raw_output=tx_raw_output, tx_loaded=tx_loaded) - # check reference inputs, this is only available on Babbage+ TX on node version 1.35.3+ + # Check reference inputs, this is only available on Babbage+ TX on node version 1.35.3+ if loaded_tx_version >= VERSIONS.BABBAGE and "reference inputs" in tx_loaded: _check_reference_inputs(tx_raw_output=tx_raw_output, tx_loaded=tx_loaded) - # check inline datum, this is only available on Babbage+ TX + # Check inline datum, this is only available on Babbage+ TX if loaded_tx_version >= VERSIONS.BABBAGE: _check_inline_datums(tx_raw_output=tx_raw_output, tx_loaded=tx_loaded) - # check return collateral, this is only available on Babbage+ TX on node version 1.35.3+ + # Check return collateral, this is only available on Babbage+ TX on node version 1.35.3+ if loaded_tx_version >= VERSIONS.BABBAGE and "return collateral" in tx_loaded: _check_return_collateral(tx_raw_output=tx_raw_output, tx_loaded=tx_loaded) diff --git a/cardano_node_tests/utils/types.py b/cardano_node_tests/utils/types.py index 156423c3c..054b91cfb 100644 --- a/cardano_node_tests/utils/types.py +++ b/cardano_node_tests/utils/types.py @@ -2,5 +2,5 @@ FileType = str | pl.Path FileTypeList = list[FileType] | list[str] | list[pl.Path] -# list of `FileType`s, empty list, or empty tuple +# List of `FileType`s, empty list, or empty tuple OptionalFiles = FileTypeList | tuple[()] From 3651c0bac8a8d178e82214ae0fa20f05f4f0e46f 
Mon Sep 17 00:00:00 2001
From: Martin Kourim 
Date: Mon, 2 Dec 2024 11:35:37 +0100
Subject: [PATCH 167/168] feat: Mark use of Plutus and lock Plutus when
 changing pparams

The hash of current protocol parameters is passed to Plutus scripts, and
a pparams change after the transaction was built will lead to
`PPViewHashesDontMatch` error on tx submit.
Therefore we need to mark usage of Plutus scripts, and "lock" Plutus
whenever we are changing pparams.
---
 .../cluster_management/resources.py           |  1 +
 .../tests/tests_conway/conftest.py            | 28 +++++++--
 .../tests/tests_conway/test_constitution.py   |  1 +
 .../tests/tests_conway/test_guardrails.py     |  5 +-
 .../tests/tests_conway/test_pparam_update.py  | 60 ++++++++++---------
 .../test_update_plutusv2_builtins.py          | 34 ++++++-----
 .../tests/tests_plutus/conftest.py            | 16 +++++
 .../tests/tests_plutus/test_delegation.py     |  1 +
 .../tests/tests_plutus_v2/conftest.py         | 16 +++++
 9 files changed, 112 insertions(+), 50 deletions(-)
 create mode 100644 cardano_node_tests/tests/tests_plutus/conftest.py
 create mode 100644 cardano_node_tests/tests/tests_plutus_v2/conftest.py

diff --git a/cardano_node_tests/cluster_management/resources.py b/cardano_node_tests/cluster_management/resources.py
index 0b47259ea..cdf6f78f6 100644
--- a/cardano_node_tests/cluster_management/resources.py
+++ b/cardano_node_tests/cluster_management/resources.py
@@ -18,6 +18,7 @@ class Resources:
     )
     # Reserve one pool for all tests where the pool will stop producing blocks
     POOL_FOR_OFFLINE: tp.Final[str] = POOL2
+    PLUTUS: tp.Final[str] = "plutus"
     RESERVES: tp.Final[str] = "reserves"
     TREASURY: tp.Final[str] = "treasury"
     REWARDS: tp.Final[str] = "rewards"
diff --git a/cardano_node_tests/tests/tests_conway/conftest.py b/cardano_node_tests/tests/tests_conway/conftest.py
index 2772aae69..23ee93460 100644
--- a/cardano_node_tests/tests/tests_conway/conftest.py
+++ b/cardano_node_tests/tests/tests_conway/conftest.py
@@ -13,7 +13,7 @@ def cluster_use_committee(
     cluster_manager:
cluster_management.ClusterManager, ) -> governance_utils.GovClusterT: - """Mark governance committee as "in use" and return instance of `clusterlib.ClusterLib`.""" + """Mark governance committee as "in use".""" cluster_obj = cluster_manager.get( use_resources=[ cluster_management.Resources.COMMITTEE, @@ -29,7 +29,7 @@ def cluster_use_committee( def cluster_use_dreps( cluster_manager: cluster_management.ClusterManager, ) -> governance_utils.GovClusterT: - """Mark governance DReps as "in use" and return instance of `clusterlib.ClusterLib`.""" + """Mark governance DReps as "in use".""" cluster_obj = cluster_manager.get( use_resources=[ cluster_management.Resources.DREPS, @@ -45,7 +45,7 @@ def cluster_use_dreps( def cluster_use_governance( cluster_manager: cluster_management.ClusterManager, ) -> governance_utils.GovClusterT: - """Mark governance as "in use" and return instance of `clusterlib.ClusterLib`.""" + """Mark whole governance as "in use".""" cluster_obj = cluster_manager.get( use_resources=[ cluster_management.Resources.COMMITTEE, @@ -64,7 +64,7 @@ def cluster_use_governance( def cluster_lock_governance( cluster_manager: cluster_management.ClusterManager, ) -> governance_utils.GovClusterT: - """Mark governance as "locked" and return instance of `clusterlib.ClusterLib`.""" + """Mark whole governance as "locked".""" cluster_obj = cluster_manager.get( use_resources=cluster_management.Resources.ALL_POOLS, lock_resources=[cluster_management.Resources.COMMITTEE, cluster_management.Resources.DREPS], @@ -74,3 +74,23 @@ def cluster_lock_governance( ) governance_utils.wait_delayed_ratification(cluster_obj=cluster_obj) return cluster_obj, governance_data + + +@pytest.fixture +def cluster_lock_governance_plutus( + cluster_manager: cluster_management.ClusterManager, +) -> governance_utils.GovClusterT: + """Mark whole governance and Plutus as "locked".""" + cluster_obj = cluster_manager.get( + use_resources=cluster_management.Resources.ALL_POOLS, + lock_resources=[ + 
cluster_management.Resources.COMMITTEE, + cluster_management.Resources.DREPS, + cluster_management.Resources.PLUTUS, + ], + ) + governance_data = governance_setup.get_default_governance( + cluster_manager=cluster_manager, cluster_obj=cluster_obj + ) + governance_utils.wait_delayed_ratification(cluster_obj=cluster_obj) + return cluster_obj, governance_data diff --git a/cardano_node_tests/tests/tests_conway/test_constitution.py b/cardano_node_tests/tests/tests_conway/test_constitution.py index 0286225d0..1de6e3862 100644 --- a/cardano_node_tests/tests/tests_conway/test_constitution.py +++ b/cardano_node_tests/tests/tests_conway/test_constitution.py @@ -44,6 +44,7 @@ def cluster_lock_gov_script( use_resources=[ *cluster_management.Resources.ALL_POOLS, cluster_management.Resources.REWARDS, + cluster_management.Resources.PLUTUS, ], lock_resources=[ cluster_management.Resources.COMMITTEE, diff --git a/cardano_node_tests/tests/tests_conway/test_guardrails.py b/cardano_node_tests/tests/tests_conway/test_guardrails.py index 95c5957a7..9ba83c884 100644 --- a/cardano_node_tests/tests/tests_conway/test_guardrails.py +++ b/cardano_node_tests/tests/tests_conway/test_guardrails.py @@ -49,7 +49,10 @@ def cluster_guardrails( Cleanup (== respin the cluster instance) after the tests are finished. 
""" cluster_obj = cluster_manager.get( - use_resources=cluster_management.Resources.ALL_POOLS, + use_resources=[ + *cluster_management.Resources.ALL_POOLS, + cluster_management.Resources.PLUTUS, + ], lock_resources=[cluster_management.Resources.COMMITTEE, cluster_management.Resources.DREPS], cleanup=True, ) diff --git a/cardano_node_tests/tests/tests_conway/test_pparam_update.py b/cardano_node_tests/tests/tests_conway/test_pparam_update.py index 4c8a648db..53ad7b1d6 100644 --- a/cardano_node_tests/tests/tests_conway/test_pparam_update.py +++ b/cardano_node_tests/tests/tests_conway/test_pparam_update.py @@ -200,13 +200,13 @@ class TestPParamUpdate: """Tests for protocol parameters update.""" @pytest.fixture - def pool_user_lg( + def pool_user_lgp( self, cluster_manager: cluster_management.ClusterManager, - cluster_lock_governance: governance_utils.GovClusterT, + cluster_lock_governance_plutus: governance_utils.GovClusterT, ) -> clusterlib.PoolUser: """Create a pool user for "lock governance".""" - cluster, __ = cluster_lock_governance + cluster, __ = cluster_lock_governance_plutus key = helpers.get_current_line_str() name_template = common.get_test_id(cluster) return conway_common.get_registered_pool_user( @@ -222,8 +222,10 @@ def pool_user_lg( @pytest.mark.dbsync def test_pparam_update( # noqa: C901 self, - cluster_lock_governance: governance_utils.GovClusterT, - pool_user_lg: clusterlib.PoolUser, + # The test is changing protocol parameters, so it is not safe to run Plutus tests at that + # time. It could e.g. lead to `PPViewHashesDontMatch` errors on transaction submits. + cluster_lock_governance_plutus: governance_utils.GovClusterT, + pool_user_lgp: clusterlib.PoolUser, ): """Test enactment of protocol parameter update. 
@@ -246,7 +248,7 @@ def test_pparam_update( # noqa: C901 and enacted actions """ # pylint: disable=too-many-locals,too-many-statements - cluster, governance_data = cluster_lock_governance + cluster, governance_data = cluster_lock_governance_plutus temp_template = common.get_test_id(cluster) cost_proposal_file = DATA_DIR / "cost_models_list_185_v2_v3.json" db_errors_final = [] @@ -256,7 +258,7 @@ def test_pparam_update( # noqa: C901 pytest.skip("The test doesn't work in bootstrap period without CC.") init_return_account_balance = cluster.g_query.get_stake_addr_info( - pool_user_lg.stake.address + pool_user_lgp.stake.address ).reward_account_balance # Check if total delegated stake is below the threshold. This can be used to check that @@ -660,7 +662,7 @@ def _propose_pparams_update( name_template=name_template, anchor_url=anchor_url, anchor_data_hash=anchor_data_hash, - pool_user=pool_user_lg, + pool_user=pool_user_lgp, proposals=proposals, prev_action_rec=prev_action_rec, ) @@ -707,7 +709,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_net_nodrep_bootstrap", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=net_nodrep_prop_rec.action_txid, action_ix=net_nodrep_prop_rec.action_ix, approve_cc=True, @@ -723,7 +725,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_net_nodrep", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=net_nodrep_prop_rec.action_txid, action_ix=net_nodrep_prop_rec.action_ix, approve_cc=True, @@ -755,7 +757,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_net_nocc", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=net_nocc_prop_rec.action_txid, action_ix=net_nocc_prop_rec.action_ix, approve_cc=False, @@ -779,7 +781,7 @@ def 
_check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_eco_nodrep", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=eco_nodrep_prop_rec.action_txid, action_ix=eco_nodrep_prop_rec.action_ix, approve_cc=True, @@ -809,7 +811,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_eco_nocc", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=eco_nocc_prop_rec.action_txid, action_ix=eco_nocc_prop_rec.action_ix, approve_cc=False, @@ -839,7 +841,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_fin_with_spos", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=tech_nodrep_prop_rec.action_txid, action_ix=tech_nodrep_prop_rec.action_ix, approve_cc=False, @@ -859,7 +861,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_tech_nodrep", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=tech_nodrep_prop_rec.action_txid, action_ix=tech_nodrep_prop_rec.action_ix, approve_cc=True, @@ -885,7 +887,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_tech_nocc", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=tech_nocc_prop_rec.action_txid, action_ix=tech_nocc_prop_rec.action_ix, approve_cc=None, @@ -911,7 +913,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_sec_nonespo", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=sec_nonespo_prop_rec.action_txid, action_ix=sec_nonespo_prop_rec.action_ix, approve_cc=True, @@ -928,7 +930,7 @@ def _check_proposed_pparams( 
cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_sec_nospo", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=sec_nospo_prop_rec.action_txid, action_ix=sec_nospo_prop_rec.action_ix, approve_cc=True, @@ -951,7 +953,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_gov_nodrep", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=gov_nodrep_prop_rec.action_txid, action_ix=gov_nodrep_prop_rec.action_ix, approve_cc=True, @@ -981,7 +983,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_gov_nocc", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=gov_nocc_prop_rec.action_txid, action_ix=gov_nocc_prop_rec.action_ix, approve_cc=False, @@ -1014,7 +1016,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_mix_nodrep", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=mix_nodrep_prop_rec.action_txid, action_ix=mix_nodrep_prop_rec.action_ix, approve_cc=True, @@ -1057,7 +1059,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_mix_nocc", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=mix_nocc_prop_rec.action_txid, action_ix=mix_nocc_prop_rec.action_ix, approve_cc=False, @@ -1082,7 +1084,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_fin_no", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=fin_prop_rec.action_txid, action_ix=fin_prop_rec.action_ix, approve_cc=False, @@ -1097,7 +1099,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, 
name_template=f"{temp_template}_fin_yes", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=fin_prop_rec.action_txid, action_ix=fin_prop_rec.action_ix, approve_cc=True, @@ -1142,7 +1144,7 @@ def _check_proposed_pparams( cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_mix_approved", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=mix_approved_prop_rec.action_txid, action_ix=mix_approved_prop_rec.action_ix, approve_cc=True, @@ -1178,7 +1180,7 @@ def _check_state(state: dict): cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_after_ratification", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=fin_prop_rec.action_txid, action_ix=fin_prop_rec.action_ix, approve_cc=False, @@ -1261,7 +1263,7 @@ def _check_state(state: dict): cluster_obj=cluster, governance_data=governance_data, name_template=f"{temp_template}_enacted", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=fin_prop_rec.action_txid, action_ix=fin_prop_rec.action_ix, approve_cc=False, @@ -1275,7 +1277,7 @@ def _check_state(state: dict): # (all the remaining pparam proposals in our case). 
deposit_amt = cluster.conway_genesis["govActionDeposit"] total_deposit_return = cluster.g_query.get_stake_addr_info( - pool_user_lg.stake.address + pool_user_lgp.stake.address ).reward_account_balance # Check total deposit return accounting for both expired and enacted actions assert ( @@ -1291,7 +1293,7 @@ def _check_state(state: dict): try: dbsync_utils.check_proposal_refunds( - stake_address=pool_user_lg.stake.address, refunds_num=submitted_proposal_count + stake_address=pool_user_lgp.stake.address, refunds_num=submitted_proposal_count ) except AssertionError as exc: db_errors_final.append(f"db-sync proposal refunds error: {exc}") diff --git a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py index 35d9e250f..2a1db964f 100644 --- a/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py +++ b/cardano_node_tests/tests/tests_conway/test_update_plutusv2_builtins.py @@ -27,12 +27,12 @@ @pytest.fixture -def pool_user_lg( +def pool_user_lgp( cluster_manager: cluster_management.ClusterManager, - cluster_lock_governance: governance_utils.GovClusterT, + cluster_lock_governance_plutus: governance_utils.GovClusterT, ) -> clusterlib.PoolUser: """Create a pool user for "lock governance".""" - cluster, __ = cluster_lock_governance + cluster, __ = cluster_lock_governance_plutus key = helpers.get_current_line_str() name_template = common.get_test_id(cluster) return conway_common.get_registered_pool_user( @@ -45,12 +45,12 @@ def pool_user_lg( @pytest.fixture -def payment_addrs_lg( +def payment_addrs_lgp( cluster_manager: cluster_management.ClusterManager, - cluster_lock_governance: governance_utils.GovClusterT, + cluster_lock_governance_plutus: governance_utils.GovClusterT, ) -> list[clusterlib.AddressRecord]: """Create new payment address.""" - cluster, __ = cluster_lock_governance + cluster, __ = cluster_lock_governance_plutus test_id = common.get_test_id(cluster) addrs 
= clusterlib_utils.create_payment_addr_records( *[f"{test_id}_payment_addr_{i}" for i in range(2)], @@ -77,9 +77,11 @@ class TestUpdateBuiltIns: @pytest.mark.upgrade_step1 def test_update_in_pv9( self, - cluster_lock_governance: governance_utils.GovClusterT, - payment_addrs_lg: list[clusterlib.AddressRecord], - pool_user_lg: clusterlib.PoolUser, + # The test is changing protocol parameters, so it is not safe to run Plutus tests at that + # time. It could e.g. lead to `PPViewHashesDontMatch` errors on transaction submits. + cluster_lock_governance_plutus: governance_utils.GovClusterT, + payment_addrs_lgp: list[clusterlib.AddressRecord], + pool_user_lgp: clusterlib.PoolUser, ): """Test updating PlutusV2 cost model in PV9. @@ -90,7 +92,7 @@ def test_update_in_pv9( * update the PlutusV2 cost model * check again that the Plutus script fails as expected in PV9 """ - cluster, governance_data = cluster_lock_governance + cluster, governance_data = cluster_lock_governance_plutus temp_template = common.get_test_id(cluster) if not conway_common.is_in_bootstrap(cluster_obj=cluster): @@ -120,7 +122,7 @@ def _update_cost_model() -> None: name_template=_name_template, anchor_url=anchor_url, anchor_data_hash=anchor_data_hash, - pool_user=pool_user_lg, + pool_user=pool_user_lgp, proposals=update_proposals, ) @@ -133,7 +135,7 @@ def _update_cost_model() -> None: cluster_obj=cluster, governance_data=governance_data, name_template=f"{_name_template}_yes", - payment_addr=pool_user_lg.payment, + payment_addr=pool_user_lgp.payment, action_txid=cost_model_proposal.action_txid, action_ix=cost_model_proposal.action_ix, approve_cc=True, @@ -169,8 +171,8 @@ def _update_cost_model() -> None: mint_raw.check_missing_builtin( cluster_obj=cluster, temp_template=temp_template, - payment_addr=payment_addrs_lg[0], - issuer_addr=payment_addrs_lg[1], + payment_addr=payment_addrs_lgp[0], + issuer_addr=payment_addrs_lgp[1], ) # Update the PlutusV2 cost model @@ -180,6 +182,6 @@ def _update_cost_model() -> 
None: mint_raw.check_missing_builtin( cluster_obj=cluster, temp_template=temp_template, - payment_addr=payment_addrs_lg[0], - issuer_addr=payment_addrs_lg[1], + payment_addr=payment_addrs_lgp[0], + issuer_addr=payment_addrs_lgp[1], ) diff --git a/cardano_node_tests/tests/tests_plutus/conftest.py b/cardano_node_tests/tests/tests_plutus/conftest.py new file mode 100644 index 000000000..45cbec13c --- /dev/null +++ b/cardano_node_tests/tests/tests_plutus/conftest.py @@ -0,0 +1,16 @@ +import logging + +import pytest +from cardano_clusterlib import clusterlib + +from cardano_node_tests.cluster_management import cluster_management + +LOGGER = logging.getLogger(__name__) + + +@pytest.fixture +def cluster( + cluster_manager: cluster_management.ClusterManager, +) -> clusterlib.ClusterLib: + """Return instance of `clusterlib.ClusterLib`.""" + return cluster_manager.get(use_resources=[cluster_management.Resources.PLUTUS]) diff --git a/cardano_node_tests/tests/tests_plutus/test_delegation.py b/cardano_node_tests/tests/tests_plutus/test_delegation.py index bc9bcc8d1..fd53fe8e1 100644 --- a/cardano_node_tests/tests/tests_plutus/test_delegation.py +++ b/cardano_node_tests/tests/tests_plutus/test_delegation.py @@ -58,6 +58,7 @@ def cluster_lock_42stake( use_resources=[ resources_management.OneOf(resources=cluster_management.Resources.ALL_POOLS), cluster_management.Resources.REWARDS, + cluster_management.Resources.PLUTUS, ], ) pool_name = cluster_manager.get_used_resources(from_set=cluster_management.Resources.ALL_POOLS)[ diff --git a/cardano_node_tests/tests/tests_plutus_v2/conftest.py b/cardano_node_tests/tests/tests_plutus_v2/conftest.py new file mode 100644 index 000000000..45cbec13c --- /dev/null +++ b/cardano_node_tests/tests/tests_plutus_v2/conftest.py @@ -0,0 +1,16 @@ +import logging + +import pytest +from cardano_clusterlib import clusterlib + +from cardano_node_tests.cluster_management import cluster_management + +LOGGER = logging.getLogger(__name__) + + +@pytest.fixture 
+def cluster(
+    cluster_manager: cluster_management.ClusterManager,
+) -> clusterlib.ClusterLib:
+    """Return instance of `clusterlib.ClusterLib`."""
+    return cluster_manager.get(use_resources=[cluster_management.Resources.PLUTUS])

From 53bc508af3caea0ce56d47a8dd424d3ea9cf882d Mon Sep 17 00:00:00 2001
From: Martin Kourim 
Date: Mon, 2 Dec 2024 15:46:16 +0100
Subject: [PATCH 168/168] refactor: replace `send_funds` with `send_tx`

Updated method calls from `send_funds` to `send_tx`. The `send_funds`
will be deprecated in clusterlib.

Renamed variables for better clarity and consistency.
Replaced 'destinations' with 'txouts' and 'ma_destinations' with
'ma_txouts'.
---
 .../tests/test_native_tokens.py               | 94 +++++++++----------
 .../tests/test_staking_no_rewards.py          | 12 +--
 cardano_node_tests/tests/test_tx_fees.py      | 22 ++---
 cardano_node_tests/utils/faucet.py            | 14 +--
 4 files changed, 70 insertions(+), 72 deletions(-)

diff --git a/cardano_node_tests/tests/test_native_tokens.py b/cardano_node_tests/tests/test_native_tokens.py
index 00d6b9dfd..415fdb164 100644
--- a/cardano_node_tests/tests/test_native_tokens.py
+++ b/cardano_node_tests/tests/test_native_tokens.py
@@ -1734,24 +1734,24 @@ def test_transfer_tokens(
         src_address = new_token.token_mint_addr.address
         dst_address = payment_addrs[2].address

-        ma_destinations = [
+        ma_txouts = [
             clusterlib.TxOut(address=dst_address, amount=amount, coin=new_token.token),
         ]

         # Destinations with both native token and Lovelace (it doesn't matter on the amounts) for
         # calculating minimum required Lovelace value for tx output
-        calc_destinations = [
-            *ma_destinations,
+        calc_txouts = [
+            *ma_txouts,
             clusterlib.TxOut(address=dst_address, amount=2_000_000),
         ]

-        min_value = cluster.g_transaction.calculate_min_req_utxo(txouts=calc_destinations)
+        min_value = cluster.g_transaction.calculate_min_req_utxo(txouts=calc_txouts)
         assert min_value.coin.lower() in (clusterlib.DEFAULT_COIN, "coin")
         assert min_value.value, "No Lovelace required for
`min-ada-value`" amount_lovelace = min_value.value - destinations = [ - *ma_destinations, + txouts = [ + *ma_txouts, clusterlib.TxOut(address=dst_address, amount=amount_lovelace), ] @@ -1759,7 +1759,7 @@ def test_transfer_tokens( if use_build_cmd: # TODO: add ADA txout for change address - see node issue #3057 - destinations.append(clusterlib.TxOut(address=src_address, amount=2_000_000)) + txouts.append(clusterlib.TxOut(address=src_address, amount=2_000_000)) if VERSIONS.transaction_era == VERSIONS.ALONZO: err_str = "" @@ -1767,7 +1767,7 @@ def test_transfer_tokens( cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -1782,8 +1782,8 @@ def test_transfer_tokens( min_reported_utxo = _min_reported_utxo.group(1) amount_lovelace = int(min_reported_utxo) - destinations = [ - *ma_destinations, + txouts = [ + *ma_txouts, clusterlib.TxOut(address=dst_address, amount=amount_lovelace), clusterlib.TxOut(address=src_address, amount=2_000_000), ] @@ -1791,7 +1791,7 @@ def test_transfer_tokens( tx_raw_output = cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -1802,10 +1802,10 @@ def test_transfer_tokens( ) cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output.txins) else: - tx_raw_output = cluster.g_transaction.send_funds( + tx_raw_output = cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + txouts=txouts, tx_files=tx_files, ) @@ -1880,44 +1880,44 @@ def test_transfer_multiple_tokens( dst_address1 = payment_addrs[1].address dst_address2 = payment_addrs[2].address - ma_destinations_address1 = [] - ma_destinations_address2 = [] + ma_txouts_address1 = [] + ma_txouts_address2 = [] for t in new_tokens: - ma_destinations_address1.append( + ma_txouts_address1.append( 
clusterlib.TxOut(address=dst_address1, amount=amount, coin=t.token) ) - ma_destinations_address2.append( + ma_txouts_address2.append( clusterlib.TxOut(address=dst_address2, amount=amount, coin=t.token) ) # Destinations with both native token and Lovelace (it doesn't matter on the amounts) for # calculating minimum required Lovelace value for tx output - calc_destinations_address1 = [ - *ma_destinations_address1, + calc_txouts_address1 = [ + *ma_txouts_address1, clusterlib.TxOut(address=dst_address1, amount=2_000_000), ] - calc_destinations_address2 = [ - *ma_destinations_address2, + calc_txouts_address2 = [ + *ma_txouts_address2, clusterlib.TxOut(address=dst_address2, amount=2_000_000), ] min_value_address1 = cluster.g_transaction.calculate_min_req_utxo( - txouts=calc_destinations_address1 + txouts=calc_txouts_address1 ) assert min_value_address1.coin.lower() in (clusterlib.DEFAULT_COIN, "coin") assert min_value_address1.value, "No Lovelace required for `min-ada-value`" amount_lovelace_address1 = min_value_address1.value min_value_address2 = cluster.g_transaction.calculate_min_req_utxo( - txouts=calc_destinations_address2 + txouts=calc_txouts_address2 ) assert min_value_address2.value, "No Lovelace required for `min-ada-value`" amount_lovelace_address2 = min_value_address2.value - destinations = [ - *ma_destinations_address1, + txouts = [ + *ma_txouts_address1, clusterlib.TxOut(address=dst_address1, amount=amount_lovelace_address1), - *ma_destinations_address2, + *ma_txouts_address2, clusterlib.TxOut(address=dst_address2, amount=amount_lovelace_address2), ] @@ -1927,7 +1927,7 @@ def test_transfer_multiple_tokens( if use_build_cmd: # TODO: add ADA txout for change address - destinations.append(clusterlib.TxOut(address=src_address, amount=4_000_000)) + txouts.append(clusterlib.TxOut(address=src_address, amount=4_000_000)) # TODO: see node issue #4297 if VERSIONS.transaction_era == VERSIONS.ALONZO: @@ -1936,7 +1936,7 @@ def test_transfer_multiple_tokens( 
cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -1951,10 +1951,10 @@ def test_transfer_multiple_tokens( min_reported_utxo = _min_reported_utxo.group(1) amount_lovelace_address1 = amount_lovelace_address2 = int(min_reported_utxo) - destinations = [ - *ma_destinations_address1, + txouts = [ + *ma_txouts_address1, clusterlib.TxOut(address=dst_address1, amount=amount_lovelace_address1), - *ma_destinations_address2, + *ma_txouts_address2, clusterlib.TxOut(address=dst_address2, amount=amount_lovelace_address2), clusterlib.TxOut(address=src_address, amount=4_000_000), ] @@ -1962,7 +1962,7 @@ def test_transfer_multiple_tokens( tx_raw_output = cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -1973,10 +1973,10 @@ def test_transfer_multiple_tokens( ) cluster.g_transaction.submit_tx(tx_file=tx_signed, txins=tx_raw_output.txins) else: - tx_raw_output = cluster.g_transaction.send_funds( + tx_raw_output = cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + txouts=txouts, tx_files=tx_files, ) @@ -2043,19 +2043,19 @@ def test_transfer_no_ada( src_address = new_token.token_mint_addr.address dst_address = payment_addrs[2].address - destinations = [clusterlib.TxOut(address=dst_address, amount=amount, coin=new_token.token)] + txouts = [clusterlib.TxOut(address=dst_address, amount=amount, coin=new_token.token)] tx_files = clusterlib.TxFiles(signing_key_files=[new_token.token_mint_addr.skey_file]) if use_build_cmd: expected_error = "Minimum required UTxO:" # TODO: add ADA txout for change address - destinations.append(clusterlib.TxOut(address=src_address, amount=3500_000)) + txouts.append(clusterlib.TxOut(address=src_address, amount=3500_000)) with pytest.raises(clusterlib.CLIError) as excinfo: 
cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -2064,10 +2064,10 @@ def test_transfer_no_ada( expected_error = "OutputTooSmallUTxO" try: - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + txouts=txouts, tx_files=tx_files, ) except clusterlib.CLIError as err: @@ -2098,14 +2098,14 @@ def test_transfer_invalid_token_amount( src_address = new_token.token_mint_addr.address dst_address = payment_addrs[2].address - ma_destinations = [ + ma_txouts = [ clusterlib.TxOut(address=dst_address, amount=token_amount, coin=new_token.token), ] min_amount_lovelace = 4_000_000 - destinations = [ - *ma_destinations, + txouts = [ + *ma_txouts, clusterlib.TxOut(address=dst_address, amount=min_amount_lovelace), ] @@ -2114,16 +2114,14 @@ def test_transfer_invalid_token_amount( if use_build_cmd: with pytest.raises(clusterlib.CLIError) as excinfo: # Add ADA txout for change address - see node issue #3057 - destinations.append( - clusterlib.TxOut(address=src_address, amount=min_amount_lovelace) - ) + txouts.append(clusterlib.TxOut(address=src_address, amount=min_amount_lovelace)) try: logging.disable(logging.ERROR) cluster.g_transaction.build_tx( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, fee_buffer=2_000_000, tx_files=tx_files, ) @@ -2140,9 +2138,9 @@ def test_transfer_invalid_token_amount( with pytest.raises(clusterlib.CLIError) as excinfo: try: logging.disable(logging.ERROR) - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, + txouts=txouts, tx_name=temp_template, tx_files=tx_files, fee=80_000, diff --git a/cardano_node_tests/tests/test_staking_no_rewards.py b/cardano_node_tests/tests/test_staking_no_rewards.py index 6eb44a063..713e7a6cc 100644 --- 
a/cardano_node_tests/tests/test_staking_no_rewards.py +++ b/cardano_node_tests/tests/test_staking_no_rewards.py @@ -266,14 +266,14 @@ def test_no_reward_unmet_pledge2( pledge_amount = loaded_data.pool_pledge // 2 # Withdraw part of the pledge - destinations = [ + txouts = [ clusterlib.TxOut(address=delegation_out.pool_user.payment.address, amount=pledge_amount) ] tx_files = clusterlib.TxFiles(signing_key_files=[pool_owner.payment.skey_file]) - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=pool_owner.payment.address, - destinations=destinations, tx_name=f"{temp_template}_withdraw_pledge", + txouts=txouts, tx_files=tx_files, ) @@ -325,7 +325,7 @@ def test_no_reward_unmet_pledge2( ) # Return pledge - destinations = [ + txouts = [ clusterlib.TxOut( address=pool_owner.payment.address, amount=pledge_amount + 100_000_000 ) @@ -333,10 +333,10 @@ def test_no_reward_unmet_pledge2( tx_files = clusterlib.TxFiles( signing_key_files=[delegation_out.pool_user.payment.skey_file] ) - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=delegation_out.pool_user.payment.address, - destinations=destinations, tx_name=f"{temp_template}_return_pledge", + txouts=txouts, tx_files=tx_files, ) diff --git a/cardano_node_tests/tests/test_tx_fees.py b/cardano_node_tests/tests/test_tx_fees.py index 31ffdcc55..b30881787 100644 --- a/cardano_node_tests/tests/test_tx_fees.py +++ b/cardano_node_tests/tests/test_tx_fees.py @@ -87,14 +87,14 @@ def test_negative_fee( src_address = payment_addrs[0].address dst_address = payment_addrs[1].address - destinations = [clusterlib.TxOut(address=dst_address, amount=10)] + txouts = [clusterlib.TxOut(address=dst_address, amount=10)] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) with pytest.raises(clusterlib.CLIError) as excinfo: - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + 
txouts=txouts, tx_files=tx_files, fee=fee, ) @@ -119,7 +119,7 @@ def test_smaller_fee( src_address = payment_addrs[0].address dst_address = payment_addrs[1].address - destinations = [clusterlib.TxOut(address=dst_address, amount=10)] + txouts = [clusterlib.TxOut(address=dst_address, amount=10)] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) fee = 0.0 @@ -128,17 +128,17 @@ def test_smaller_fee( cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, tx_files=tx_files, ) / fee_change ) with pytest.raises(clusterlib.CLIError) as excinfo: - cluster.g_transaction.send_funds( + cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + txouts=txouts, tx_files=tx_files, fee=int(fee), ) @@ -161,22 +161,22 @@ def test_expected_or_higher_fee( src_address = payment_addrs[0].address dst_address = payment_addrs[1].address - destinations = [clusterlib.TxOut(address=dst_address, amount=amount)] + txouts = [clusterlib.TxOut(address=dst_address, amount=amount)] tx_files = clusterlib.TxFiles(signing_key_files=[payment_addrs[0].skey_file]) fee = ( cluster.g_transaction.calculate_tx_fee( src_address=src_address, tx_name=temp_template, - txouts=destinations, + txouts=txouts, tx_files=tx_files, ) + fee_add ) - tx_raw_output = cluster.g_transaction.send_funds( + tx_raw_output = cluster.g_transaction.send_tx( src_address=src_address, - destinations=destinations, tx_name=temp_template, + txouts=txouts, tx_files=tx_files, fee=fee, ) diff --git a/cardano_node_tests/utils/faucet.py b/cardano_node_tests/utils/faucet.py index df33da250..cd73cc761 100644 --- a/cardano_node_tests/utils/faucet.py +++ b/cardano_node_tests/utils/faucet.py @@ -38,12 +38,12 @@ def fund_from_faucet( if isinstance(amount, int): amount = [amount] * len(dst_addr_records) - fund_dst = [ + fund_txouts = [ clusterlib.TxOut(address=d.address, amount=a) for d, a in 
zip(dst_addr_records, amount) if force or cluster_obj.g_query.get_address_balance(d.address) < a ] - if not fund_dst: + if not fund_txouts: return None if not faucet_data and all_faucets: @@ -59,10 +59,10 @@ def fund_from_faucet( tx_name = f"{tx_name}_funding" fund_tx_files = clusterlib.TxFiles(signing_key_files=[faucet_data["payment"].skey_file]) - tx_raw_output = cluster_obj.g_transaction.send_funds( + tx_raw_output = cluster_obj.g_transaction.send_tx( src_address=src_address, - destinations=fund_dst, tx_name=tx_name, + txouts=fund_txouts, tx_files=fund_tx_files, destination_dir=destination_dir, ) @@ -91,14 +91,14 @@ def return_funds_to_faucet( try: logging.disable(logging.ERROR) for addr, amount_rec in zip(src_addrs, amount): - fund_dst = [clusterlib.TxOut(address=faucet_addr, amount=amount_rec)] + fund_txouts = [clusterlib.TxOut(address=faucet_addr, amount=amount_rec)] fund_tx_files = clusterlib.TxFiles(signing_key_files=[addr.skey_file]) # Try to return funds; don't mind if there's not enough funds for fees etc. with contextlib.suppress(Exception): - cluster_obj.g_transaction.send_funds( + cluster_obj.g_transaction.send_tx( src_address=addr.address, - destinations=fund_dst, tx_name=tx_name, + txouts=fund_txouts, tx_files=fund_tx_files, destination_dir=destination_dir, )