diff --git a/contrib/devtools/README.md b/contrib/devtools/README.md index e04abb6beb..628da9bc4f 100644 --- a/contrib/devtools/README.md +++ b/contrib/devtools/README.md @@ -2,6 +2,7 @@ Contents =========== This directory contains tools for developers working on this repository. + github-merge.sh ================== @@ -36,6 +37,7 @@ Configuring the github-merge tool for the bitcoin repository is done in the foll git config githubmerge.testcmd "make -j4 check" (adapt to whatever you want to use for testing) git config --global user.signingkey mykeyid (if you want to GPG sign) + fix-copyright-headers.py =========================== @@ -49,6 +51,7 @@ For example a file changed in 2014 (with 2014 being the current year): would be changed to: ```// Copyright (c) 2009-2014 The Bitcoin Core developers``` + symbol-check.py ================== @@ -69,6 +72,7 @@ If there are 'unsupported' symbols, the return value will be 1 a list like this .../64/test_bitcoin: symbol std::out_of_range::~out_of_range() from unsupported version GLIBCXX_3.4.15 .../64/test_bitcoin: symbol _ZNSt8__detail15_List_nod from unsupported version GLIBCXX_3.4.15 + update-translations.py ======================= @@ -81,12 +85,14 @@ It will do the following automatically: See doc/translation-process.md for more information. + gen-manpages.sh =============== A small script to automatically create manpages in ../../doc/man by running the release binaries with the -help option. This requires help2man which can be found at: https://www.gnu.org/software/help2man/ + git-subtree-check.sh ==================== @@ -101,7 +107,8 @@ maintained: Usage: git-subtree-check.sh DIR COMMIT COMMIT may be omitted, in which case HEAD is used. -release_management/release_preparation + +release_management/(release_preparation) ================== Inside this folder the following are provided: @@ -128,7 +135,8 @@ After script completion the user is required to: - open a PR merging release preparation into release branch and wait for approval - once the release preparation branch is merged into the release branch, create annotated tag (vX.Y.Z) -release_management/release_backport + +release_management/(release_backport) ================== Inside this folder the following are provided: @@ -153,3 +161,13 @@ After script completion the user is required to: - check no error code is returned by the script - push from release backport local branch to remote - open a PR merging release backport into `main` branch and wait for approval + + +tests-execution-times-update.py +================== + +A script that interacts with the Travis CI API to update the estimated execution times of the Python regression tests, +based on the execution times measured in recent Travis CI runs. It accepts the `travis_token`, `builds_quantity` and +`rpc_tests_sh_file_path` parameters, which respectively specify the token used to authenticate against the Travis CI API, +the number of builds to consider when computing the average execution times, and the path to the `rpc-tests.sh` file to update. +The script prints the average execution times and then updates the `rpc-tests.sh` file accordingly.
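A typical invocation of the new script might look like the following sketch. The `key=value` argument names come from the script's own argument parsing; the token value is a placeholder, and the command is assumed to be run from the repository root (so that the default `./qa/pull-tester/rpc-tests.sh` path resolves) with the `requests` package installed.

```
# Sketch: refresh the estimated test execution times from the 10 most recent passed Travis CI builds.
# <YOUR_TRAVIS_TOKEN> is a placeholder for a valid Travis CI API token.
python3 contrib/devtools/tests-execution-times-update.py \
    travis_token=<YOUR_TRAVIS_TOKEN> \
    builds_quantity=10 \
    rpc_tests_sh_file_path=./qa/pull-tester/rpc-tests.sh
```

The averaged Linux and macOS times are printed to stdout and written back into the matching `testScripts`/`testScriptsExt` entries of `rpc-tests.sh`.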
diff --git a/contrib/devtools/tests-execution-times-update.py b/contrib/devtools/tests-execution-times-update.py new file mode 100755 index 0000000000..ab42a5c821 --- /dev/null +++ b/contrib/devtools/tests-execution-times-update.py @@ -0,0 +1,238 @@ + +import requests +import sys +import time + +k_python_os_linux = "python_linux" +k_python_os_macos = "python_macos" + +k_start_job_id_ubuntu_jammy = 13 +k_end_job_id_ubuntu_jammy = 18 +k_start_job_id_ubuntu_focal = 22 +k_end_job_id_ubuntu_focal = 27 +k_start_job_id_macos = 30 +k_end_job_id_macos = 42 +k_total_jobs = 43 + +# ---------- Travis API functions ---------- + +def get_build_history(repo_owner, repo_name, token, pages = 4): + build_history = [] + + base_url = 'https://api.travis-ci.com' + endpoint = f'/repo/{repo_owner}%2F{repo_name}/builds' + headers = {'Travis-API-Version': '3', 'Authorization': f'token {token}'} + url = base_url + endpoint + + for page in range(pages): + params = {'limit': 25, 'offset': 25 * page} + maxretry = 2 + for retry in range(maxretry): + response = requests.get(url, headers=headers, params=params) + try: + if response.status_code == 200: + build_history.extend(response.json()['builds']) + break + except: + if retry == maxretry - 1: + sys.exit(f"Error retrieving build history: {response.text}") + + return build_history + +def get_jobs_details(build_id, token): + base_url = f"https://api.travis-ci.com" + endpoint = f"/build/{build_id}/jobs" + headers = {"Travis-API-Version": "3", "Authorization": f"token {token}"} + url = base_url + endpoint + + maxretry = 2 + for retry in range(maxretry): + response = requests.get(url, headers=headers) + if response.status_code == 200: + return response.json() + else: + if retry == maxretry - 1: + sys.exit(f"Error retrieving jobs details: {response.text}") + +def get_job_logs(job_id, token): + base_url = f"https://api.travis-ci.com" + endpoint = f"/job/{job_id}/log.txt" + headers = {"Travis-API-Version": "3", "Authorization": f"token {token}"} + url = base_url + endpoint + + maxretry = 2 + for retry in range(maxretry): + response = requests.get(url, headers=headers) + if response.status_code == 200: + return response.text + else: + if retry == maxretry - 1: + sys.exit(f"Error retrieving job logs: {response.text}") + +# ---------- Travis API functions ---------- + +# ---------- Travis python tests per stage ---------- + +def get_python_stage_type(job_number, total_jobs): + if (k_start_job_id_ubuntu_jammy <= job_number <= k_end_job_id_ubuntu_jammy or + k_start_job_id_ubuntu_focal <= job_number <= k_end_job_id_ubuntu_focal): + return k_python_os_linux + elif (k_start_job_id_macos <= job_number <= k_end_job_id_macos): + return k_python_os_macos + else: + return "" + +# ---------- Travis python tests per stage ---------- + +# ---------- Auxiliaries ---------- + +def extract_tests_times(tests_times, os, log): + while (True): + start_pos_name = log.find("--- Success: ") + if (start_pos_name == -1): + break + start_pos_name += len("--- Success: ") + end_pos_name = log.find(" - elapsed time: ", start_pos_name, start_pos_name + 100) + test_name = log[start_pos_name:end_pos_name] + test_name = test_name.replace("**", "rt") + start_pos_time = end_pos_name + len(" - elapsed time: ") + end_pos_time = log.find(" ---", start_pos_time, start_pos_time + 10) + test_time = int(log[start_pos_time:end_pos_time]) + if (not test_name in tests_times): + tests_times[test_name] = {} + if (not os in tests_times[test_name]): + tests_times[test_name][os] = [] + 
tests_times[test_name][os].append(test_time) + log = log[end_pos_time:-1] + +def compute_average_tests_times(tests_times): + for test_name in tests_times: + for os in tests_times[test_name]: + times_list = tests_times[test_name][os] + tests_times[test_name][os] = int(sum(times_list) / len(times_list)) + +def update_rpc_tests_sh_file(file_path, tests_times): + with open(file_path, "r") as file: + file_data_in = file.read() + file_data_in = file_data_in.split('\n') + + file_data_out = "" + + line_start_opt_1 = " '" + line_start_opt_2 = " testScripts+=('" + line_end_opt_1 = "" + line_end_opt_2 = ")" + for index in range(len(file_data_in)): + line_start = "" + just_copy = False + if (".py" in file_data_in[index] and "'," in file_data_in[index]): + if (file_data_in[index].startswith(line_start_opt_1)): + line_start = line_start_opt_1 + line_end = line_end_opt_1 + elif (file_data_in[index].startswith(line_start_opt_2)): + line_start = line_start_opt_2 + line_end = line_end_opt_2 + else: + just_copy = True + if (not just_copy): + start_pos_name = file_data_in[index].find(line_start) + if (start_pos_name == -1): + continue + start_pos_name += len(line_start) + end_pos_name = file_data_in[index].find("',") # not using ".py" because of cases like 'txn_doublespend.py --mineblock',23,62 + if (end_pos_name == -1): + continue + test_name = file_data_in[index][start_pos_name:end_pos_name] + if (test_name in tests_times): + linux_time = tests_times[test_name][k_python_os_linux] if k_python_os_linux in tests_times[test_name] else 0 + macos_time = tests_times[test_name][k_python_os_macos] if k_python_os_macos in tests_times[test_name] else 0 + file_data_out += f"{line_start}{test_name}',{linux_time},{macos_time}{line_end}\n" + else: + just_copy = True + else: + just_copy = True + + if (just_copy): + file_data_out += file_data_in[index] + if (index < len(file_data_in) - 1): + file_data_out += "\n" + + with open(file_path, "w") as file: + file.write(file_data_out) + +def print_tests_times(tests_times): + for test_name in tests_times: + linux_time = tests_times[test_name][k_python_os_linux] if k_python_os_linux in tests_times[test_name] else 0 + macos_time = tests_times[test_name][k_python_os_macos] if k_python_os_macos in tests_times[test_name] else 0 + print(f"'{test_name}',{linux_time},{macos_time}") + +def check_missing(tests_times): + for test_name in tests_times: + for os in [k_python_os_linux, k_python_os_macos]: + if (not os in tests_times[test_name]): + print(f"Missing {os} for {test_name}") + +# ---------- Auxiliaries ---------- + +# Script main + +start_time = time.time() + +repository_owner = 'HorizenOfficial' +repository_name = 'zen' +travis_token = 'SetHereProperTokenValue' +builds_quantity = 10 +rpc_tests_sh_file_path = "./qa/pull-tester/rpc-tests.sh" + +for arg in sys.argv: + if (arg.startswith("travis_token=")): + travis_token = arg.replace("travis_token=", "") + if (arg.startswith("builds_quantity=")): + builds_quantity = int(arg.replace("builds_quantity=", "")) + if (arg.startswith("rpc_tests_sh_file_path=")): + rpc_tests_sh_file_path = arg.replace("rpc_tests_sh_file_path=", "") + +print("Running with following params:") +print(f"repository_owner (constant): {repository_owner}") +print(f"repository_name (constant): {repository_name}") +print(f"travis_token (configurable): {travis_token}") +print(f"builds_quantity (configurable): {builds_quantity}") +print(f"rpc_tests_sh_file_path (configurable): {rpc_tests_sh_file_path}") + +assert(travis_token != 'SetHereProperTokenValue') + +tests_times = 
{} + +print("Getting build history") +builds = get_build_history(repository_owner, repository_name, travis_token, int(builds_quantity / 25 + 1)) # retrieving last (builds_quantity / 25 + 1) * 25 builds +builds_accounted = 0 +for build in builds: + if (builds_accounted >= builds_quantity): + break + build_id = build['id'] + build_number = build['number'] + build_state = build['state'] + if (build_state == "passed"): + print(f"Processing a passed build ({build_number})") + builds_accounted += 1 + jobs = build['jobs'] + if (len(jobs) == k_total_jobs): + print("Getting jobs details") + jobs_details = get_jobs_details(build_id, travis_token) + for job_details in jobs_details["jobs"]: + job_number = (int)(job_details['number'].split(".")[-1]) + python_stage_type = get_python_stage_type(job_number, len(jobs)) + if (python_stage_type in [k_python_os_linux, k_python_os_macos]): + print(f"Getting job logs ({job_number})") + job_logs = get_job_logs(job_details["id"], travis_token) + extract_tests_times(tests_times, python_stage_type, job_logs) + else: + sys.exit("Unsupported .travis.yml file (os cannot be identified)") + +print(f"Averaging over {builds_accounted} accounted builds") +compute_average_tests_times(tests_times) +print_tests_times(tests_times) +check_missing(tests_times) +update_rpc_tests_sh_file(rpc_tests_sh_file_path, tests_times) +print(f"Script execution time: {time.time() - start_time}") +temp = 0 diff --git a/qa/pull-tester/rpc-tests.sh b/qa/pull-tester/rpc-tests.sh index 02b7ca7426..9d3d47306e 100755 --- a/qa/pull-tester/rpc-tests.sh +++ b/qa/pull-tester/rpc-tests.sh @@ -47,163 +47,163 @@ done # Also, note that comma ',' is used as delimiter. Please modify loadbalancer.py if in the future we # need to use this character in the filename field. 
testScripts=( - 'paymentdisclosure.py',99,332 - 'prioritisetransaction.py',47,145 - 'wallet_treestate.py',136,345 - 'wallet_protectcoinbase.py',294,1368 - 'wallet_shieldcoinbase.py',214,970 - 'wallet_mergetoaddress.py',491,1740 - 'wallet_mergetoaddress_2.py',975,2539 - 'wallet.py',131,692 - 'wallet_nullifiers.py',109,372 - 'wallet_1941.py',53,170 - 'wallet_grothtx.py',91,262 - 'listtransactions.py',129,241 + 'paymentdisclosure.py',100,304 + 'prioritisetransaction.py',78,132 + 'wallet_treestate.py',134,317 + 'wallet_protectcoinbase.py',404,1513 + 'wallet_shieldcoinbase.py',215,847 + 'wallet_mergetoaddress.py',502,1509 + 'wallet_mergetoaddress_2.py',997,2930 + 'wallet.py',129,587 + 'wallet_nullifiers.py',140,337 + 'wallet_1941.py',54,156 + 'wallet_grothtx.py',89,241 + 'listtransactions.py',91,222 'mempool_resurrect_test.py',6,16 - 'txn_doublespend.py',22,139 - 'txn_doublespend.py --mineblock',23,62 - 'getchaintips.py',66,269 - 'rawtransactions.py',141,397 - 'rest.py',26,59 - 'mempool_spendcoinbase.py',6,96 - 'mempool_coinbase_spends.py',14,34 - 'mempool_tx_input_limit.py',91,308 - 'httpbasics.py',21,63 - 'zapwallettxes.py',35,86 - 'proxy_test.py',22,142 - 'merkle_blocks.py',69,163 - 'fundrawtransaction.py',60,128 + 'txn_doublespend.py',23,61 + 'txn_doublespend.py --mineblock',24,63 + 'getchaintips.py',68,253 + 'rawtransactions.py',137,330 + 'rest.py',27,57 + 'mempool_spendcoinbase.py',6,15 + 'mempool_coinbase_spends.py',16,110 + 'mempool_tx_input_limit.py',91,281 + 'httpbasics.py',21,61 + 'zapwallettxes.py',35,83 + 'proxy_test.py',22,134 + 'merkle_blocks.py',63,145 + 'fundrawtransaction.py',57,122 'signrawtransactions.py',6,15 - 'walletbackup.py',432,1478 - 'key_import_export.py',41,86 - 'nodehandling.py',428,666 - 'reindex.py',13,33 + 'walletbackup.py',426,1380 + 'key_import_export.py',38,78 + 'nodehandling.py',360,406 + 'reindex.py',12,30 'decodescript.py',6,16 'disablewallet.py',6,16 - 'zcjoinsplit.py',45,215 - 'zcjoinsplitdoublespend.py',199,733 - 'zkey_import_export.py',381,1588 - 'getblocktemplate.py',12,35 - 'bip65-cltv-p2p.py',6,18 - 'bipdersig-p2p.py',7,17 - 'nulldata.py',38,69 - 'blockdelay.py',46,101 - 'blockdelay_2.py',51,84 - 'z_sendmany.py',71,196 - 'sc_create.py',185,707 - 'sc_split.py',38,84 - 'sc_invalidate.py',41,99 - 'sc_cert_base.py',104,289 - 'sc_cert_nonceasing.py',97,304 - 'sc_cert_fee.py',53,144 - 'sc_cert_epoch.py',87,224 - 'sc_cert_invalidate.py',58,131 - 'sc_fwd_maturity.py',54,129 - 'sc_rawcertificate.py',74,186 - 'getunconfirmedtxdata.py',46,108 - 'sc_cr_and_fw_in_mempool.py',57,137 - 'sc_cert_change.py',55,145 - 'sc_cert_orphans.py',59,145 - 'sc_cert_maturity.py',56,144 - 'sbh_rpc_cmds.py',41,101 - 'sc_cert_ceasing.py',67,179 - 'sc_cert_customfields.py',106,317 - 'sc_cert_getraw.py',48,114 - 'sc_quality_mempool.py',109,334 - 'sc_ft_and_mbtr_fees.py',47,91 - 'sc_ft_and_mbtr_fees_update.py',305,1131 - 'sc_bwt_request.py',70,165 - 'sc_cert_quality_wallet.py',101,250 - 'ws_messages.py',71,173 - 'ws_getsidechainversions.py',47,138 - 'sc_cert_ceasing_split.py',66,161 - 'sc_async_proof_verifier.py',97,227 - 'sc_quality_blockchain.py',86,254 - 'sc_quality_voiding.py',57,144 - 'sc_csw_actcertdata.py',104,315 - 'sc_csw_actcertdata_null.py',54,136 - 'sc_cert_ceasing_sg.py',54,138 - 'sc_csw_nullifier.py',120,319 - 'sc_getscinfo.py',121,592 - 'sc_quality_nodes.py',51,126 - 'sc_cert_memcleanup_split.py',69,173 - 'sc_csw_fundrawtransaction.py',93,260 - 'sc_proof_verifier_low_priority_threads.py',49,82 - 'subsidyhalving.py',195,361 - 'cbh_rpfix.py',45,113 - 
'cbh_rpcheck.py',26,61 - 'tlsprotocols.py',12,34 - 'mempool_double_spend.py',21,60 - 'getblockmerkleroots.py',67,156 - 'sc_block_partitions.py',60,153 - 'sc_cert_bwt_amount_rounding.py',30,73 - 'sc_csw_eviction_from_mempool.py',124,377 - 'sc_csw_memcleanup_split.py',70,188 - 'sc_csw_balance_exceeding.py',57,162 - 'sc_stale_ft_and_mbtr.py',121,293 - 'sc_cert_getblocktemplate.py',253,993 - 'sc_cert_bt_immature_balances.py',40,94 - 'sc_rpc_cmds_fee_handling.py',137,254 - 'sc_cert_listsinceblock.py',70,158 - 'sc_cert_dust.py',98,236 - 'sc_keyrot.py',47,149 - 'txindex.py',28,72 - 'addressindex.py',34,95 - 'spentindex.py',18,74 - 'timestampindex.py',21,75 - 'sc_cert_addressindex.py',128,508 - 'sc_cert_addrmempool.py',46,168 - 'getblockexpanded.py',191,478 - 'sc_rpc_cmds_json_output.py',68,187 - 'sc_version.py',104,371 - 'sc_getscgenesisinfo.py',86,286 - 'fundaddresses.py',12,25 - 'sc_getcertmaturityinfo.py',68,231 - 'sc_big_commitment_tree.py',63,110 - 'sc_big_commitment_tree_getblockmerkleroot.py',11,25 - 'p2p_ignore_spent_tx.py',215,455 - 'shieldedpooldeprecation_rpc.py',558,1794 - 'mempool_size_limit.py',121,203 - 'mempool_size_limit_more.py',103,160 - 'mempool_size_limit_even_more.py',87,210 - 'mempool_hard_fork_cleaning.py',34,60 + 'zcjoinsplit.py',106,315 + 'zcjoinsplitdoublespend.py',197,837 + 'zkey_import_export.py',378,1453 + 'getblocktemplate.py',12,32 + 'bip65-cltv-p2p.py',7,18 + 'bipdersig-p2p.py',6,17 + 'nulldata.py',38,67 + 'blockdelay.py',46,97 + 'blockdelay_2.py',50,83 + 'z_sendmany.py',70,183 + 'sc_create.py',185,672 + 'sc_split.py',40,80 + 'sc_invalidate.py',43,91 + 'sc_cert_base.py',94,275 + 'sc_cert_nonceasing.py',88,276 + 'sc_cert_fee.py',54,126 + 'sc_cert_epoch.py',88,210 + 'sc_cert_invalidate.py',61,122 + 'sc_fwd_maturity.py',52,120 + 'sc_rawcertificate.py',71,181 + 'getunconfirmedtxdata.py',46,105 + 'sc_cr_and_fw_in_mempool.py',58,125 + 'sc_cert_change.py',57,138 + 'sc_cert_orphans.py',58,141 + 'sc_cert_maturity.py',57,134 + 'sbh_rpc_cmds.py',42,93 + 'sc_cert_ceasing.py',66,174 + 'sc_cert_customfields.py',100,290 + 'sc_cert_getraw.py',47,108 + 'sc_quality_mempool.py',104,292 + 'sc_ft_and_mbtr_fees.py',47,87 + 'sc_ft_and_mbtr_fees_update.py',305,1052 + 'sc_bwt_request.py',68,155 + 'sc_cert_quality_wallet.py',96,236 + 'ws_messages.py',75,161 + 'ws_getsidechainversions.py',44,130 + 'sc_cert_ceasing_split.py',64,153 + 'sc_async_proof_verifier.py',89,209 + 'sc_quality_blockchain.py',83,233 + 'sc_quality_voiding.py',57,136 + 'sc_csw_actcertdata.py',97,290 + 'sc_csw_actcertdata_null.py',52,129 + 'sc_cert_ceasing_sg.py',53,132 + 'sc_csw_nullifier.py',112,304 + 'sc_getscinfo.py',115,408 + 'sc_quality_nodes.py',47,116 + 'sc_cert_memcleanup_split.py',67,161 + 'sc_csw_fundrawtransaction.py',86,251 + 'sc_proof_verifier_low_priority_threads.py',50,79 + 'subsidyhalving.py',227,316 + 'cbh_rpfix.py',46,104 + 'cbh_rpcheck.py',27,57 + 'tlsprotocols.py',12,32 + 'mempool_double_spend.py',22,55 + 'getblockmerkleroots.py',88,146 + 'sc_block_partitions.py',59,150 + 'sc_cert_bwt_amount_rounding.py',31,70 + 'sc_csw_eviction_from_mempool.py',110,342 + 'sc_csw_memcleanup_split.py',68,168 + 'sc_csw_balance_exceeding.py',55,145 + 'sc_stale_ft_and_mbtr.py',117,276 + 'sc_cert_getblocktemplate.py',181,867 + 'sc_cert_bt_immature_balances.py',39,87 + 'sc_rpc_cmds_fee_handling.py',139,245 + 'sc_cert_listsinceblock.py',70,147 + 'sc_cert_dust.py',96,226 + 'sc_keyrot.py',44,141 + 'txindex.py',26,66 + 'addressindex.py',45,88 + 'spentindex.py',28,69 + 'timestampindex.py',30,70 + 'sc_cert_addressindex.py',185,534 
+ 'sc_cert_addrmempool.py',66,158 + 'getblockexpanded.py',188,381 + 'sc_rpc_cmds_json_output.py',66,171 + 'sc_version.py',92,337 + 'sc_getscgenesisinfo.py',79,254 + 'fundaddresses.py',25,30 + 'sc_getcertmaturityinfo.py',61,204 + 'sc_big_commitment_tree.py',62,104 + 'sc_big_commitment_tree_getblockmerkleroot.py',14,24 + 'p2p_ignore_spent_tx.py',217,414 + 'shieldedpooldeprecation_rpc.py',566,1712 + 'mempool_size_limit.py',112,179 + 'mempool_size_limit_more.py',38,58 + 'mempool_size_limit_even_more.py',100,218 + 'mempool_hard_fork_cleaning.py',61,121 ); testScriptsExt=( - 'getblocktemplate_longpoll.py',120,207 - 'getblocktemplate_proposals.py',57,129 - 'getblocktemplate_blockmaxcomplexity.py',55,136 - 'getblocktemplate_priority.py',39,84 + 'getblocktemplate_longpoll.py',104,127 + 'getblocktemplate_proposals.py',56,200 + 'getblocktemplate_blockmaxcomplexity.py',54,133 + 'getblocktemplate_priority.py',38,77 # 'pruning.py' # disabled for Zen. Failed because of the issue #1302 in zcash - 'forknotify.py',27,60 + 'forknotify.py',27,55 # 'hardforkdetection.py' # disabled for Zen. Failed because of the issue #1302 in zcash # 'invalidateblock.py' # disabled for Zen. Failed because of the issue #1302 in zcash - 'keypool.py',12,34 - 'receivedby.py',30,68 - 'rpcbind_test.py',60,140 + 'keypool.py',12,31 + 'receivedby.py',28,142 + 'rpcbind_test.py',76,0 # 'script_test.py' - 'smartfees.py',158,480 - 'maxblocksinflight.py',14,25 - 'invalidblockrequest.py',40,100 - 'invalidblockposthalving.py',113,250 - 'p2p-acceptblock.py',202,450 - 'replay_protection.py',22,56 - 'headers_01.py',14,34 - 'headers_02.py',22,52 - 'headers_03.py',22,54 - 'headers_04.py',26,57 - 'headers_05.py',48,82 - 'headers_06.py',44,87 - 'headers_07.py',107,228 - 'headers_08.py',25,59 - 'headers_09.py',44,81 - 'headers_10.py',36,72 - 'checkblockatheight.py',103,236 - 'sc_big_block.py',92,247 + 'smartfees.py',185,376 + 'maxblocksinflight.py',14,24 + 'invalidblockrequest.py',44,0 + 'invalidblockposthalving.py',114,0 + 'p2p-acceptblock.py',214,0 + 'replay_protection.py',22,52 + 'headers_01.py',12,33 + 'headers_02.py',19,48 + 'headers_03.py',19,50 + 'headers_04.py',24,54 + 'headers_05.py',43,73 + 'headers_06.py',42,83 + 'headers_07.py',41,83 + 'headers_08.py',26,55 + 'headers_09.py',46,76 + 'headers_10.py',27,56 + 'checkblockatheight.py',105,220 + 'sc_big_block.py',87,233 ); if [ "x$ENABLE_ZMQ" = "x1" ]; then - testScripts+=('zmq_test.py',25,73) + testScripts+=('zmq_test.py',25,66) fi if [ "x$ENABLE_PROTON" = "x1" ]; then