Problem: benchmark don't support batch tx (#1650)
* Problem: benchmark don't support batch tx

Solution:
- add batch mode

Update CHANGELOG.md

Signed-off-by: yihuang <[email protected]>

fix config

* fix

* Update testground/benchmark/.flake8

Signed-off-by: yihuang <[email protected]>

* fix lint

---------

Signed-off-by: yihuang <[email protected]>
yihuang authored Oct 18, 2024
1 parent 3303077 commit 9080ded
Showing 9 changed files with 285 additions and 29 deletions.
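
The new `--batch-size` option (default 1) in stateless.py is stored in config.json as `batch_size` and forwarded to `transaction.gen`, which now packs several eth txs into a single cosmos tx per account. A minimal sketch of the new call path, assuming the `benchmark` package is importable; the concrete numbers are made up:

from benchmark import transaction

global_seq = 0      # node index (hypothetical)
num_accounts = 10
num_txs = 100       # eth txs per account
batch_size = 4      # eth txs packed into each cosmos tx

txs = transaction.gen(global_seq, num_accounts, num_txs, "simple-transfer", batch_size)
# each entry is a base64-encoded cosmos tx wrapping up to `batch_size` MsgEthereumTx
print(len(txs))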
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -13,6 +13,7 @@
* (testground)[#1644](https://github.com/crypto-org-chain/cronos/pull/1644) load generator retry with backoff on error.
* [#1648](https://github.com/crypto-org-chain/cronos/pull/1648) Add abort OE in PrepareProposal.
* (testground)[#1651](https://github.com/crypto-org-chain/cronos/pull/1651) Benchmark use cosmos broadcast rpc.
* (testground)[#1650](https://github.com/crypto-org-chain/cronos/pull/1650) Benchmark support batch mode.

*Oct 14, 2024*

4 changes: 4 additions & 0 deletions testground/benchmark/.flake8
@@ -0,0 +1,4 @@
[flake8]
max-line-length = 88
extend-ignore = E203
exclude = __pycache__
17 changes: 13 additions & 4 deletions testground/benchmark/benchmark/stateless.py
@@ -59,6 +59,7 @@ def validate_json(ctx, param, value):
@click.option("--num-txs", default=1000)
@click.option("--num-idle", default=20)
@click.option("--tx-type", default="simple-transfer")
@click.option("--batch-size", default=1)
@click.option("--config-patch", default="{}", callback=validate_json)
@click.option("--app-patch", default="{}", callback=validate_json)
@click.option("--genesis-patch", default="{}", callback=validate_json)
@@ -82,6 +83,7 @@ def _gen(
num_txs: int = 1000,
num_idle: int = 20,
tx_type: str = "simple-transfer",
batch_size: int = 1,
validator_generate_load: bool = True,
config_patch: dict = None,
app_patch: dict = None,
@@ -145,7 +147,8 @@ def _gen(
"num_txs": num_txs,
"num_idle": num_idle,
"tx_type": tx_type,
"validator-generate-load": validator_generate_load,
"batch_size": batch_size,
"validator_generate_load": validator_generate_load,
}
(outdir / "config.json").write_text(json.dumps(cfg))

@@ -216,6 +219,7 @@ def run(outdir: str, datadir: str, cronosd, global_seq):
@click.option("--num-accounts", default=10)
@click.option("--num-txs", default=1000)
@click.option("--tx-type", default="simple-transfer")
@click.option("--batch-size", default=1)
@click.option("--node", type=int)
def gen_txs(**kwargs):
return _gen_txs(**kwargs)
@@ -249,13 +253,14 @@ def _gen_txs(
num_accounts: int = 10,
num_txs: int = 1000,
tx_type: str = "simple-transfer",
batch_size: int = 1,
node: Optional[int] = None,
):
outdir = Path(outdir)

def job(global_seq):
print("generating", num_accounts * num_txs, "txs for node", global_seq)
txs = transaction.gen(global_seq, num_accounts, num_txs, tx_type)
txs = transaction.gen(global_seq, num_accounts, num_txs, tx_type, batch_size)
transaction.save(txs, outdir, global_seq)
print("saved", len(txs), "txs for node", global_seq)

@@ -269,7 +274,7 @@ def job(global_seq):
def do_run(
datadir: Path, home: Path, cronosd: str, group: str, global_seq: int, cfg: dict
):
if group == FULLNODE_GROUP or cfg.get("validator-generate-load", True):
if group == FULLNODE_GROUP or cfg.get("validator_generate_load", True):
txs = prepare_txs(cfg, datadir, global_seq)
else:
txs = []
@@ -437,7 +442,11 @@ def prepare_txs(cfg, datadir, global_seq):
else:
print("generating", cfg["num_accounts"] * cfg["num_txs"], "txs")
txs = transaction.gen(
global_seq, cfg["num_accounts"], cfg["num_txs"], cfg["tx_type"]
global_seq,
cfg["num_accounts"],
cfg["num_txs"],
cfg["tx_type"],
cfg["batch_size"],
)
return txs

27 changes: 21 additions & 6 deletions testground/benchmark/benchmark/stats.py
@@ -1,6 +1,6 @@
from datetime import datetime

from .utils import block, block_height
from .utils import block, block_eth, block_height

# the tps calculation use the average of the last 10 blocks
TPS_WINDOW = 5
@@ -19,18 +19,33 @@ def calculate_tps(blocks):
return txs / time_diff


def dump_block_stats(fp):
def get_block_info_cosmos(height):
blk = block(height)
timestamp = datetime.fromisoformat(blk["result"]["block"]["header"]["time"])
txs = len(blk["result"]["block"]["data"]["txs"])
return timestamp, txs


def get_block_info_eth(height):
blk = block_eth(height)
timestamp = datetime.fromtimestamp(int(blk["timestamp"], 0))
txs = len(blk["transactions"])
return timestamp, txs


def dump_block_stats(fp, eth=True):
"""
dump simple statistics for blocks for analysis
dump block stats using web3 json-rpc, which splits batch tx
"""
tps_list = []
current = block_height()
blocks = []
# skip block 1 whose timestamp is not accurate
for i in range(2, current + 1):
blk = block(i)
timestamp = datetime.fromisoformat(blk["result"]["block"]["header"]["time"])
txs = len(blk["result"]["block"]["data"]["txs"])
if eth:
timestamp, txs = get_block_info_eth(i)
else:
timestamp, txs = get_block_info_cosmos(i)
blocks.append((txs, timestamp))
tps = calculate_tps(blocks[-TPS_WINDOW:])
tps_list.append(tps)
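
Since one batched cosmos tx now carries several `MsgEthereumTx`, counting txs from the Tendermint block endpoint undercounts throughput; the eth JSON-RPC view reports the wrapped eth txs individually, which is why `dump_block_stats` defaults to `eth=True`. A rough comparison, assuming a local node is running; the block height is arbitrary:

from benchmark.stats import get_block_info_cosmos, get_block_info_eth

height = 5                                     # hypothetical block height
_, cosmos_txs = get_block_info_cosmos(height)  # a batch counts as one tx
_, eth_txs = get_block_info_eth(height)        # each wrapped eth tx counted separately
print(cosmos_txs, eth_txs)                     # eth_txs >= cosmos_txs when every tx is an eth batch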
50 changes: 32 additions & 18 deletions testground/benchmark/benchmark/transaction.py
@@ -14,7 +14,7 @@

from . import cosmostx
from .erc20 import CONTRACT_ADDRESS
from .utils import DEFAULT_DENOM, LOCAL_RPC, gen_account, split
from .utils import DEFAULT_DENOM, LOCAL_RPC, gen_account, split, split_batch

GAS_PRICE = 1000000000
CHAIN_ID = 777
@@ -55,8 +55,10 @@ def erc20_transfer_tx(nonce: int):


Job = namedtuple(
"Job", ["chunk", "global_seq", "num_accounts", "num_txs", "tx_type", "create_tx"]
"Job",
["chunk", "global_seq", "num_accounts", "num_txs", "tx_type", "create_tx", "batch"],
)
EthTx = namedtuple("EthTx", ["tx", "raw", "sender"])


def _do_job(job: Job):
@@ -68,19 +70,25 @@ def _do_job(job: Job):
for i in range(job.num_txs):
tx = job.create_tx(i)
raw = acct.sign_transaction(tx).rawTransaction
txs.append(build_cosmos_tx(tx, raw, HexBytes(acct.address)))
txs.append(EthTx(tx, raw, HexBytes(acct.address)))
total += 1
if total % 1000 == 0:
print("generated", total, "txs for node", job.global_seq)

# to keep it simple, only build batch inside the account
txs = [
build_cosmos_tx(*txs[start:end])
for start, end in split_batch(len(txs), job.batch)
]
acct_txs.append(txs)
return acct_txs


def gen(global_seq, num_accounts, num_txs, tx_type: str) -> [str]:
def gen(global_seq, num_accounts, num_txs, tx_type: str, batch: int) -> [str]:
chunks = split(num_accounts, os.cpu_count())
create_tx = TX_TYPES[tx_type]
jobs = [
Job(chunk, global_seq, num_accounts, num_txs, tx_type, create_tx)
Job(chunk, global_seq, num_accounts, num_txs, tx_type, create_tx, batch)
for chunk in chunks
]

@@ -112,28 +120,32 @@ def load(datadir: Path, global_seq: int) -> [str]:
return ujson.load(f)


def build_cosmos_tx(tx: dict, raw: bytes, sender: bytes) -> str:
def build_cosmos_tx(*txs: EthTx) -> str:
"""
return base64 encoded cosmos tx
return base64 encoded cosmos tx, support batch
"""
msg = cosmostx.build_any(
"/ethermint.evm.v1.MsgEthereumTx",
cosmostx.MsgEthereumTx(
from_=sender,
raw=raw,
),
)
fee = tx["gas"] * tx["gasPrice"]
msgs = [
cosmostx.build_any(
"/ethermint.evm.v1.MsgEthereumTx",
cosmostx.MsgEthereumTx(
from_=tx.sender,
raw=tx.raw,
),
)
for tx in txs
]
fee = sum(tx.tx["gas"] * tx.tx["gasPrice"] for tx in txs)
gas = sum(tx.tx["gas"] for tx in txs)
body = cosmostx.TxBody(
messages=[msg],
messages=msgs,
extension_options=[
cosmostx.build_any("/ethermint.evm.v1.ExtensionOptionsEthereumTx")
],
)
auth_info = cosmostx.AuthInfo(
fee=cosmostx.Fee(
amount=[cosmostx.Coin(denom=DEFAULT_DENOM, amount=str(fee))],
gas_limit=tx["gas"],
gas_limit=gas,
)
)
return base64.b64encode(
@@ -151,7 +163,9 @@ async def async_sendtx(session, raw):
json={
"jsonrpc": "2.0",
"method": "broadcast_tx_async",
"params": {"tx": raw},
"params": {
"tx": raw,
},
"id": 1,
},
) as rsp:
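
The batched `build_cosmos_tx` sums gas and fee across the wrapped eth txs and uses the totals for the single `Fee`. A tiny worked example of that arithmetic with made-up gas values (not taken from the diff):

GAS_PRICE = 1000000000                     # matches the module constant
batch = [
    {"gas": 21000, "gasPrice": GAS_PRICE},
    {"gas": 21000, "gasPrice": GAS_PRICE},
]
fee = sum(tx["gas"] * tx["gasPrice"] for tx in batch)  # 42_000_000_000_000
gas = sum(tx["gas"] for tx in batch)                   # 42000, used as Fee.gas_limit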
26 changes: 26 additions & 0 deletions testground/benchmark/benchmark/utils.py
@@ -171,6 +171,18 @@ def block(height):
return requests.get(f"{LOCAL_RPC}/block?height={height}").json()


def block_eth(height: int):
return requests.post(
f"{LOCAL_JSON_RPC}",
json={
"jsonrpc": "2.0",
"method": "eth_getBlockByNumber",
"params": [hex(height), False],
"id": 1,
},
).json()["result"]


def block_txs(height):
return block(height)["result"]["block"]["data"]["txs"]

@@ -181,3 +193,17 @@ def split(a: int, n: int):
"""
k, m = divmod(a, n)
return [(i * k + min(i, m), (i + 1) * k + min(i + 1, m)) for i in range(n)]


def split_batch(a: int, size: int):
"""
Split range(0, a) into batches with size
"""
if size < 1:
size = 1

k, m = divmod(a, size)
parts = [(i * size, (i + 1) * size) for i in range(k)]
if m:
parts.append((k * size, a))
return parts
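
For reference, the new `split_batch` helper partitions `range(0, a)` into consecutive batches of at most `size`, with any remainder in a final shorter batch; the values below are only an example, checked against the code above:

from benchmark.utils import split_batch

assert split_batch(10, 3) == [(0, 3), (3, 6), (6, 9), (9, 10)]  # remainder batch at the end
assert split_batch(3, 0) == [(0, 1), (1, 2), (2, 3)]            # size below 1 is clamped to 1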
2 changes: 2 additions & 0 deletions testground/benchmark/overlay.nix
@@ -11,6 +11,8 @@ let
pyunormalize = [ "setuptools" ];
pytest-github-actions-annotate-failures = [ "setuptools" ];
cprotobuf = [ "setuptools" ];
flake8-black = [ "setuptools" ];
flake8-isort = [ "hatchling" ];
};
in
lib.mapAttrs
