From 73aaa794935fdb07e32960b07b71a2e9f79f3be3 Mon Sep 17 00:00:00 2001
From: Martin Stefcek
Date: Mon, 19 Feb 2024 10:36:35 +0400
Subject: [PATCH] add get dbs

---
 Processes/common_exec.py |  8 ++++
 Processes/miner.py       |  8 ++++
 Stats/scan.py            | 82 ----------------------------------------
 temp.py                  | 42 --------------------
 webui.py                 | 45 ++++++++++++++++++++--
 5 files changed, 58 insertions(+), 127 deletions(-)
 delete mode 100644 Stats/scan.py
 delete mode 100644 temp.py

diff --git a/Processes/common_exec.py b/Processes/common_exec.py
index 63df15a..95d8450 100644
--- a/Processes/common_exec.py
+++ b/Processes/common_exec.py
@@ -79,5 +79,13 @@ def get_stdout(self):
                     logs.append((os.path.join(path, file), "stdout"))
         return logs
 
+    def get_dbs(self):
+        dbs: list[tuple[str, str]] = []
+        for path, _dirs, files in os.walk(os.path.join(DATA_FOLDER, self.name)):
+            for file in files:
+                if file.endswith(".sqlite") or file.endswith(".db"):
+                    dbs.append((os.path.join(path, file), self.name))
+        return dbs
+
 
 all_processes: dict[str, CommonExec] = {}
diff --git a/Processes/miner.py b/Processes/miner.py
index b238d71..73ff789 100644
--- a/Processes/miner.py
+++ b/Processes/miner.py
@@ -55,5 +55,13 @@ def get_stdout(self):
                     logs.append((os.path.join(path, file), "stdout"))
         return logs
 
+    def get_dbs(self):
+        dbs: list[tuple[str, str]] = []
+        for path, _dirs, files in os.walk(os.path.join(DATA_FOLDER, self.name)):
+            for file in files:
+                if file.endswith(".sqlite") or file.endswith(".db"):
+                    dbs.append((os.path.join(path, file), self.name))
+        return dbs
+
 
 miner = Miner()
diff --git a/Stats/scan.py b/Stats/scan.py
deleted file mode 100644
index 7fc3c4e..0000000
--- a/Stats/scan.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# type:ignore
-from collections import defaultdict as dd
-import os
-import statistics
-
-
-def scan():
-    running = dd(int)
-    times = dd(list[float])
-    cnt = dd(int)
-
-    def gather(file):
-        lines = open(file, "rt", encoding="utf-8").readlines()
-        # print(lines)
-        for line in lines:
-            if line.startswith("[+] "):
-                # print(line)
-                running[line[4:].strip()] += 1
-            elif line.startswith("[-] "):
-                # print(line)
-                _, name, _time, time, _returned, *returned = line.split()
-                # print(name)
-                if time.endswith("µs"):
-                    time = float(time[:-2]) / 1000000
-                    pass
-                elif time.endswith("ms"):
-                    time = float(time[:-2]) / 1000
-                elif time.endswith("ns"):
-                    time = float(time[:-2]) / 1000000000
-                else:
-                    time = float(time[:-1])
-                    print("HERE", time)
-                cnt[name] += 1
-                times[name].append(time)
-                # exit()
-                running[line[4:].split()[0]] -= 1
-
-    for path, dirs, files in os.walk("."):
-        for file in files:
-            if file.endswith(".log"):
-                gather(os.path.join(path, file))
-
-    print("Still running :")
-    for name in running:
-        if running[name] > 0:
-            print(name, running[name])
-
-    print()
-    print("Stats")
-    longest = [0] * 6
-    for name in times:
-        total_time = sum(times[name])
-        avg_time = total_time / cnt[name]
-        median_time = statistics.median(times[name])
-        max_time = max(times[name])
-        l = [0] * 6
-        l[0] = len(name)
-        l[1] = len(f"{cnt[name]}")
-        l[2] = len(f"{avg_time:.2f}")
-        l[3] = len(f"{total_time:.2f}")
-        l[4] = len(f"{median_time:.2f}")
-        l[5] = len(f"{max_time:.2f}")
-
-        for i in range(6):
-            longest[i] = max(longest[i], l[i])
-
-    for name in times:
-        total_time = sum(times[name])
-        avg_time = total_time / cnt[name]
-        median_time = statistics.median(times[name])
-        max_time = max(times[name])
-        l = [0] * 6
-        l[0] = len(name)
-        l[1] = len(f"{cnt[name]}")
-        l[2] = len(f"{avg_time:.2f}")
-        l[3] = len(f"{total_time:.2f}")
-        l[4] = len(f"{median_time:.2f}")
-        l[5] = len(f"{max_time:.2f}")
-
-        print(
-            f"{name}{' '*(longest[0]-l[0])} cnt {' '*(longest[1]-l[1])}{cnt[name]}, avg time {' '*(longest[2]-l[2])}{avg_time:.2f} total_time {' '*(longest[3]-l[3])}{total_time:.2f}, median time {' '*(longest[4]-l[4])}{median_time:.2f}, max time {' '*(longest[5]-l[5])}{max_time:.2f}"
-        )
diff --git a/temp.py b/temp.py
deleted file mode 100644
index 0ad7a79..0000000
--- a/temp.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import re
-from typing import Optional
-from enum import Enum
-
-
-def parse(what: str) -> tuple[Optional[str], Optional[int]]:
-    match = re.match(r"(Miner)", what)
-    if match:
-        return
-    match = re.match(r"(BaseNode|BaseWallet|AssetWallet|ValidatorNode)_(\d+)", what)
-    if not match:
-        return ("_", None)
-    print(match.groups())
-    if len(match.groups()) == 2:
-        return (match.group(1), None)
-    return (match.group(1), int(match.group(2)))
-
-
-print(parse("Miner"))
-print(parse("BaseNode_0"))
-print(parse("BaseNode_1"))
-print(parse("ValidatorNode_0"))
-print(parse("AssetWallet_0"))
-print(parse("Indexer_0"))
-
-# from Processes.base_wallet import GrpcWallet
-# from Processes.base_node import GrpcBaseNode
-# from Processes.dan_wallet_daemon import JrpcDanWalletDaemon
-
-# grpc = GrpcWallet("127.0.0.1:18008")
-# # print(grpc.get_balance())
-# # print(grpc.check_connectivity())
-# # print(grpc.get_address().address.hex())
-
-
-# grpc = GrpcBaseNode("127.0.0.1:18006")
-# print(grpc.get_public_addresses())
-
-
-# # jrpc = JrpcDanWalletDaemon("http://127.0.0.1:18013")
-# # jrpc.auth()
-# # print(jrpc.get_all_tx_by_status())
diff --git a/webui.py b/webui.py
index 05fbf82..a94ef92 100644
--- a/webui.py
+++ b/webui.py
@@ -172,9 +172,7 @@ def grpc(what: str) -> Result:  # type:ignore
 
         @method
         def http(what: str) -> Result:  # type:ignore
-            print(what)
             http_address = self.commands.http(what)
-            print(http_address)
             if http_address:
                 return Success(http_address)
             return InvalidParams()
@@ -189,6 +187,47 @@ def burn(public_key: str, outfile: str, amount: int) -> Result:  # type:ignore
             self.commands.burn(public_key, outfile, amount)
             return Success()
 
+        @method
+        def get_dbs(what: Optional[str]) -> Result:  # type:ignore
+            try:
+                if what is None:
+                    dbs: list[tuple[str, str]] = []
+                    for path, _dirs, files in os.walk(DATA_FOLDER):
+                        for file in files:
+                            if file.endswith(".sqlite") or file.endswith(".db"):
+                                dbs.append((os.path.join(path, file), os.path.split(path)[1]))  # type:ignore
+                    return Success(dbs)
+                if process_type.is_miner(what):
+                    return Success(self.commands.miner.get_dbs())
+                if process_type.is_connector(what):
+                    if self.commands.tari_connector_sample:
+                        return Success(self.commands.tari_connector_sample.get_dbs())
+                    return Success("Not running")
+                if process_type.is_signaling_server(what):
+                    return Success(self.commands.signaling_server.get_dbs())
+                id = process_type.get_index(what)
+                if id is None:
+                    return InvalidParams()
+                if process_type.is_validator_node(what):
+                    if id in self.commands.validator_nodes:
+                        return Success(self.commands.validator_nodes[id].get_dbs())
+                if process_type.is_asset_wallet(what):
+                    if id in self.commands.dan_wallets:
+                        return Success(self.commands.dan_wallets[id].get_dbs())
+                if process_type.is_indexer(what):
+                    if id in self.commands.indexers:
+                        return Success(self.commands.indexers[id].get_dbs())
+                if process_type.is_base_node(what):
+                    if self.commands.base_nodes.has(id):
+                        return Success(self.commands.base_nodes[id].get_dbs())
+                if process_type.is_base_wallet(what):
+                    if self.commands.base_wallets.has(id):
+                        return Success(self.commands.base_wallets[id].get_dbs())
+                return InvalidParams()
+            except Exception as error:
+                return Error(str(error))
+            return Error("Unknown")
+
         @method
         def get_logs(what: Optional[str]) -> Result:  # type:ignore
             try:
@@ -287,7 +326,7 @@ def get_file_binary(filename: str) -> Result:  # type:ignore
                 file = open(filename, "rb")
                 data = file.read()
                 file.close()
-                return Success(base64.b64encode(data).decode('utf-8'))
+                return Success(base64.b64encode(data).decode("utf-8"))
             return InvalidParams("File not found")
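
A quick way to sanity-check the new endpoint is to call it over JSON-RPC once the
webui is running. The sketch below is illustrative only: the listen address
(http://127.0.0.1:8080) and the `requests` dependency are assumptions rather than
part of this patch, and the process name follows the "ValidatorNode_0" convention
seen in the removed temp.py.

# Illustrative JSON-RPC 2.0 client for the new `get_dbs` method (not part of the patch).
# Assumptions: the webui's JSON-RPC endpoint is reachable at WEBUI_URL and the
# `requests` package is installed; adjust both to match the local setup.
import requests

WEBUI_URL = "http://127.0.0.1:8080"  # assumed address; use whatever webui.py actually binds


def get_dbs(what=None):
    # `what` mirrors the server-side parameter: None lists every *.sqlite / *.db file
    # under DATA_FOLDER, while a process name such as "ValidatorNode_0" limits the
    # result to that process's data folder.
    payload = {"jsonrpc": "2.0", "method": "get_dbs", "params": {"what": what}, "id": 1}
    response = requests.post(WEBUI_URL, json=payload, timeout=10)
    response.raise_for_status()
    return response.json()


if __name__ == "__main__":
    print(get_dbs())                   # every database found under DATA_FOLDER
    print(get_dbs("ValidatorNode_0"))  # a single process, if it is running

On success, the "result" field carries the (database path, owner name) pairs collected
by the new get_dbs helpers; for an unknown process name the server responds with an
"Invalid params" error instead.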