From b0e944f5cad951257e09bb0bf71a7ea85e892a37 Mon Sep 17 00:00:00 2001
From: latentvector
Date: Tue, 17 Dec 2024 15:46:13 -0500
Subject: [PATCH] refactor

---
 Dockerfile | 7 +-
 Makefile | 8 +-
 commune/cli.py | 135 +-
 commune/client.py | 102 +-
 commune/key.py | 118 +-
 commune/module.py | 133 +-
 commune/{network => }/network.py | 3 +-
 commune/network/substrate/requirements.txt | 22 -
 commune/network/subtensor/subtensor.py | 1922 -----------------
 {modules => commune}/serializer/bytes.py | 0
 {modules => commune}/serializer/munch.py | 0
 {modules => commune}/serializer/numpy.py | 0
 {modules => commune}/serializer/pandas.py | 0
 {modules => commune}/serializer/serializer.py | 37 +-
 {modules => commune}/serializer/torch.py | 3 +-
 commune/{network => }/subspace/subspace.py | 20 +-
 commune/{network => }/subspace/types.py | 0
 commune/{network => }/substrate/__init__.py | 0
 commune/{network => }/substrate/base.py | 0
 commune/{network => }/substrate/constants.py | 0
 commune/{network => }/substrate/contracts.py | 4 +-
 commune/{network => }/substrate/exceptions.py | 0
 commune/{network => }/substrate/extensions.py | 0
 commune/{network => }/substrate/interfaces.py | 0
 commune/{network => }/substrate/key.py | 0
 commune/{network => }/substrate/storage.py | 2 +-
 .../{network => }/substrate/utils/__init__.py | 0
 .../{network => }/substrate/utils/caching.py | 0
 .../substrate/utils/ecdsa_helpers.py | 0
 .../substrate/utils/encrypted_json.py | 0
 .../{network => }/substrate/utils/hasher.py | 0
 commune/{network => }/substrate/utils/ss58.py | 0
 commune/utils/misc.py | 1 -
 commune/utils/os.py | 3 +
 commune/utils/types.py | 166 --
 modules/agent/agent.py | 42 +-
 modules/agent/test.py | 0
 modules/anthropic/anthropic.py | 12 -
 modules/base/base.py | 8 +-
 modules/find/find.py | 119 +-
 modules/git/git.py | 9 +-
 modules/model/openrouter.py | 2 +-
 modules/py/py.py | 50 +-
 modules/remote/app.py | 2 +-
 modules/sandbox.py | 2 +
 45 files changed, 395 insertions(+), 2537 deletions(-)
 rename commune/{network => }/network.py (99%)
 delete mode 100644 commune/network/substrate/requirements.txt
 delete mode 100644 commune/network/subtensor/subtensor.py
 rename {modules => commune}/serializer/bytes.py (100%)
 rename {modules => commune}/serializer/munch.py (100%)
 rename {modules => commune}/serializer/numpy.py (100%)
 rename {modules => commune}/serializer/pandas.py (100%)
 rename {modules => commune}/serializer/serializer.py (99%)
 rename {modules => commune}/serializer/torch.py (87%)
 rename commune/{network => }/subspace/subspace.py (99%)
 rename commune/{network => }/subspace/types.py (100%)
 rename commune/{network => }/substrate/__init__.py (100%)
 rename commune/{network => }/substrate/base.py (100%)
 rename commune/{network => }/substrate/constants.py (100%)
 rename commune/{network => }/substrate/contracts.py (99%)
 rename commune/{network => }/substrate/exceptions.py (100%)
 rename commune/{network => }/substrate/extensions.py (100%)
 rename commune/{network => }/substrate/interfaces.py (100%)
 rename commune/{network => }/substrate/key.py (100%)
 rename commune/{network => }/substrate/storage.py (99%)
 rename commune/{network => }/substrate/utils/__init__.py (100%)
 rename commune/{network => }/substrate/utils/caching.py (100%)
 rename commune/{network => }/substrate/utils/ecdsa_helpers.py (100%)
 rename commune/{network => }/substrate/utils/encrypted_json.py (100%)
 rename commune/{network => }/substrate/utils/hasher.py (100%)
 rename commune/{network => }/substrate/utils/ss58.py (100%)
 delete mode 100644 commune/utils/types.py
 create mode 100644
modules/agent/test.py diff --git a/Dockerfile b/Dockerfile index bcbb753a..7dae77d8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,21 +4,16 @@ FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive RUN usermod -s /bin/bash root RUN apt-get update - #RUST RUN apt-get install curl nano build-essential cargo libstd-rust-dev -y - #NPM/JS RUN apt-get install -y nodejs npm RUN npm install -g pm2 - #PYTHON RUN apt-get install python3 python3-pip python3-venv -y COPY ./ /commune # RUN git clone -b main https://github.com/commune-ai/commune.git /commune RUN pip install -e /commune - WORKDIR /app -# TODO DOCKER - +# TODO DOCKERIZE THE ENTRYPOINT ENTRYPOINT [ "tail", "-f", "/dev/null"] \ No newline at end of file diff --git a/Makefile b/Makefile index bff4e586..411cffb1 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,8 @@ SCRIPTS_PATH=./scripts build: ${SCRIPTS_PATH}/build.sh -run: - ${SCRIPTS_PATH}/run.sh +start: + ${SCRIPTS_PATH}/start.sh stop: ${SCRIPTS_PATH}/stop.sh enter: @@ -12,9 +12,7 @@ test: chmod: chmod +x ${SCRIPTS_PATH}/* up: - make run + make start down: make stop -start: - make run diff --git a/commune/cli.py b/commune/cli.py index 7146fb17..d1e54e91 100644 --- a/commune/cli.py +++ b/commune/cli.py @@ -50,89 +50,94 @@ def determine_type(x): pass return x -def forward(argv = None, sep = '--', fn_splitters = [':', '/', '//', '::'], base = 'module', helper_fns = ['code', 'schema', 'fn_schema', 'help', 'fn_info', 'fn_hash'], default_fn = 'vs'): - t0 = time.time() - argv = argv or sys.argv[1:] - if len(argv) == 0: - argv = [default_fn] - output = None + +def get_args_kwargs(argv): + args = [] + kwargs = {} + parsing_kwargs = False + for arg in c.copy(argv): + if '=' in arg: + parsing_kwargs = True + key, value = arg.split('=') + kwargs[key] = determine_type(value) + else: + assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' + args.append(determine_type(arg)) + return args, kwargs + + +def get_init_kwargs(argv, helper_fns = ['code', 'schema', 'fn_schema', 'help', 'fn_info', 'fn_hash']): init_kwargs = {} - if any([arg.startswith(sep) for arg in argv]): - for arg in c.copy(argv): - if arg.startswith(sep): - key = arg[len(sep):].split('=')[0] - if key in helper_fns: - # is it a helper function - return forward([key , argv[0]]) - else: - value = arg.split('=')[-1] if '=' in arg else True - argv.remove(arg) - init_kwargs[key] = determine_type(value) - # any of the --flags are init kwargs + for arg in c.copy(argv): + if arg.startswith('--'): # init kwargs + key = arg[len('--'):].split('=')[0] + if key in helper_fns: + # is it a helper function + return forward([key , argv[0]]) + else: + value = arg.split('=')[-1] if '=' in arg else True + argv.remove(arg) + init_kwargs[key] = determine_type(value) + continue + return init_kwargs + +def get_fn(argv, fn_splitters = [':', '/', '//', '::'], init_kwargs={}, default_fn='forward'): + if len(argv) == 0: + argv = [default_fn] fn = argv.pop(0).replace('-', '_') - module = c.module(base) - fs = [fs for fs in fn_splitters if fs in fn] - if len(fs) == 1: - module, fn = fn.split(fs[0]) + + + init_kwargs = get_init_kwargs(argv) + + # get the function object + fn_splitters = [fs for fs in fn_splitters if fs in fn] + if len(fn_splitters) == 1: + fn_splitter = fn_splitters[0] + module, fn = fn.split(fn_splitter) module = c.shortcuts.get(module, module) - modules = c.get_modules() - module_options = [] - for m in modules: - if module == m: - module_options = [m] - break - if module in m: - module_options.append(m) - if len(module_options)>0: 
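# --- Editorial note: illustrative sketch only, not part of the patch. --------
# The refactored cli.py above splits raw argv tokens into positional args,
# key=value kwargs, and --flag init kwargs before resolving the target
# function. The standalone helper below approximates that parsing so the
# behaviour can be seen in isolation; `parse_argv` and the json-based literal
# coercion are assumptions of this sketch, not the library's API (the real
# code uses determine_type, get_args_kwargs and get_init_kwargs shown above).
import json

def parse_argv(argv):
    args, kwargs, init_kwargs = [], {}, {}
    parsing_kwargs = False

    def coerce(x):
        try:
            return json.loads(x)   # "8000" -> 8000, "true" -> True, else keep str
        except Exception:
            return x

    for arg in argv:
        if arg.startswith('--'):   # --key or --key=value -> constructor kwargs
            key, _, value = arg[2:].partition('=')
            init_kwargs[key] = coerce(value) if value else True
        elif '=' in arg:           # key=value -> keyword argument for the call
            parsing_kwargs = True
            key, value = arg.split('=', 1)
            kwargs[key] = coerce(value)
        else:                      # bare token -> positional argument
            assert not parsing_kwargs, 'Cannot mix positional and keyword arguments'
            args.append(coerce(arg))
    return args, kwargs, init_kwargs

# e.g. parse_argv(['serve', 'port=8000', '--verbose'])
#      -> (['serve'], {'port': 8000}, {'verbose': True})
# --- End editorial note -------------------------------------------------------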
- module = module_options[0] - print('Module:', module) - module = c.module(module) - else: - raise AttributeError(f'Function {fn} not found in {module}') + module = c.module(module) + elif len(fn_splitters) == 0: + module = c.module() if hasattr(module, 'fn2module') and not hasattr(module, fn): - c.print(f'ROUTE_ACTIVATED({fn} from {module})') - fn2module = module.fn2module() + fn2module = module.fn2module() if callable(module.fn2module) else module.fn2module if not fn in fn2module: - return c.print(f'FN({fn}) not found {module}', color='red') + raise Exception(f'Function({fn}) NOT IN Module({module})', color='red') module = c.module(fn2module[fn]) - if not hasattr(module, fn): - module = module() + fn_obj = getattr(module, fn) - if c.is_property(fn_obj) or c.classify_fn(fn_obj) == 'self': - fn_obj = getattr(module(**init_kwargs), fn) - if callable(fn_obj): - args = [] - kwargs = {} - parsing_kwargs = False - for arg in argv: - if '=' in arg: - parsing_kwargs = True - key, value = arg.split('=') - kwargs[key] = determine_type(value) - else: - assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' - args.append(determine_type(arg)) - output = fn_obj(*args, **kwargs) - else: - output = fn_obj - buffer = '⚡️'*4 - c.print(buffer+fn+buffer, color='yellow') + initialize_module_class = bool(not hasattr(module, fn) or isinstance(fn, property) or 'self' in c.get_args(fn_obj)) + module = module(**init_kwargs) if initialize_module_class else module + print('⚡️'*4+fn+'⚡️'*4, color='yellow') + fn_obj = getattr(module, fn) + return fn_obj + +def run_fn(fn_obj, args, kwargs): + # call the function + t0 = time.time() + output = fn_obj(*args, **kwargs) if callable(fn_obj) else fn_obj latency = time.time() - t0 is_error = c.is_error(output) - msg = f'❌Error({latency:.3f}sec)❌' if is_error else f'✅Result({latency:.3f}s)✅' - c.print(msg) + print(f'❌Error({latency:.3f}sec)❌' if is_error else f'✅Result({latency:.3f}s)✅') is_generator = c.is_generator(output) if is_generator: for item in output: if isinstance(item, dict): - c.print(item) + print(item) else: - c.print(item, end='') + print(item, end='') else: - c.print(output) + print(output) return output + +def forward(): + argv = sys.argv[1:] + fn = get_fn(argv) + args, kwargs = get_args_kwargs(argv) + return run_fn(fn, args, kwargs) + + def main(): forward() \ No newline at end of file diff --git a/commune/client.py b/commune/client.py index 6601fc89..12c66a16 100644 --- a/commune/client.py +++ b/commune/client.py @@ -8,14 +8,12 @@ import commune as c class Client(c.Module): - network2namespace = {} - stream_prefix = 'data: ' - - def __init__( self, module : str = 'module', - network: Optional[bool] = 'local', - mode: Optional[str] = 'http', - key : Optional[str]= None , - serializer: Optional[c.Module] = 'serializer', + + def __init__( self, module : str = 'module', + key : Optional[str]= None , + network: Optional[bool] = 'local', + mode: Optional[str] = 'http', + serializer: Optional[c.Module] = 'serializer', **kwargs ): self.serializer = c.module(serializer)() @@ -55,7 +53,9 @@ def call(cls, module = fn fn = 'info' client = cls(module=module, network=network) - return client.forward(fn=fn, args=args, kwargs=kwargs, timeout=timeout, **extra_kwargs) + kwargs = kwargs or {} + kwargs = {**kwargs, **extra_kwargs} + return client.forward(fn=fn, args=args, kwargs=kwargs, timeout=timeout, key=key) @classmethod def connect(cls, @@ -89,12 +89,42 @@ def get_url(self, fn, mode='http'): module_address = module_address.replace(ip, '0.0.0.0') url 
= f"{module_address}/{fn}/" return url - - def request(self, url: str, - data: dict, - headers: dict, - timeout: int = 10, - stream: bool = True): + + + def get_data(self, args=[], kwargs={}, params = None): + # derefernece + args = c.copy(args or []) + kwargs = c.copy(kwargs or {}) + if isinstance(args, dict): + kwargs = {**kwargs, **args} + args = [] + if params: + if isinstance(params, dict): + kwargs = {**kwargs, **params} + elif isinstance(params, list): + args = params + else: + raise Exception(f'Invalid params {params}') + data = { "args": args, "kwargs": kwargs} + data = self.serializer.serialize(data) + return data + + def forward(self, + fn = 'info', + params: Optional[Union[list, dict]] = None, + args : Optional[list] = [], + kwargs : Optional[dict] = {}, + timeout:int=2, + key : str = None, + mode: str = 'http', + data=None, + headers = None, + stream:bool = False): + + key = self.resolve_key(key) + url = self.get_url(fn=fn, mode=mode) + data = data or self.get_data(params=params, args=args, kwargs=kwargs, ) + headers = headers or self.get_header(data=data, key=key) try: response = self.session.post(url, json=data, headers=headers, timeout=timeout, stream=stream) if 'text/event-stream' in response.headers.get('Content-Type', ''): @@ -111,27 +141,6 @@ def request(self, url: str, except Exception as e: result = c.detailed_error(e) return result - - def get_data(self, args=[], kwargs={}, **extra_kwargs): - # derefernece - args = c.copy(args or []) - kwargs = c.copy(kwargs or {}) - if isinstance(args, dict): - kwargs = {**kwargs, **args} - args = [] - if extra_kwargs: - kwargs = {**kwargs, **extra_kwargs} - data = { "args": args, "kwargs": kwargs} - data = self.serializer.serialize(data) - return data - - def forward(self, fn = 'info', args : str = [], kwargs : str = {}, - timeout:int=2, key : str = None, mode: str = 'http', data=None, headers = None, **extra_kwargs): - key = self.resolve_key(key) - url = self.get_url(fn=fn, mode=mode) - data = data or self.get_data(args=args, kwargs=kwargs,**extra_kwargs) - headers = headers or self.get_header(data=data, key=key) - return self.request(url=url, data=data,headers=headers, timeout=timeout) def __del__(self): try: @@ -156,10 +165,10 @@ def stream(self, response): print(f'Error in stream: {e}') yield None - def process_stream_line(self, line): + def process_stream_line(self, line, stream_prefix = 'data: '): event_data = line.decode('utf-8') - if event_data.startswith(self.stream_prefix): - event_data = event_data[len(self.stream_prefix):] + if event_data.startswith(stream_prefix): + event_data = event_data[len(stream_prefix):] if event_data == "": # skip empty lines if the event data is empty return '' if isinstance(event_data, str): @@ -173,28 +182,17 @@ def check_response(x) -> bool: return False else: return True - - class Virtual: - protected_attributes = [ 'client', 'remote_call'] + class Virtual: def __init__(self, client: str ='ReactAgentModule'): if isinstance(client, str): client = c.connect(client) self.client = client - def remote_call(self, *args, remote_fn, timeout:int=10, key=None, **kwargs): result = self.client.forward(fn=remote_fn, args=args, kwargs=kwargs, timeout=timeout, key=key) return result - - def __str__(self): - return str(self.client) - - def __repr__(self): - return self.__str__() - def __getattr__(self, key): - - if key in self.protected_attributes : + if key in [ 'client', 'remote_call'] : return getattr(self, key) else: return lambda *args, **kwargs : self.remote_call(*args, remote_fn=key, **kwargs) diff 
--git a/commune/key.py b/commune/key.py index 1a2108aa..1f6cee25 100644 --- a/commune/key.py +++ b/commune/key.py @@ -16,12 +16,12 @@ from eth_keys.datatypes import PrivateKey from scalecodec.utils.ss58 import ss58_encode, ss58_decode, get_ss58_format from scalecodec.base import ScaleBytes -from commune.network.substrate.utils import ss58 -from commune.network.substrate.constants import DEV_PHRASE -from commune.network.substrate.exceptions import ConfigurationError -from commune.network.substrate.key import extract_derive_path -from commune.network.substrate.utils.ecdsa_helpers import mnemonic_to_ecdsa_private_key, ecdsa_verify, ecdsa_sign -from commune.network.substrate.utils.encrypted_json import decode_pair_from_encrypted_json, encode_pair +from commune.substrate.utils import ss58 +from commune.substrate.constants import DEV_PHRASE +from commune.substrate.exceptions import ConfigurationError +from commune.substrate.key import extract_derive_path +from commune.substrate.utils.ecdsa_helpers import mnemonic_to_ecdsa_private_key, ecdsa_verify, ecdsa_sign +from commune.substrate.utils.encrypted_json import decode_pair_from_encrypted_json, encode_pair from bip39 import bip39_to_mini_secret, bip39_generate, bip39_validate import sr25519 import ed25519_zebra @@ -36,23 +36,12 @@ class KeyType: KeyType.crypto_type_map = {k.lower():v for k,v in KeyType.__dict__.items() if k in KeyType.crypto_types } KeyType.crypto_types = list(KeyType.crypto_type_map.keys()) -class MnemonicLanguageCode: - ENGLISH = 'en' - CHINESE_SIMPLIFIED = 'zh-hans' - CHINESE_TRADITIONAL = 'zh-hant' - FRENCH = 'fr' - ITALIAN = 'it' - JAPANESE = 'ja' - KOREAN = 'ko' - SPANISH = 'es' - class Key(c.Module): crypto_types = KeyType.crypto_types crypto_type_map = KeyType.crypto_type_map crypto_types = list(crypto_type_map.keys()) ss58_format = 42 crypto_type = 'sr25519' - def __init__(self, private_key: Union[bytes, str] = None, ss58_format: int = ss58_format, @@ -85,7 +74,7 @@ def set_crypto_type(self, crypto_type): return self.set_private_key(**kwargs) else: return {'success': False, 'message': f'crypto_type already set to {crypto_type}'} - + def set_private_key(self, private_key: Union[bytes, str] = None, ss58_format: int = ss58_format, @@ -150,10 +139,9 @@ def set_private_key(self, @classmethod def add_key(cls, path:str, mnemonic:str = None, password:str=None, refresh:bool=False, private_key=None, **kwargs): - if cls.key_exists(path) and not refresh : c.print(f'key already exists at {path}') - return json.loads(cls.get(path)) + return cls.get(path) key = cls.new_key(mnemonic=mnemonic, private_key=private_key, **kwargs) key.path = path key_json = key.to_json() @@ -221,6 +209,7 @@ def load_key(cls, path=None): cls.add_key(**key_info) return {'status': 'success', 'message': f'key loaded from {path}'} + @classmethod def save_keys(cls, path='saved_keys.json', **kwargs): @@ -453,7 +442,7 @@ def new_key(cls, create = gen = new_key def to_json(self, password: str = None ) -> dict: - state_dict = self.copy(self.__dict__) + state_dict = c.copy(self.__dict__) for k,v in state_dict.items(): if type(v) in [bytes]: state_dict[k] = v.hex() @@ -483,27 +472,27 @@ def from_json(cls, obj: Union[str, dict], password: str = None, crypto_type=None @classmethod - def generate_mnemonic(cls, words: int = 12, language_code: str = MnemonicLanguageCode.ENGLISH) -> str: + def generate_mnemonic(cls, words: int = 12, language_code: str = "en") -> str: """ params: words: The amount of words to generate, valid values are 12, 15, 18, 21 and 24 language_code: The 
language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. - Defaults to `MnemonicLanguageCode.ENGLISH` + Defaults to `"en"` """ mnemonic = bip39_generate(words, language_code) assert cls.validate_mnemonic(mnemonic, language_code), 'mnemonic is invalid' return mnemonic @classmethod - def validate_mnemonic(cls, mnemonic: str, language_code: str = MnemonicLanguageCode.ENGLISH) -> bool: + def validate_mnemonic(cls, mnemonic: str, language_code: str = "en") -> bool: """ Verify if specified mnemonic is valid Parameters ---------- mnemonic: Seed phrase - language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `"en"` Returns ------- @@ -513,7 +502,7 @@ def validate_mnemonic(cls, mnemonic: str, language_code: str = MnemonicLanguageC @classmethod - def create_from_mnemonic(cls, mnemonic: str = None, ss58_format=ss58_format, crypto_type=KeyType.SR25519, language_code: str = MnemonicLanguageCode.ENGLISH) -> 'Key': + def create_from_mnemonic(cls, mnemonic: str = None, ss58_format=ss58_format, crypto_type=KeyType.SR25519, language_code: str = "en") -> 'Key': """ Create a Key for given memonic @@ -522,7 +511,7 @@ def create_from_mnemonic(cls, mnemonic: str = None, ss58_format=ss58_format, cry mnemonic: Seed phrase ss58_format: Substrate address format crypto_type: Use `KeyType.SR25519` or `KeyType.ED25519` cryptography for generating the Key - language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `"en"` Returns ------- @@ -532,7 +521,7 @@ def create_from_mnemonic(cls, mnemonic: str = None, ss58_format=ss58_format, cry mnemonic = cls.generate_mnemonic(language_code=language_code) if crypto_type == KeyType.ECDSA: - if language_code != MnemonicLanguageCode.ENGLISH: + if language_code != "en": raise ValueError("ECDSA mnemonic only supports english") private_key = mnemonic_to_ecdsa_private_key(mnemonic) keypair = cls.create_from_private_key(private_key, ss58_format=ss58_format, crypto_type=crypto_type) @@ -598,7 +587,7 @@ def create_from_uri( suri: str, ss58_format: Optional[int] = ss58_format, crypto_type=KeyType.SR25519, - language_code: str = MnemonicLanguageCode.ENGLISH + language_code: str = "en" ) -> 'Key': """ Creates Key for specified suri in following format: `[mnemonic]/[soft-path]//[hard-path]` @@ -608,7 +597,7 @@ def create_from_uri( suri: ss58_format: Substrate address format crypto_type: Use KeyType.SR25519 or KeyType.ED25519 cryptography for generating the Key - language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. 
Defaults to `"en"` Returns ------- @@ -627,7 +616,7 @@ def create_from_uri( suri_parts = suri_regex.groupdict() if crypto_type == KeyType.ECDSA: - if language_code != MnemonicLanguageCode.ENGLISH: + if language_code != "en": raise ValueError("ECDSA mnemonic only supports english") private_key = mnemonic_to_ecdsa_private_key( @@ -823,9 +812,35 @@ def sign(self, data: Union[ScaleBytes, bytes, str], to_json = False) -> bytes: 'address': self.ss58_address,} return signature - def is_ticket(self, data): - return all([k in data for k in ['data','signature', 'address', 'crypto_type']]) and any([k in data for k in ['time', 'timestamp']]) - + + @classmethod + def bytes2str(cls, data: bytes, mode: str = 'utf-8') -> str: + + if hasattr(data, 'hex'): + return data.hex() + else: + if isinstance(data, str): + return data + return bytes.decode(data, mode) + + @classmethod + def python2str(cls, input): + from copy import deepcopy + import json + + input = deepcopy(input) + input_type = type(input) + if input_type == str: + return input + if input_type in [dict]: + input = json.dumps(input) + elif input_type in [bytes]: + input = cls.bytes2str(input) + elif input_type in [list, tuple, set]: + input = json.dumps(list(input)) + elif input_type in [int, float, bool]: + input = str(input) + return input def verify(self, data: Union[ScaleBytes, bytes, str, dict], @@ -851,7 +866,7 @@ def verify(self, True if data is signed with this Key, otherwise False """ data = c.copy(data) - + if isinstance(data, dict): if self.is_ticket(data): address = data.pop('address') @@ -895,6 +910,8 @@ def verify(self, signature = bytes.fromhex(signature) if type(signature) is not bytes: raise TypeError("Signature should be of type bytes or a hex-string") + + if self.crypto_type == KeyType.SR25519: crypto_verify_fn = sr25519.verify elif self.crypto_type == KeyType.ED25519: @@ -903,9 +920,7 @@ def verify(self, crypto_verify_fn = ecdsa_verify else: raise ConfigurationError("Crypto type not supported") - verified = crypto_verify_fn(signature, data, public_key) - if not verified: # Another attempt with the data wrapped, as discussed in https://github.com/polkadot-js/extension/pull/743 # Note: As Python apps are trusted sources on its own, no need to wrap data when signing from this lib @@ -914,6 +929,9 @@ def verify(self, return ss58_encode(public_key, ss58_format=ss58_format) return verified + def is_ticket(self, data): + return all([k in data for k in ['data','signature', 'address', 'crypto_type']]) and any([k in data for k in ['time', 'timestamp']]) + def resolve_encryption_password(self, password:str=None) -> str: if password == None: password = self.private_key @@ -1081,31 +1099,6 @@ def ss58_encode(*args, **kwargs): @staticmethod def ss58_decode(*args, **kwargs): return ss58_decode(*args, **kwargs) - - @classmethod - def decrypt_file(cls, - path: Union[str, bytes], - key: str = None, - password : str = None, - **kwargs) -> bytes: - key = c.get_key(key) - data = c.get_text(path) - return key.decrypt(data, password=password, **kwargs) - - @classmethod - def encrypt_file(cls, - path: Union[str, bytes], - key: str = None, - password : str = None, - save = True, - **kwargs) -> bytes: - key = c.get_key(key) - data = c.get_text(path) - response = key.encrypt(data, password=password, **kwargs) - if save: - print(f'Encrypting {path}' ) - c.put_text(path, response) - return response @classmethod def get_key_address(cls, key): @@ -1138,7 +1131,6 @@ def valid_h160_address(cls, address): return False return True - def storage_migration(self): 
key2path = self.key2path() diff --git a/commune/module.py b/commune/module.py index f0c8a2ce..6e49eec2 100755 --- a/commune/module.py +++ b/commune/module.py @@ -38,8 +38,8 @@ class c: 'openrouter': 'model.openrouter', 'or' : ' model.openrouter', 'r' : 'remote', - 's' : 'network.subspace', - 'subspace': 'network.subspace', + 's' : 'subspace', + 'subspace': 'subspace', 'namespace': 'network', 'local': 'network', 'network.local': 'network', @@ -54,15 +54,13 @@ def module(cls, shortcuts : dict = None, cache=True, trials=1, - tree:dict=None, - **extra_kwargs ) -> str: + tree:dict=None ) -> str: path = path or 'module' if path.endswith('.py') and os.path.exists(path): path = c.path2name(path) else: path = path.replace('/','.') og_path = path - if path in c.module_cache and cache: return c.module_cache[path] if path in ['module', c.libname[0]]: @@ -77,26 +75,30 @@ def module(cls, if trials == 0: raise ValueError(f'Error in module {og_path} {e}') return c.module(path,cache=cache, tree=c.tree(max_age=10), trials=trials-1) - if not cls.is_module(module): - module.module_class = lambda *args, **kwargs : c.module_class(module) - module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) - module.key = c.get_key(module.module_name(), create_if_not_exists=True) - module.resolve_module = lambda *args, **kwargs : c.resolve_module(module) - module.filepath = lambda *args, **kwargs : c.filepath(module) - module.dirpath = lambda *args, **kwargs : c.dirpath(module) - module.code = lambda *args, **kwargs : c.code(module) - module.code_hash = lambda *args, **kwargs : c.code_hash(module) - module.schema = lambda *args, **kwargs : c.schema(module) - module.functions = module.fns = lambda *args, **kwargs : c.get_functions(module) - module.fn2code = lambda *args, **kwargs : c.fn2code(module) - module.ask = lambda *args, **kwargs : c.ask(*args, module=module, **kwargs) - module.config = lambda *args, **kwargs : c.config(module=module, **kwargs) + module = module if cls.is_module(module) else cls.convert_module(module) if cache: c.module_cache[path] = module if kwargs != None: module = module(**kwargs) return module - block = get_block = get_module = module + + get_agent = block = get_block = get_module = module + + @classmethod + def convert_module(cls, module): + module.module_class = lambda *args, **kwargs : c.module_class(module) + module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) + module.key = c.get_key(module.module_name(), create_if_not_exists=True) + module.resolve_module = lambda *args, **kwargs : c.resolve_module(module) + module.filepath = lambda *args, **kwargs : c.filepath(module) + module.dirpath = lambda *args, **kwargs : c.dirpath(module) + module.code = lambda *args, **kwargs : c.code(module) + module.code_hash = lambda *args, **kwargs : c.code_hash(module) + module.schema = lambda *args, **kwargs : c.schema(module) + module.functions = module.fns = lambda *args, **kwargs : c.get_functions(module) + module.fn2code = lambda *args, **kwargs : c.fn2code(module) + module.config = lambda *args, **kwargs : c.config(module=module, **kwargs) + return module @classmethod def filepath(cls, obj=None) -> str: @@ -143,7 +145,7 @@ def config_path(cls, obj = None) -> str: def sandbox(cls, path='./', filename='sandbox.py'): for file in c.files(path): if file.endswith(filename): - return c.cmd(f'python {file}', verbose=True) + return c.cmd(f'python3 {file}', verbose=True) return {'success': False, 'message': 'sandbox not found'} sand = sandbox @@ -190,9 +192,7 
@@ def resolve_object(cls, obj:str = None, **kwargs): obj = c.module(obj) elif c.is_fn(obj): obj = c.get_fn(obj) - assert obj != None, f'Object {obj} does not exist' - return obj @classmethod @@ -215,6 +215,8 @@ def help(self, module, *question): def run(cls, fn=None, params=None, name=None) -> Any: # if name != '__main__': # return {} + if fn != None: + return c.get_fn(fn)(**(params or {})) parser = argparse.ArgumentParser(description='Argparse for the module') parser.add_argument('-m', '--m', '--module', '-module', dest='module', help='The function', type=str, default=cls.module_name()) parser.add_argument('-fn', '--fn', dest='fn', help='The function', type=str, default="__init__") @@ -515,7 +517,7 @@ def fn2route(cls): return fn2route @classmethod - def add_routes(cls, routes:dict=None, verbose=False, add_utils=True): + def add_routes(cls, routes:dict=None): from functools import partial """ This ties other modules into the current module. @@ -729,20 +731,22 @@ def test_map(self): assert self.map(x, fn) == [2,3,4] return {'success':True, 'message':'map test passed'} - avoid_paths = ['~', '/', './', storage_path] @classmethod - def rm(cls, path,possible_extensions = ['json'], avoid_paths = avoid_paths): + def rm(cls, path:str, + possible_extensions = ['json'], + avoid_paths = ['~', '/', './', storage_path]): avoid_paths.append(c.storage_path) path = cls.resolve_path(path) avoid_paths = [cls.resolve_path(p) for p in avoid_paths] assert path not in avoid_paths, f'Cannot remove {path}' - if not os.path.exists(path): + path_exists = lambda p: os.path.exists(p) + if not path_exists(path): for pe in possible_extensions: if path.endswith(pe) and os.path.exists(path + f'.{pe}'): path = path + f'.{pe}' break - if not os.path.exists(path): - return {'success':False, 'message':f'{path} does not exist'} + if not path_exists(path): + return {'success':False, 'message':f'{path} does not exist'} if os.path.isdir(path): return shutil.rmtree(path) if os.path.isfile(path): @@ -1016,7 +1020,7 @@ def is_encrypted(self, path:str) -> bool: @classmethod def storage_dir(cls): return f'{c.storage_path}/{cls.module_name()}' - + @staticmethod def sleep(period): time.sleep(period) @@ -1061,14 +1065,8 @@ def fn_code(cls,fn:str, **kwargs) -> str: ''' Returns the code of a function ''' - try: - fn = cls.get_fn(fn) - code_text = inspect.getsource(fn) - except Exception as e: - code_text = None - raise e - print(f'Error in getting fn_code: {e}') - return code_text + fn = cls.get_fn(fn) + return inspect.getsource(fn) @classmethod def fn_hash(cls,fn:str = 'subspace/ls', detail:bool=False, seperator: str = '/') -> str: @@ -1301,24 +1299,29 @@ def get_fn(cls, fn:str, splitters=[":", "/"]) -> 'Callable': Gets the function from a string or if its an attribute """ if isinstance(fn, str): - if hasattr(cls, fn): - fn2route = cls.fn2route() - if fn in fn2route: - return c.obj(fn2route[fn]) - return getattr(cls, fn) - elif c.object_exists(fn): - return c.obj(fn) - + fn_obj = None + module = cls for splitter in splitters: if splitter in fn: module_name= splitter.join(fn.split(splitter)[:-1]) fn_name = fn.split(splitter)[-1] if c.module_exists(module_name): module = c.get_module(module_name) - return getattr(module, fn_name) - if callable(fn): - return fn - return fn + fn_obj = getattr(module, fn_name) + if hasattr(cls, fn): + fn2route = cls.fn2route() + if fn in fn2route: + return c.obj(fn2route[fn]) + fn_obj = getattr(cls, fn) + elif c.object_exists(fn): + fn_obj = c.obj(fn) + args = c.get_args(fn_obj) + if 'self' in args: + 
fn_obj = getattr(module(), fn.split('/')[-1]) + else: + fn_obj = fn + # assert fn_obj != None, f'{fn} is not a function or object' + return fn_obj @classmethod def self_functions(cls, search = None): @@ -1354,7 +1357,7 @@ def get_args(cls, fn) -> List[str]: # if fn is an object get the __ if not callable(fn): - fn = cls.get_fn(fn) + return [] try: args = inspect.getfullargspec(fn).args except Exception as e: @@ -1695,6 +1698,8 @@ def import_module(cls, def import_object(cls, key:str, **kwargs)-> Any: ''' Import an object from a string with the format of {module_path}.{object}''' key = key.replace('/', '.') + if '/' in key: + key = key.replace('/', '.') module_obj = c.import_module('.'.join(key.split('.')[:-1])) return getattr(module_obj, key.split('.')[-1]) @@ -1729,11 +1734,6 @@ def module_exists(cls, module:str, **kwargs) -> bool: return module_exists - @classmethod - def has_app(cls, module:str, **kwargs) -> bool: - return cls.module_exists(module + '.app', **kwargs) - - @classmethod def get_path(cls, module:str, **kwargs) -> bool: return c.filepath(module, **kwargs) @@ -1852,10 +1852,6 @@ def modules(cls, def has_module(cls, module, path=None): path = path or c.libpath return module in c.modules() - - def new_modules(self, *modules, **kwargs): - for module in modules: - self.new_module(module=module, **kwargs) def net(self): return c.network() @@ -1882,6 +1878,9 @@ def new_module( cls, return {'name': name, 'path': path, 'msg': 'Module Created'} add_module = new_module + + def build(self, *args, **kwargs): + return c.module('builder')().forward(*args, **kwargs) @classmethod def filter(cls, text_list: List[str], filter_text: str) -> List[str]: @@ -2008,6 +2007,14 @@ def repos(self, search=None): def is_repo(self, repo:str): return repo in self.repos() + + def file2hash(self, path='./'): + file2hash = {} + for k,v in c.file2text(path).items(): + file2hash[k] = c.hash(v) + return file2hash + + @classmethod def help(cls, *text, module=None, **kwargs): @@ -2021,6 +2028,9 @@ def time(self): return time.time() + def ask(self, *args, **kwargs): + return c.module("agent")().ask(*args, **kwargs) + def clone(self, repo:str, path:str=None, **kwargs): path = '~/' + repo if path == None else path cmd = f'git clone {repo}' @@ -2264,10 +2274,11 @@ def epoch(self, *args, **kwargs): "generate", "models" ], - "agent": ["ask", "models", "pricing", "model2info", "reduce"], + "agent": [ "models", "pricing", "model2info", "reduce"], "builder": ["build"], "summary": ["reduce"] } + c.add_routes() Module = c # Module is alias of c if __name__ == "__main__": diff --git a/commune/network/network.py b/commune/network.py similarity index 99% rename from commune/network/network.py rename to commune/network.py index fe6e7aed..c43dc6a4 100644 --- a/commune/network/network.py +++ b/commune/network.py @@ -1,7 +1,6 @@ -import commune as c from typing import * import os - +import commune as c class Network(c.Module): min_stake = 0 blocktime = block_time = 8 diff --git a/commune/network/substrate/requirements.txt b/commune/network/substrate/requirements.txt deleted file mode 100644 index 14363cbb..00000000 --- a/commune/network/substrate/requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -websocket-client>=0.57.0,<2 -base58>=1.0.3,<3 -certifi>=2019.3.9 -idna>=2.1.0,<4 -requests>=2.21.0,<3 -xxhash>=1.3.0,<4 -ecdsa>=0.17.0,<1 -eth-keys>=0.2.1 -eth_utils>=1.3.0 -pycryptodome>=3.11.0,<4 -PyNaCl>=1.0.1,<2 - -scalecodec>=1.2.10,<1.3 -py-sr25519-bindings>=0.2.0,<1 -py-ed25519-zebra-bindings>=1.0,<2 -py-bip39-bindings>=0.1.9,<1 - 
-mkdocs -mkdocs-material -mkdocs-autorefs -mkdocstrings -mkdocstrings[python] diff --git a/commune/network/subtensor/subtensor.py b/commune/network/subtensor/subtensor.py deleted file mode 100644 index efaff369..00000000 --- a/commune/network/subtensor/subtensor.py +++ /dev/null @@ -1,1922 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2024 Opentensor Foundation -# -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -# documentation files (the “Software”), to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of -# the Software. -# -# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. - -""" -The ``bittensor.core.subtensor.Subtensor`` module in Bittensor serves as a crucial interface for interacting with the Bittensor -blockchain, facilitating a range of operations essential for the decentralized machine learning network. -""" - -import argparse -import copy -import socket -import ssl -from typing import Union, Optional, TypedDict, Any - -import numpy as np -import scalecodec -from bittensor_wallet import Wallet -from numpy.typing import NDArray -from scalecodec.base import RuntimeConfiguration -from scalecodec.exceptions import RemainingScaleBytesNotEmptyException -from scalecodec.type_registry import load_type_registry_preset -from scalecodec.types import ScaleType -from substrateinterface.base import QueryMapResult, SubstrateInterface - -from bittensor.core import settings -from bittensor.core.axon import Axon -from bittensor.core.chain_data import ( - custom_rpc_type_registry, - DelegateInfo, - NeuronInfo, - NeuronInfoLite, - PrometheusInfo, - SubnetHyperparameters, - SubnetInfo, -) -from bittensor.core.config import Config -from bittensor.core.extrinsics.commit_weights import ( - commit_weights_extrinsic, - reveal_weights_extrinsic, -) -from bittensor.core.extrinsics.registration import ( - burned_register_extrinsic, - register_extrinsic, -) -from bittensor.core.extrinsics.root import ( - root_register_extrinsic, - set_root_weights_extrinsic, -) -from bittensor.core.extrinsics.serving import ( - do_serve_axon, - serve_axon_extrinsic, - publish_metadata, - get_metadata, -) -from bittensor.core.extrinsics.set_weights import set_weights_extrinsic -from bittensor.core.extrinsics.transfer import ( - transfer_extrinsic, -) -from bittensor.core.metagraph import Metagraph -from bittensor.utils import ( - networking, - torch, - ss58_to_vec_u8, - u16_normalized_float, - hex_to_bytes, -) -from bittensor.utils.balance import Balance -from bittensor.utils.btlogging import logging -from bittensor.utils.registration import legacy_torch_api_compat -from bittensor.utils.weight_utils import generate_weight_hash - -KEY_NONCE: dict[str, int] = {} - - -class ParamWithTypes(TypedDict): 
- name: str # Name of the parameter. - type: str # ScaleType string of the parameter. - - -class Subtensor: - """ - The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, - facilitating a range of operations essential for the decentralized machine learning network. - - This class enables neurons (network participants) to engage in activities such as registering on the network, - managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. - - The Bittensor network operates on a digital ledger where each neuron holds stakes (S) and learns a set - of inter-peer weights (W). These weights, set by the neurons themselves, play a critical role in determining - the ranking and incentive mechanisms within the network. Higher-ranked neurons, as determined by their - contributions and trust within the network, receive more incentives. - - The Subtensor class connects to various Bittensor networks like the main ``finney`` network or local test - networks, providing a gateway to the blockchain layer of Bittensor. It leverages a staked weighted trust - system and consensus to ensure fair and distributed incentive mechanisms, where incentives (I) are - primarily allocated to neurons that are trusted by the majority of the network. - - Additionally, Bittensor introduces a speculation-based reward mechanism in the form of bonds (B), allowing - neurons to accumulate bonds in other neurons, speculating on their future value. This mechanism aligns - with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal - investments. - - Example Usage:: - - from bittensor.core.subtensor import Subtensor - - # Connect to the main Bittensor network (Finney). - finney_subtensor = Subtensor(network='finney') - - # Close websocket connection with the Bittensor network. - finney_subtensor.close() - - # Register a new neuron on the network. - wallet = bittensor_wallet.Wallet(...) # Assuming a wallet instance is created. - netuid = 1 - success = finney_subtensor.register(wallet=wallet, netuid=netuid) - - # Set inter-neuronal weights for collaborative learning. - success = finney_subtensor.set_weights(wallet=wallet, netuid=netuid, uids=[...], weights=[...]) - - # Get the metagraph for a specific subnet using given subtensor connection - metagraph = finney_subtensor.metagraph(netuid=netuid) - - By facilitating these operations, the Subtensor class is instrumental in maintaining the decentralized - intelligence and dynamic learning environment of the Bittensor network, as envisioned in its foundational - principles and mechanisms described in the `NeurIPS paper - `_. paper. - """ - - def __init__( - self, - network: Optional[str] = None, - config: Optional["Config"] = None, - _mock: bool = False, - log_verbose: bool = False, - connection_timeout: int = 600, - ) -> None: - """ - Initializes a Subtensor interface for interacting with the Bittensor blockchain. - - NOTE: - Currently subtensor defaults to the ``finney`` network. This will change in a future release. - - We strongly encourage users to run their own local subtensor node whenever possible. This increases decentralization and resilience of the network. In a future release, local subtensor will become the default and the fallback to ``finney`` removed. Please plan ahead for this change. We will provide detailed instructions on how to run a local subtensor node in the documentation in a subsequent release. 
- - Args: - network (Optional[str]): The network name to connect to (e.g., ``finney``, ``local``). This can also be the chain endpoint (e.g., ``wss://entrypoint-finney.opentensor.ai:443``) and will be correctly parsed into the network and chain endpoint. If not specified, defaults to the main Bittensor network. - config (Optional[bittensor.core.config.Config]): Configuration object for the subtensor. If not provided, a default configuration is used. - _mock (bool): If set to ``True``, uses a mocked connection for testing purposes. Default is ``False``. - log_verbose (bool): Whether to enable verbose logging. If set to ``True``, detailed log information about the connection and network operations will be provided. Default is ``True``. - connection_timeout (int): The maximum time in seconds to keep the connection alive. Default is ``600``. - - This initialization sets up the connection to the specified Bittensor network, allowing for various blockchain operations such as neuron registration, stake management, and setting weights. - """ - # Determine config.subtensor.chain_endpoint and config.subtensor.network config. - # If chain_endpoint is set, we override the network flag, otherwise, the chain_endpoint is assigned by the - # network. - # Argument importance: network > chain_endpoint > config.subtensor.chain_endpoint > config.subtensor.network - - if config is None: - config = Subtensor.config() - self._config = copy.deepcopy(config) - - # Setup config.subtensor.network and config.subtensor.chain_endpoint - self.chain_endpoint, self.network = Subtensor.setup_config( - network, self._config - ) - - if ( - self.network == "finney" - or self.chain_endpoint == settings.FINNEY_ENTRYPOINT - ) and log_verbose: - logging.info( - f"You are connecting to {self.network} network with endpoint {self.chain_endpoint}." - ) - logging.debug( - "We strongly encourage running a local subtensor node whenever possible. " - "This increases decentralization and resilience of the network." - ) - logging.debug( - "In a future release, local subtensor will become the default endpoint. " - "To get ahead of this change, please run a local subtensor node and point to it." - ) - - self.log_verbose = log_verbose - self._connection_timeout = connection_timeout - self.substrate: "SubstrateInterface" = None - self._get_substrate() - - def __str__(self) -> str: - if self.network == self.chain_endpoint: - # Connecting to chain endpoint without network known. - return f"subtensor({self.chain_endpoint})" - else: - # Connecting to network with endpoint known. - return f"subtensor({self.network}, {self.chain_endpoint})" - - def __repr__(self) -> str: - return self.__str__() - - def close(self): - """Cleans up resources for this subtensor instance like active websocket connection and active extensions.""" - if self.substrate: - self.substrate.close() - - def _get_substrate(self): - """Establishes a connection to the Substrate node using configured parameters.""" - try: - # Set up params. - self.substrate = SubstrateInterface( - ss58_format=settings.SS58_FORMAT, - use_remote_preset=True, - url=self.chain_endpoint, - type_registry=settings.TYPE_REGISTRY, - ) - if self.log_verbose: - logging.debug( - f"Connected to {self.network} network and {self.chain_endpoint}." 
- ) - - try: - self.substrate.websocket.settimeout(self._connection_timeout) - except (AttributeError, TypeError, socket.error, OSError) as e: - logging.warning(f"Error setting timeout: {e}") - - except (ConnectionRefusedError, ssl.SSLError) as error: - logging.error( - f"Could not connect to {self.network} network with {self.chain_endpoint} chain endpoint.", - ) - logging.info( - "You can check if you have connectivity by running this command: nc -vz localhost " - f"{self.chain_endpoint}" - ) - raise ConnectionRefusedError(error.args) - - @staticmethod - def config() -> "Config": - """ - Creates and returns a Bittensor configuration object. - - Returns: - config (bittensor.core.config.Config): A Bittensor configuration object configured with arguments added by the `subtensor.add_args` method. - """ - parser = argparse.ArgumentParser() - Subtensor.add_args(parser) - return Config(parser, args=[]) - - @staticmethod - def setup_config(network: Optional[str], config: "Config"): - """ - Sets up and returns the configuration for the Subtensor network and endpoint. - - This method determines the appropriate network and chain endpoint based on the provided network string or - configuration object. It evaluates the network and endpoint in the following order of precedence: - 1. Provided network string. - 2. Configured chain endpoint in the `config` object. - 3. Configured network in the `config` object. - 4. Default chain endpoint. - 5. Default network. - - Args: - network (Optional[str]): The name of the Subtensor network. If None, the network and endpoint will be determined from the `config` object. - config (bittensor.core.config.Config): The configuration object containing the network and chain endpoint settings. - - Returns: - tuple: A tuple containing the formatted WebSocket endpoint URL and the evaluated network name. - """ - if network is not None: - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network(network) - else: - if config.is_set("subtensor.chain_endpoint"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.chain_endpoint - ) - - elif config.is_set("subtensor.network"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.network - ) - - elif config.subtensor.get("chain_endpoint"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.chain_endpoint - ) - - elif config.subtensor.get("network"): - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - config.subtensor.network - ) - - else: - ( - evaluated_network, - evaluated_endpoint, - ) = Subtensor.determine_chain_endpoint_and_network( - settings.DEFAULTS.subtensor.network - ) - - return ( - networking.get_formatted_ws_endpoint_url(evaluated_endpoint), - evaluated_network, - ) - - @classmethod - def help(cls): - """Print help to stdout.""" - parser = argparse.ArgumentParser() - cls.add_args(parser) - print(cls.__new__.__doc__) - parser.print_help() - - @classmethod - def add_args(cls, parser: "argparse.ArgumentParser", prefix: Optional[str] = None): - """ - Adds command-line arguments to the provided ArgumentParser for configuring the Subtensor settings. - - Args: - parser (argparse.ArgumentParser): The ArgumentParser object to which the Subtensor arguments will be added. - prefix (Optional[str]): An optional prefix for the argument names. 
If provided, the prefix is prepended to each argument name. - - Arguments added: - --subtensor.network: The Subtensor network flag. Possible values are 'finney', 'test', 'archive', and 'local'. Overrides the chain endpoint if set. - --subtensor.chain_endpoint: The Subtensor chain endpoint flag. If set, it overrides the network flag. - --subtensor._mock: If true, uses a mocked connection to the chain. - - Example: - parser = argparse.ArgumentParser() - Subtensor.add_args(parser) - """ - prefix_str = "" if prefix is None else f"{prefix}." - try: - default_network = settings.DEFAULT_NETWORK - default_chain_endpoint = settings.FINNEY_ENTRYPOINT - - parser.add_argument( - f"--{prefix_str}subtensor.network", - default=default_network, - type=str, - help="""The subtensor network flag. The likely choices are: - -- finney (main network) - -- test (test network) - -- archive (archive network +300 blocks) - -- local (local running network) - If this option is set it overloads subtensor.chain_endpoint with - an entry point node from that network. - """, - ) - parser.add_argument( - f"--{prefix_str}subtensor.chain_endpoint", - default=default_chain_endpoint, - type=str, - help="""The subtensor endpoint flag. If set, overrides the --network flag.""", - ) - parser.add_argument( - f"--{prefix_str}subtensor._mock", - default=False, - type=bool, - help="""If true, uses a mocked connection to the chain.""", - ) - - except argparse.ArgumentError: - # re-parsing arguments. - pass - - # Inner private functions - @networking.ensure_connected - def _encode_params( - self, - call_definition: dict[str, list["ParamWithTypes"]], - params: Union[list[Any], dict[str, Any]], - ) -> str: - """Returns a hex encoded string of the params using their types.""" - param_data = scalecodec.ScaleBytes(b"") - - for i, param in enumerate(call_definition["params"]): - scale_obj = self.substrate.create_scale_object(param["type"]) - if isinstance(params, list): - param_data += scale_obj.encode(params[i]) - else: - if param["name"] not in params: - raise ValueError(f"Missing param {param['name']} in params dict.") - - param_data += scale_obj.encode(params[param["name"]]) - - return param_data.to_hex() - - def _get_hyperparameter( - self, param_name: str, netuid: int, block: Optional[int] = None - ) -> Optional[Any]: - """ - Retrieves a specified hyperparameter for a specific subnet. - - Args: - param_name (str): The name of the hyperparameter to retrieve. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[Union[int, float]]: The value of the specified hyperparameter if the subnet exists, ``None`` otherwise. - """ - if not self.subnet_exists(netuid, block): - return None - - result = self.query_subtensor(param_name, block, [netuid]) - if result is None or not hasattr(result, "value"): - return None - - return result.value - - # Calls methods - @networking.ensure_connected - def query_subtensor( - self, name: str, block: Optional[int] = None, params: Optional[list] = None - ) -> "ScaleType": - """ - Queries named storage from the Subtensor module on the Bittensor blockchain. This function is used to retrieve specific data or parameters from the blockchain, such as stake, rank, or other neuron-specific attributes. - - Args: - name (str): The name of the storage function to query. - block (Optional[int]): The blockchain block number at which to perform the query. 
- params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - query_response (scalecodec.ScaleType): An object containing the requested data. - - This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. - """ - - return self.substrate.query( - module="SubtensorModule", - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - @networking.ensure_connected - def query_map_subtensor( - self, name: str, block: Optional[int] = None, params: Optional[list] = None - ) -> "QueryMapResult": - """ - Queries map storage from the Subtensor module on the Bittensor blockchain. This function is designed to retrieve a map-like data structure, which can include various neuron-specific details or network-wide attributes. - - Args: - name (str): The name of the map storage function to query. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - QueryMapResult (substrateinterface.base.QueryMapResult): An object containing the map-like data structure, or ``None`` if not found. - - This function is particularly useful for analyzing and understanding complex network structures and relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. - """ - return self.substrate.query_map( - module="SubtensorModule", - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - def query_runtime_api( - self, - runtime_api: str, - method: str, - params: Optional[Union[list[int], dict[str, int]]], - block: Optional[int] = None, - ) -> Optional[str]: - """ - Queries the runtime API of the Bittensor blockchain, providing a way to interact with the underlying runtime and retrieve data encoded in Scale Bytes format. This function is essential for advanced users who need to interact with specific runtime methods and decode complex data types. - - Args: - runtime_api (str): The name of the runtime API to query. - method (str): The specific method within the runtime API to call. - params (Optional[list[ParamWithTypes]]): The parameters to pass to the method call. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - Optional[str]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails. - - This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed and specific interactions with the network's runtime environment. 
- """ - call_definition = settings.TYPE_REGISTRY["runtime_api"][runtime_api]["methods"][ - method - ] - - json_result = self.state_call( - method=f"{runtime_api}_{method}", - data=( - "0x" - if params is None - else self._encode_params(call_definition=call_definition, params=params) - ), - block=block, - ) - - if json_result is None: - return None - - return_type = call_definition["type"] - as_scale_bytes = scalecodec.ScaleBytes(json_result["result"]) - - rpc_runtime_config = RuntimeConfiguration() - rpc_runtime_config.update_type_registry(load_type_registry_preset("legacy")) - rpc_runtime_config.update_type_registry(custom_rpc_type_registry) - obj = rpc_runtime_config.create_scale_object(return_type, as_scale_bytes) - if obj.data.to_hex() == "0x0400": # RPC returned None result - return None - - return obj.decode() - - @networking.ensure_connected - def state_call( - self, method: str, data: str, block: Optional[int] = None - ) -> dict[Any, Any]: - """ - Makes a state call to the Bittensor blockchain, allowing for direct queries of the blockchain's state. This function is typically used for advanced queries that require specific method calls and data inputs. - - Args: - method (str): The method name for the state call. - data (str): The data to be passed to the method. - block (Optional[int]): The blockchain block number at which to perform the state call. - - Returns: - result (dict[Any, Any]): The result of the rpc call. - - The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. - """ - block_hash = None if block is None else self.substrate.get_block_hash(block) - return self.substrate.rpc_request( - method="state_call", - params=[method, data, block_hash] if block_hash else [method, data], - ) - - @networking.ensure_connected - def query_map( - self, - module: str, - name: str, - block: Optional[int] = None, - params: Optional[list] = None, - ) -> "QueryMapResult": - """ - Queries map storage from any module on the Bittensor blockchain. This function retrieves data structures that represent key-value mappings, essential for accessing complex and structured data within the blockchain modules. - - Args: - module (str): The name of the module from which to query the map storage. - name (str): The specific storage function within the module to query. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): Parameters to be passed to the query. - - Returns: - result (substrateinterface.base.QueryMapResult): A data structure representing the map storage if found, ``None`` otherwise. - - This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. - """ - return self.substrate.query_map( - module=module, - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - @networking.ensure_connected - def query_constant( - self, module_name: str, constant_name: str, block: Optional[int] = None - ) -> Optional["ScaleType"]: - """ - Retrieves a constant from the specified module on the Bittensor blockchain. This function is used to access fixed parameters or values defined within the blockchain's modules, which are essential for understanding the network's configuration and rules. 
- - Args: - module_name (str): The name of the module containing the constant. - constant_name (str): The name of the constant to retrieve. - block (Optional[int]): The blockchain block number at which to query the constant. - - Returns: - Optional[scalecodec.ScaleType]: The value of the constant if found, ``None`` otherwise. - - Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's operational parameters. - """ - return self.substrate.get_constant( - module_name=module_name, - constant_name=constant_name, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - @networking.ensure_connected - def query_module( - self, - module: str, - name: str, - block: Optional[int] = None, - params: Optional[list] = None, - ) -> "ScaleType": - """ - Queries any module storage on the Bittensor blockchain with the specified parameters and block number. This function is a generic query interface that allows for flexible and diverse data retrieval from various blockchain modules. - - Args: - module (str): The name of the module from which to query data. - name (str): The name of the storage function within the module. - block (Optional[int]): The blockchain block number at which to perform the query. - params (Optional[list[object]]): A list of parameters to pass to the query function. - - Returns: - Optional[scalecodec.ScaleType]: An object containing the requested data if found, ``None`` otherwise. - - This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. - """ - return self.substrate.query( - module=module, - storage_function=name, - params=params, - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - # Common subtensor methods - def metagraph( - self, netuid: int, lite: bool = True, block: Optional[int] = None - ) -> "Metagraph": # type: ignore - """ - Returns a synced metagraph for a specified subnet within the Bittensor network. The metagraph represents the network's structure, including neuron connections and interactions. - - Args: - netuid (int): The network UID of the subnet to query. - lite (bool): If true, returns a metagraph using a lightweight sync (no weights, no bonds). Default is ``True``. - block (Optional[int]): Block number for synchronization, or ``None`` for the latest block. - - Returns: - bittensor.core.metagraph.Metagraph: The metagraph representing the subnet's structure and neuron relationships. - - The metagraph is an essential tool for understanding the topology and dynamics of the Bittensor network's decentralized architecture, particularly in relation to neuron interconnectivity and consensus processes. - """ - metagraph = Metagraph( - network=self.network, netuid=netuid, lite=lite, sync=False - ) - metagraph.sync(block=block, lite=lite, subtensor=self) - - return metagraph - - @staticmethod - def determine_chain_endpoint_and_network( - network: str, - ) -> tuple[Optional[str], Optional[str]]: - """Determines the chain endpoint and network from the passed network or chain_endpoint. - - Args: - network (str): The network flag. The choices are: ``finney`` (main network), ``archive`` (archive network +300 blocks), ``local`` (local running network), ``test`` (test network). 
- - Returns: - tuple[Optional[str], Optional[str]]: The network and chain endpoint flag. If passed, overrides the ``network`` argument. - """ - - if network is None: - return None, None - if network in settings.NETWORKS: - return network, settings.NETWORK_MAP[network] - else: - if ( - network == settings.FINNEY_ENTRYPOINT - or "entrypoint-finney.opentensor.ai" in network - ): - return "finney", settings.FINNEY_ENTRYPOINT - elif ( - network == settings.FINNEY_TEST_ENTRYPOINT - or "test.finney.opentensor.ai" in network - ): - return "test", settings.FINNEY_TEST_ENTRYPOINT - elif ( - network == settings.ARCHIVE_ENTRYPOINT - or "archive.chain.opentensor.ai" in network - ): - return "archive", settings.ARCHIVE_ENTRYPOINT - elif "127.0.0.1" in network or "localhost" in network: - return "local", network - else: - return "unknown", network - - def get_netuids_for_hotkey( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> list[int]: - """ - Retrieves a list of subnet UIDs (netuids) for which a given hotkey is a member. This function identifies the specific subnets within the Bittensor network where the neuron associated with the hotkey is active. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - list[int]: A list of netuids where the neuron is a member. - """ - result = self.query_map_subtensor("IsNetworkMember", block, [hotkey_ss58]) - return ( - [record[0].value for record in result if record[1]] - if result and hasattr(result, "records") - else [] - ) - - @networking.ensure_connected - def get_current_block(self) -> int: - """ - Returns the current block number on the Bittensor blockchain. This function provides the latest block number, indicating the most recent state of the blockchain. - - Returns: - int: The current chain block number. - - Knowing the current block number is essential for querying real-time data and performing time-sensitive operations on the blockchain. It serves as a reference point for network activities and data synchronization. - """ - return self.substrate.get_block_number(None) # type: ignore - - def is_hotkey_registered_any( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> bool: - """ - Checks if a neuron's hotkey is registered on any subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int]): The blockchain block number at which to perform the check. - - Returns: - bool: ``True`` if the hotkey is registered on any subnet, False otherwise. - - This function is essential for determining the network-wide presence and participation of a neuron. - """ - return len(self.get_netuids_for_hotkey(hotkey_ss58, block)) > 0 - - def is_hotkey_registered_on_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> bool: - """ - Checks if a neuron's hotkey is registered on a specific subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number at which to perform the check. - - Returns: - bool: ``True`` if the hotkey is registered on the specified subnet, False otherwise. - - This function helps in assessing the participation of a neuron in a particular subnet, indicating its specific area of operation or influence within the network. 
- """ - return self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block) is not None - - def is_hotkey_registered( - self, - hotkey_ss58: str, - netuid: Optional[int] = None, - block: Optional[int] = None, - ) -> bool: - """ - Determines whether a given hotkey (public key) is registered in the Bittensor network, either globally across any subnet or specifically on a specified subnet. This function checks the registration status of a neuron identified by its hotkey, which is crucial for validating its participation and activities within the network. - - Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int]): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. - - This function is important for verifying the active status of neurons in the Bittensor network. It aids in understanding whether a neuron is eligible to participate in network processes such as consensus, validation, and incentive distribution based on its registration status. - """ - if netuid is None: - return self.is_hotkey_registered_any(hotkey_ss58, block) - else: - return self.is_hotkey_registered_on_subnet(hotkey_ss58, netuid, block) - - # Not used in Bittensor, but is actively used by the community in almost all subnets - def set_weights( - self, - wallet: "Wallet", - netuid: int, - uids: Union[NDArray[np.int64], "torch.LongTensor", list], - weights: Union[NDArray[np.float32], "torch.FloatTensor", list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Sets the inter-neuronal weights for the specified neuron. This process involves specifying the influence or trust a neuron places on other neurons in the network, which is a fundamental aspect of Bittensor's decentralized learning architecture. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron setting the weights. - netuid (int): The unique identifier of the subnet. - uids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs that the weights are being set for. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - max_retries (int): The number of maximum attempts to set weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the setting of weights is successful, False otherwise. And `msg`, a string value describing the success or potential error. - - This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others【81†source】. - """ - uid = self.get_uid_for_hotkey_on_subnet(wallet.hotkey.ss58_address, netuid) - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to set weights!" 
- while ( - self.blocks_since_last_update(netuid, uid) > self.weights_rate_limit(netuid) # type: ignore - and retries < max_retries - ): - try: - logging.info( - f"Setting weights for subnet #{netuid}. Attempt {retries + 1} of {max_retries}." - ) - success, message = set_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - uids=uids, - weights=weights, - version_key=version_key, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - except Exception as e: - logging.error(f"Error setting weights: {e}") - finally: - retries += 1 - - return success, message - - @legacy_torch_api_compat - def root_set_weights( - self, - wallet: "Wallet", - netuids: Union[NDArray[np.int64], "torch.LongTensor", list], - weights: Union[NDArray[np.float32], "torch.FloatTensor", list], - version_key: int = 0, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - ) -> bool: - """ - Sets the weights for neurons on the root network. This action is crucial for defining the influence and interactions of neurons at the root level of the Bittensor network. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron setting the weights. - netuids (Union[NDArray[np.int64], torch.LongTensor, list]): The list of neuron UIDs for which weights are being set. - weights (Union[NDArray[np.float32], torch.FloatTensor, list]): The corresponding weights to be set for each UID. - version_key (int, optional): Version key for compatibility with the network. Default is ``0``. - wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. Defaults to ``False``. - wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Defaults to ``False``. - - Returns: - bool: ``True`` if the setting of root-level weights is successful, False otherwise. - - This function plays a pivotal role in shaping the root network's collective intelligence and decision-making processes, reflecting the principles of decentralized governance and collaborative learning in Bittensor. - """ - return set_root_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuids=netuids, - weights=weights, - version_key=version_key, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - - def register( - self, - wallet: "Wallet", - netuid: int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - max_allowed_attempts: int = 3, - output_in_place: bool = True, - cuda: bool = False, - dev_id: Union[list[int], int] = 0, - tpb: int = 256, - num_processes: Optional[int] = None, - update_interval: Optional[int] = None, - log_verbose: bool = False, - ) -> bool: - """ - Registers a neuron on the Bittensor network using the provided wallet. - - Registration is a critical step for a neuron to become an active participant in the network, enabling it to stake, set weights, and receive incentives. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron to be registered. - netuid (int): The unique identifier of the subnet. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Defaults to `False`. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Defaults to `True`. - max_allowed_attempts (int): Maximum number of attempts to register the wallet. - output_in_place (bool): If true, prints the progress of the proof of work to the console in-place. 
Meaning the progress is printed on the same lines. Defaults to `True`. - cuda (bool): If ``true``, the wallet should be registered using CUDA device(s). Defaults to `False`. - dev_id (Union[List[int], int]): The CUDA device id to use, or a list of device ids. Defaults to `0` (zero). - tpb (int): The number of threads per block (CUDA). Default to `256`. - num_processes (Optional[int]): The number of processes to use to register. Default to `None`. - update_interval (Optional[int]): The number of nonces to solve between updates. Default to `None`. - log_verbose (bool): If ``true``, the registration process will log more information. Default to `False`. - - Returns: - bool: ``True`` if the registration is successful, False otherwise. - - This function facilitates the entry of new neurons into the network, supporting the decentralized - growth and scalability of the Bittensor ecosystem. - """ - return register_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - max_allowed_attempts=max_allowed_attempts, - output_in_place=output_in_place, - cuda=cuda, - dev_id=dev_id, - tpb=tpb, - num_processes=num_processes, - update_interval=update_interval, - log_verbose=log_verbose, - ) - - def root_register( - self, - wallet: "Wallet", - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - ) -> bool: - """ - Registers the neuron associated with the wallet on the root network. This process is integral for participating in the highest layer of decision-making and governance within the Bittensor network. - - Args: - wallet (bittensor.wallet): The wallet associated with the neuron to be registered on the root network. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Defaults to `False`. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Defaults to `True`. - - Returns: - bool: ``True`` if the registration on the root network is successful, False otherwise. - - This function enables neurons to engage in the most critical and influential aspects of the network's governance, signifying a high level of commitment and responsibility in the Bittensor ecosystem. - """ - return root_register_extrinsic( - subtensor=self, - wallet=wallet, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - - def burned_register( - self, - wallet: "Wallet", - netuid: int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - ) -> bool: - """ - Registers a neuron on the Bittensor network by recycling TAO. This method of registration involves recycling TAO tokens, allowing them to be re-mined by performing work on the network. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron to be registered. - netuid (int): The unique identifier of the subnet. - wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. Defaults to `False`. - wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Defaults to `True`. - - Returns: - bool: ``True`` if the registration is successful, False otherwise. 
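Example (minimal sketch; assumes ``subtensor`` is a connected instance of this class and ``wallet`` is a funded ``bittensor_wallet.Wallet``):

    ok = subtensor.burned_register(wallet=wallet, netuid=1)
    # No proof-of-work is solved here (contrast register()); the recycled cost
    # tracks the subnet's Burn hyperparameter, which recycle(netuid) reports.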
- """ - return burned_register_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - - def serve_axon( - self, - netuid: int, - axon: "Axon", - wait_for_inclusion: bool = False, - wait_for_finalization: bool = True, - ) -> bool: - """ - Registers an ``Axon`` serving endpoint on the Bittensor network for a specific neuron. This function is used to set up the Axon, a key component of a neuron that handles incoming queries and data processing tasks. - - Args: - netuid (int): The unique identifier of the subnetwork. - axon (bittensor.core.axon.Axon): The Axon instance to be registered for serving. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``True``. - - Returns: - bool: ``True`` if the Axon serve registration is successful, False otherwise. - - By registering an Axon, the neuron becomes an active part of the network's distributed computing infrastructure, contributing to the collective intelligence of Bittensor. - """ - return serve_axon_extrinsic( - self, netuid, axon, wait_for_inclusion, wait_for_finalization - ) - - # metagraph - @property - def block(self) -> int: - """Returns current chain block. - - Returns: - block (int): Current chain block. - """ - return self.get_current_block() - - def blocks_since_last_update(self, netuid: int, uid: int) -> Optional[int]: - """ - Returns the number of blocks since the last update for a specific UID in the subnetwork. - - Args: - netuid (int): The unique identifier of the subnetwork. - uid (int): The unique identifier of the neuron. - - Returns: - Optional[int]: The number of blocks since the last update, or ``None`` if the subnetwork or UID does not exist. - """ - call = self._get_hyperparameter(param_name="LastUpdate", netuid=netuid) - return None if call is None else self.get_current_block() - int(call[uid]) - - @networking.ensure_connected - def get_block_hash(self, block_id: int) -> str: - """ - Retrieves the hash of a specific block on the Bittensor blockchain. The block hash is a unique identifier representing the cryptographic hash of the block's content, ensuring its integrity and immutability. - - Args: - block_id (int): The block number for which the hash is to be retrieved. - - Returns: - str: The cryptographic hash of the specified block. - - The block hash is a fundamental aspect of blockchain technology, providing a secure reference to each block's data. It is crucial for verifying transactions, ensuring data consistency, and maintaining the trustworthiness of the blockchain. - """ - return self.substrate.get_block_hash(block_id=block_id) - - def weights_rate_limit(self, netuid: int) -> Optional[int]: - """ - Returns network WeightsSetRateLimit hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - - Returns: - Optional[int]: The value of the WeightsSetRateLimit hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter(param_name="WeightsSetRateLimit", netuid=netuid) - return None if call is None else int(call) - - # Keep backwards compatibility for community usage. - # Make some commitment on-chain about arbitrary data. - def commit(self, wallet, netuid: int, data: str): - """ - Commits arbitrary data to the Bittensor network by publishing metadata. 
- - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the data. - netuid (int): The unique identifier of the subnetwork. - data (str): The data to be committed to the network. - """ - publish_metadata(self, wallet, netuid, f"Raw{len(data)}", data.encode()) - - # Keep backwards compatibility for community usage. - def subnetwork_n(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Returns network SubnetworkN hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the SubnetworkN hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="SubnetworkN", netuid=netuid, block=block - ) - return None if call is None else int(call) - - # Community uses this method - def transfer( - self, - wallet: "Wallet", - dest: str, - amount: Union["Balance", float], - wait_for_inclusion: bool = True, - wait_for_finalization: bool = False, - ) -> bool: - """ - Executes a transfer of funds from the provided wallet to the specified destination address. This function is used to move TAO tokens within the Bittensor network, facilitating transactions between neurons. - - Args: - wallet (bittensor_wallet.Wallet): The wallet from which funds are being transferred. - dest (str): The destination public key address. - amount (Union[bittensor.utils.balance.Balance, float]): The amount of TAO to be transferred. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``True``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - - Returns: - transfer_extrinsic (bool): ``True`` if the transfer is successful, False otherwise. - - This function is essential for the fluid movement of tokens in the network, supporting various economic activities such as staking, delegation, and reward distribution. - """ - return transfer_extrinsic( - subtensor=self, - wallet=wallet, - dest=dest, - amount=amount, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - - # Community uses this method via `bittensor.api.extrinsics.prometheus.prometheus_extrinsic` - def get_neuron_for_pubkey_and_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> Optional["NeuronInfo"]: - """ - Retrieves information about a neuron based on its public key (hotkey SS58 address) and the specific subnet UID (netuid). This function provides detailed neuron information for a particular subnet within the Bittensor network. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - Optional[bittensor.core.chain_data.neuron_info.NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. - - This function is crucial for accessing specific neuron data and understanding its status, stake, and other attributes within a particular subnet of the Bittensor ecosystem. 
- """ - return self.neuron_for_uid( - self.get_uid_for_hotkey_on_subnet(hotkey_ss58, netuid, block=block), - netuid, - block=block, - ) - - @networking.ensure_connected - def neuron_for_uid( - self, uid: Optional[int], netuid: int, block: Optional[int] = None - ) -> "NeuronInfo": - """ - Retrieves detailed information about a specific neuron identified by its unique identifier (UID) within a specified subnet (netuid) of the Bittensor network. This function provides a comprehensive view of a neuron's attributes, including its stake, rank, and operational status. - - Args: - uid (Optional[int]): The unique identifier of the neuron. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - bittensor.core.chain_data.neuron_info.NeuronInfo: Detailed information about the neuron if found, ``None`` otherwise. - - This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. - """ - if uid is None: - return NeuronInfo.get_null_neuron() - - block_hash = None if block is None else self.substrate.get_block_hash(block) - params = [netuid, uid] - if block_hash: - params = params + [block_hash] - - json_body = self.substrate.rpc_request( - method="neuronInfo_getNeuron", - params=params, # custom rpc method - ) - - if not (result := json_body.get("result", None)): - return NeuronInfo.get_null_neuron() - - return NeuronInfo.from_vec_u8(result) - - # Community uses this method - def get_subnet_hyperparameters( - self, netuid: int, block: Optional[int] = None - ) -> Optional[Union[list, "SubnetHyperparameters"]]: - """ - Retrieves the hyperparameters for a specific subnet within the Bittensor network. These hyperparameters define the operational settings and rules governing the subnet's behavior. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[bittensor.core.chain_data.subnet_hyperparameters.SubnetHyperparameters]: The subnet's hyperparameters, or ``None`` if not available. - - Understanding the hyperparameters is crucial for comprehending how subnets are configured and managed, and how they interact with the network's consensus and incentive mechanisms. - """ - hex_bytes_result = self.query_runtime_api( - runtime_api="SubnetInfoRuntimeApi", - method="get_subnet_hyperparams", - params=[netuid], - block=block, - ) - - if hex_bytes_result is None: - return [] - - return SubnetHyperparameters.from_vec_u8(hex_to_bytes(hex_bytes_result)) - - # Community uses this method - # Returns network ImmunityPeriod hyper parameter. - def immunity_period( - self, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Retrieves the 'ImmunityPeriod' hyperparameter for a specific subnet. This parameter defines the duration during which new neurons are protected from certain network penalties or restrictions. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, ``None`` otherwise. - - The 'ImmunityPeriod' is a critical aspect of the network's governance system, ensuring that new participants have a grace period to establish themselves and contribute to the network without facing immediate punitive actions. 
- """ - call = self._get_hyperparameter( - param_name="ImmunityPeriod", netuid=netuid, block=block - ) - return None if call is None else int(call) - - # Community uses this method - def get_uid_for_hotkey_on_subnet( - self, hotkey_ss58: str, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Retrieves the unique identifier (UID) for a neuron's hotkey on a specific subnet. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. - - The UID is a critical identifier within the network, linking the neuron's hotkey to its operational and governance activities on a particular subnet. - """ - _result = self.query_subtensor("Uids", block, [netuid, hotkey_ss58]) - return getattr(_result, "value", None) - - # Community uses this method - def tempo(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Returns network Tempo hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the Tempo hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter(param_name="Tempo", netuid=netuid, block=block) - return None if call is None else int(call) - - # Community uses this method - def get_commitment(self, netuid: int, uid: int, block: Optional[int] = None) -> str: - """ - Retrieves the on-chain commitment for a specific neuron in the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnetwork. - uid (int): The unique identifier of the neuron. - block (Optional[int]): The block number to retrieve the commitment from. If None, the latest block is used. Default is ``None``. - - Returns: - str: The commitment data as a string. - """ - metagraph = self.metagraph(netuid) - hotkey = metagraph.hotkeys[uid] # type: ignore - - metadata = get_metadata(self, netuid, hotkey, block) - try: - commitment = metadata["info"]["fields"][0] # type: ignore - hex_data = commitment[list(commitment.keys())[0]][2:] # type: ignore - return bytes.fromhex(hex_data).decode() - - except TypeError: - return "" - - # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function. - def min_allowed_weights( - self, netuid: int, block: Optional[int] = None - ) -> Optional[int]: - """ - Returns network MinAllowedWeights hyperparameter. - - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[int]: The value of the MinAllowedWeights hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="MinAllowedWeights", block=block, netuid=netuid - ) - return None if call is None else int(call) - - # Community uses this via `bittensor.utils.weight_utils.process_weights_for_netuid` function. - def max_weight_limit( - self, netuid: int, block: Optional[int] = None - ) -> Optional[float]: - """ - Returns network MaxWeightsLimit hyperparameter. 
- - Args: - netuid (int): The unique identifier of the subnetwork. - block (Optional[int]): The block number to retrieve the parameter from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[float]: The value of the MaxWeightsLimit hyperparameter, or ``None`` if the subnetwork does not exist or the parameter is not found. - """ - call = self._get_hyperparameter( - param_name="MaxWeightsLimit", block=block, netuid=netuid - ) - return None if call is None else u16_normalized_float(int(call)) - - # # Community uses this method. It is used in subtensor in neuron_info, and serving. - def get_prometheus_info( - self, netuid: int, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional["PrometheusInfo"]: - """ - Returns the prometheus information for this hotkey account. - - Args: - netuid (int): The unique identifier of the subnetwork. - hotkey_ss58 (str): The SS58 address of the hotkey. - block (Optional[int]): The block number to retrieve the prometheus information from. If ``None``, the latest block is used. Default is ``None``. - - Returns: - Optional[bittensor.core.chain_data.prometheus_info.PrometheusInfo]: A PrometheusInfo object containing the prometheus information, or ``None`` if the prometheus information is not found. - """ - result = self.query_subtensor("Prometheus", block, [netuid, hotkey_ss58]) - if result is not None and getattr(result, "value", None) is not None: - return PrometheusInfo( - ip=networking.int_to_ip(result.value["ip"]), - ip_type=result.value["ip_type"], - port=result.value["port"], - version=result.value["version"], - block=result.value["block"], - ) - return None - - # Community uses this method - def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: - """ - Checks if a subnet with the specified unique identifier (netuid) exists within the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number at which to check the subnet's existence. - - Returns: - bool: ``True`` if the subnet exists, False otherwise. - - This function is critical for verifying the presence of specific subnets in the network, enabling a deeper understanding of the network's structure and composition. - """ - _result = self.query_subtensor("NetworksAdded", block, [netuid]) - return getattr(_result, "value", False) - - @networking.ensure_connected - def get_all_subnets_info(self, block: Optional[int] = None) -> list[SubnetInfo]: - """ - Retrieves detailed information about all subnets within the Bittensor network. This function provides comprehensive data on each subnet, including its characteristics and operational parameters. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[SubnetInfo]: A list of SubnetInfo objects, each containing detailed information about a subnet. - - Gaining insights into the subnets' details assists in understanding the network's composition, the roles of different subnets, and their unique features. - """ - hex_bytes_result = self.query_runtime_api( - "SubnetInfoRuntimeApi", "get_subnets_info", params=[], block=block - ) - if not hex_bytes_result: - return [] - else: - return SubnetInfo.list_from_vec_u8(hex_to_bytes(hex_bytes_result)) - - # Metagraph uses this method - def bonds( - self, netuid: int, block: Optional[int] = None - ) -> list[tuple[int, list[tuple[int, int]]]]: - """ - Retrieves the bond distribution set by neurons within a specific subnet of the Bittensor network. 
Bonds represent the investments or commitments made by neurons in one another, indicating a level of trust and perceived value. This bonding mechanism is integral to the network's market-based approach to measuring and rewarding machine intelligence. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[tuple[int, list[tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its bonds with other neurons. - - Understanding bond distributions is crucial for analyzing the trust dynamics and market behavior within the subnet. It reflects how neurons recognize and invest in each other's intelligence and contributions, supporting diverse and niche systems within the Bittensor ecosystem. - """ - b_map = [] - b_map_encoded = self.query_map_subtensor( - name="Bonds", block=block, params=[netuid] - ) - if b_map_encoded.records: - for uid, b in b_map_encoded: - b_map.append((uid.serialize(), b.serialize())) - - return b_map - - def get_subnet_burn_cost(self, block: Optional[int] = None) -> Optional[str]: - """ - Retrieves the burn cost for registering a new subnet within the Bittensor network. This cost represents the amount of Tao that needs to be locked or burned to establish a new subnet. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - int: The burn cost for subnet registration. - - The subnet burn cost is an important economic parameter, reflecting the network's mechanisms for controlling the proliferation of subnets and ensuring their commitment to the network's long-term viability. - """ - lock_cost = self.query_runtime_api( - runtime_api="SubnetRegistrationRuntimeApi", - method="get_network_registration_cost", - params=[], - block=block, - ) - - if lock_cost is None: - return None - - return lock_cost - - # Metagraph uses this method - def neurons(self, netuid: int, block: Optional[int] = None) -> list["NeuronInfo"]: - """ - Retrieves a list of all neurons within a specified subnet of the Bittensor network. This function provides a snapshot of the subnet's neuron population, including each neuron's attributes and network interactions. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[bittensor.core.chain_data.neuron_info.NeuronInfo]: A list of NeuronInfo objects detailing each neuron's characteristics in the subnet. - - Understanding the distribution and status of neurons within a subnet is key to comprehending the network's decentralized structure and the dynamics of its consensus and governance processes. - """ - neurons_lite = self.neurons_lite(netuid=netuid, block=block) - weights = self.weights(block=block, netuid=netuid) - bonds = self.bonds(block=block, netuid=netuid) - - weights_as_dict = {uid: w for uid, w in weights} - bonds_as_dict = {uid: b for uid, b in bonds} - - neurons = [ - NeuronInfo.from_weights_bonds_and_neuron_lite( - neuron_lite, weights_as_dict, bonds_as_dict - ) - for neuron_lite in neurons_lite - ] - - return neurons - - # Metagraph uses this method - def get_total_subnets(self, block: Optional[int] = None) -> Optional[int]: - """ - Retrieves the total number of subnets within the Bittensor network as of a specific blockchain block. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The total number of subnets in the network. 
- - Understanding the total number of subnets is essential for assessing the network's growth and the extent of its decentralized infrastructure. - """ - _result = self.query_subtensor("TotalNetworks", block) - return getattr(_result, "value", None) - - # Metagraph uses this method - def get_subnets(self, block: Optional[int] = None) -> list[int]: - """ - Retrieves a list of all subnets currently active within the Bittensor network. This function provides an overview of the various subnets and their identifiers. - - Args: - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[int]: A list of network UIDs representing each active subnet. - - This function is valuable for understanding the network's structure and the diversity of subnets available for neuron participation and collaboration. - """ - result = self.query_map_subtensor("NetworksAdded", block) - return ( - [network[0].value for network in result.records if network[1]] - if result and hasattr(result, "records") - else [] - ) - - # Metagraph uses this method - def neurons_lite( - self, netuid: int, block: Optional[int] = None - ) -> list["NeuronInfoLite"]: - """ - Retrieves a list of neurons in a 'lite' format from a specific subnet of the Bittensor network. This function provides a streamlined view of the neurons, focusing on key attributes such as stake and network participation. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[bittensor.core.chain_data.neuron_info_lite.NeuronInfoLite]: A list of simplified neuron information for the subnet. - - This function offers a quick overview of the neuron population within a subnet, facilitating efficient analysis of the network's decentralized structure and neuron dynamics. - """ - hex_bytes_result = self.query_runtime_api( - runtime_api="NeuronInfoRuntimeApi", - method="get_neurons_lite", - params=[netuid], - block=block, - ) - - if hex_bytes_result is None: - return [] - - return NeuronInfoLite.list_from_vec_u8(hex_to_bytes(hex_bytes_result)) # type: ignore - - # Used in the `neurons` method which is used in metagraph.py - def weights( - self, netuid: int, block: Optional[int] = None - ) -> list[tuple[int, list[tuple[int, int]]]]: - """ - Retrieves the weight distribution set by neurons within a specific subnet of the Bittensor network. This function maps each neuron's UID to the weights it assigns to other neurons, reflecting the network's trust and value assignment mechanisms. - - Args: - netuid (int): The network UID of the subnet to query. - block (Optional[int]): The blockchain block number for the query. - - Returns: - list[tuple[int, list[tuple[int, int]]]]: A list of tuples mapping each neuron's UID to its assigned weights. - - The weight distribution is a key factor in the network's consensus algorithm and the ranking of neurons, influencing their influence and reward allocation within the subnet. - """ - w_map = [] - w_map_encoded = self.query_map_subtensor( - name="Weights", block=block, params=[netuid] - ) - if w_map_encoded.records: - for uid, w in w_map_encoded: - w_map.append((uid.serialize(), w.serialize())) - - return w_map - - # Used by community via `transfer_extrinsic` - @networking.ensure_connected - def get_balance(self, address: str, block: Optional[int] = None) -> "Balance": - """ - Retrieves the token balance of a specific address within the Bittensor network. 
This function queries the blockchain to determine the amount of Tao held by a given account. - - Args: - address (str): The Substrate address in ``ss58`` format. - block (Optional[int]): The blockchain block number at which to perform the query. - - Returns: - bittensor.utils.balance.Balance: The account balance at the specified block, represented as a Balance object. - - This function is important for monitoring account holdings and managing financial transactions within the Bittensor ecosystem. It helps in assessing the economic status and capacity of network participants. - """ - try: - result = self.substrate.query( - module="System", - storage_function="Account", - params=[address], - block_hash=( - None if block is None else self.substrate.get_block_hash(block) - ), - ) - - except RemainingScaleBytesNotEmptyException: - logging.error( - "Received a corrupted message. This likely points to an error with the network or subnet." - ) - return Balance(1000) - - return Balance(result.value["data"]["free"]) - - # Used in community via `bittensor.core.subtensor.Subtensor.transfer` - @networking.ensure_connected - def get_transfer_fee( - self, wallet: "Wallet", dest: str, value: Union["Balance", float, int] - ) -> "Balance": - """ - Calculates the transaction fee for transferring tokens from a wallet to a specified destination address. This function simulates the transfer to estimate the associated cost, taking into account the current network conditions and transaction complexity. - - Args: - wallet (bittensor_wallet.Wallet): The wallet from which the transfer is initiated. - dest (str): The ``SS58`` address of the destination account. - value (Union[bittensor.utils.balance.Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, or in Tao (float) or Rao (int) units. - - Returns: - bittensor.utils.balance.Balance: The estimated transaction fee for the transfer, represented as a Balance object. - - Estimating the transfer fee is essential for planning and executing token transactions, ensuring that the wallet has sufficient funds to cover both the transfer amount and the associated costs. This function provides a crucial tool for managing financial operations within the Bittensor network. - """ - if isinstance(value, float): - value = Balance.from_tao(value) - elif isinstance(value, int): - value = Balance.from_rao(value) - - if isinstance(value, Balance): - call = self.substrate.compose_call( - call_module="Balances", - call_function="transfer_allow_death", - call_params={"dest": dest, "value": value.rao}, - ) - - try: - payment_info = self.substrate.get_payment_info( - call=call, keypair=wallet.coldkeypub - ) - except Exception as e: - logging.error(f"Failed to get payment info. {e}") - payment_info = {"partialFee": int(2e7)} # assume 0.02 Tao - - fee = Balance.from_rao(payment_info["partialFee"]) - return fee - else: - fee = Balance.from_rao(int(2e7)) - logging.error( - "To calculate the transaction fee, the value must be Balance, float, or int. Received type: %s. Fee " - "is %s", - type(value), - 2e7, - ) - return fee - - # Used in community via `bittensor.core.subtensor.Subtensor.transfer` - def get_existential_deposit( - self, block: Optional[int] = None - ) -> Optional["Balance"]: - """ - Retrieves the existential deposit amount for the Bittensor blockchain. The existential deposit is the minimum amount of TAO required for an account to exist on the blockchain. Accounts with balances below this threshold can be reaped to conserve network resources. 
- - Args: - block (Optional[int]): Block number at which to query the deposit amount. If ``None``, the current block is used. - - Returns: - Optional[bittensor.utils.balance.Balance]: The existential deposit amount, or ``None`` if the query fails. - - The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring efficient use of storage and preventing the proliferation of dust accounts. - """ - result = self.query_constant( - module_name="Balances", constant_name="ExistentialDeposit", block=block - ) - if result is None or not hasattr(result, "value"): - return None - return Balance.from_rao(result.value) - - # Community uses this method - def commit_weights( - self, - wallet: "Wallet", - netuid: int, - salt: list[int], - uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.int64], list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Commits a hash of the neuron's weights to the Bittensor blockchain using the provided wallet. - This action serves as a commitment or snapshot of the neuron's current weight distribution. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron committing the weights. - netuid (int): The unique identifier of the subnet. - salt (list[int]): list of randomly generated integers as salt to generated weighted hash. - uids (np.ndarray): NumPy array of neuron UIDs for which weights are being committed. - weights (np.ndarray): NumPy array of weight values corresponding to each UID. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version.``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - max_retries (int): The number of maximum attempts to commit weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the weight commitment is successful, False otherwise. And `msg`, a string - value describing the success or potential error. - - This function allows neurons to create a tamper-proof record of their weight distribution at a specific point in time, - enhancing transparency and accountability within the Bittensor network. - """ - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to commit weights!" 
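        # Commit/reveal flow: only the digest produced by generate_weight_hash(address, netuid,
        # uids, values, salt, version_key) is published on-chain below; the raw uids, weights
        # and salt stay local and must later be passed unchanged to reveal_weights() so the
        # chain can verify them against the committed hash.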
- - logging.info( - f"Committing weights with params: netuid={netuid}, uids={uids}, weights={weights}, version_key={version_key}" - ) - - # Generate the hash of the weights - commit_hash = generate_weight_hash( - address=wallet.hotkey.ss58_address, - netuid=netuid, - uids=list(uids), - values=list(weights), - salt=salt, - version_key=version_key, - ) - - logging.info(f"Commit Hash: {commit_hash}") - - while retries < max_retries: - try: - success, message = commit_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - commit_hash=commit_hash, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - if success: - break - except Exception as e: - logging.error(f"Error committing weights: {e}") - finally: - retries += 1 - - return success, message - - # Community uses this method - def reveal_weights( - self, - wallet: "Wallet", - netuid: int, - uids: Union[NDArray[np.int64], list], - weights: Union[NDArray[np.int64], list], - salt: Union[NDArray[np.int64], list], - version_key: int = settings.version_as_int, - wait_for_inclusion: bool = False, - wait_for_finalization: bool = False, - max_retries: int = 5, - ) -> tuple[bool, str]: - """ - Reveals the weights for a specific subnet on the Bittensor blockchain using the provided wallet. - This action serves as a revelation of the neuron's previously committed weight distribution. - - Args: - wallet (bittensor_wallet.Wallet): The wallet associated with the neuron revealing the weights. - netuid (int): The unique identifier of the subnet. - uids (np.ndarray): NumPy array of neuron UIDs for which weights are being revealed. - weights (np.ndarray): NumPy array of weight values corresponding to each UID. - salt (np.ndarray): NumPy array of salt values corresponding to the hash function. - version_key (int): Version key for compatibility with the network. Default is ``int representation of Bittensor version``. - wait_for_inclusion (bool): Waits for the transaction to be included in a block. Default is ``False``. - wait_for_finalization (bool): Waits for the transaction to be finalized on the blockchain. Default is ``False``. - max_retries (int): The number of maximum attempts to reveal weights. Default is ``5``. - - Returns: - tuple[bool, str]: ``True`` if the weight revelation is successful, False otherwise. And `msg`, a string - value describing the success or potential error. - - This function allows neurons to reveal their previously committed weight distribution, ensuring transparency - and accountability within the Bittensor network. - """ - - retries = 0 - success = False - message = "No attempt made. Perhaps it is too soon to reveal weights!" - - while retries < max_retries: - try: - success, message = reveal_weights_extrinsic( - subtensor=self, - wallet=wallet, - netuid=netuid, - uids=list(uids), - weights=list(weights), - salt=list(salt), - version_key=version_key, - wait_for_inclusion=wait_for_inclusion, - wait_for_finalization=wait_for_finalization, - ) - if success: - break - except Exception as e: - logging.error(f"Error revealing weights: {e}") - finally: - retries += 1 - - return success, message - - def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: - """ - Retrieves the 'Difficulty' hyperparameter for a specified subnet in the Bittensor network. - - This parameter is instrumental in determining the computational challenge required for neurons to participate in consensus and validation processes. 
- - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise. - - The 'Difficulty' parameter directly impacts the network's security and integrity by setting the computational effort required for validating transactions and participating in the network's consensus mechanism. - """ - call = self._get_hyperparameter( - param_name="Difficulty", netuid=netuid, block=block - ) - if call is None: - return None - return int(call) - - def recycle(self, netuid: int, block: Optional[int] = None) -> Optional["Balance"]: - """ - Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the amount of Tao that is effectively recycled within the Bittensor network. - - Args: - netuid (int): The unique identifier of the subnet. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. - - Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly how it is correlated with user activity and the overall cost of participation in a given subnet. - """ - call = self._get_hyperparameter(param_name="Burn", netuid=netuid, block=block) - return None if call is None else Balance.from_rao(int(call)) - - def get_delegate_take( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional[float]: - """ - Retrieves the delegate 'take' percentage for a neuron identified by its hotkey. The 'take' represents the percentage of rewards that the delegate claims from its nominators' stakes. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. - block (Optional[int]): The blockchain block number for the query. - - Returns: - Optional[float]: The delegate take percentage, None if not available. - - The delegate take is a critical parameter in the network's incentive structure, influencing the distribution of rewards among neurons and their nominators. - """ - _result = self.query_subtensor("Delegates", block, [hotkey_ss58]) - return ( - None - if getattr(_result, "value", None) is None - else u16_normalized_float(_result.value) - ) - - @networking.ensure_connected - def get_delegate_by_hotkey( - self, hotkey_ss58: str, block: Optional[int] = None - ) -> Optional[DelegateInfo]: - """ - Retrieves detailed information about a delegate neuron based on its hotkey. This function provides a comprehensive view of the delegate's status, including its stakes, nominators, and reward distribution. - - Args: - hotkey_ss58 (str): The ``SS58`` address of the delegate's hotkey. - block (Optional[int]): The blockchain block number for the query. Default is ``None``. - - Returns: - Optional[DelegateInfo]: Detailed information about the delegate neuron, ``None`` if not found. - - This function is essential for understanding the roles and influence of delegate neurons within the Bittensor network's consensus and governance structures. 
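Example (minimal sketch; assumes ``subtensor`` is a connected instance of this class and ``hotkey`` is an ``SS58`` address):

    info = subtensor.get_delegate_by_hotkey(hotkey)
    take = subtensor.get_delegate_take(hotkey)  # u16 value normalized to a float in [0, 1]
    # Both return None when the hotkey is not a registered delegate.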
- """ - encoded_hotkey = ss58_to_vec_u8(hotkey_ss58) - - block_hash = None if block is None else self.substrate.get_block_hash(block) - - json_body = self.substrate.rpc_request( - method="delegateInfo_getDelegate", # custom rpc method - params=([encoded_hotkey, block_hash] if block_hash else [encoded_hotkey]), - ) - - if not (result := json_body.get("result", None)): - return None - - return DelegateInfo.from_vec_u8(bytes(result)) - - # Subnet 27 uses this method name - _do_serve_axon = do_serve_axon diff --git a/modules/serializer/bytes.py b/commune/serializer/bytes.py similarity index 100% rename from modules/serializer/bytes.py rename to commune/serializer/bytes.py diff --git a/modules/serializer/munch.py b/commune/serializer/munch.py similarity index 100% rename from modules/serializer/munch.py rename to commune/serializer/munch.py diff --git a/modules/serializer/numpy.py b/commune/serializer/numpy.py similarity index 100% rename from modules/serializer/numpy.py rename to commune/serializer/numpy.py diff --git a/modules/serializer/pandas.py b/commune/serializer/pandas.py similarity index 100% rename from modules/serializer/pandas.py rename to commune/serializer/pandas.py diff --git a/modules/serializer/serializer.py b/commune/serializer/serializer.py similarity index 99% rename from modules/serializer/serializer.py rename to commune/serializer/serializer.py index b3f7a1ae..f19688c8 100644 --- a/modules/serializer/serializer.py +++ b/commune/serializer/serializer.py @@ -32,6 +32,25 @@ def serialize(self,x:dict, mode = 'dict', copy_value = True): 'serialized': True} return self.process_output(result, mode=mode) + + def deserialize(self, x) -> object: + """Serializes a torch object to DataBlock wire format. + """ + if isinstance(x, str): + if x.startswith('{') or x.startswith('['): + x = self.str2dict(x) + else: + if c.is_int(x): + x = int(x) + elif c.is_float(x): + x = float(x) + return x + is_serialized = self.is_serialized(x) + if is_serialized: + serializer = self.get_serializer(x['data_type']) + return serializer.deserialize(x['data']) + return x + def get_data_type_string(self, x): # GET THE TYPE OF THE VALUE data_type = str(type(x)).split("'")[1].lower() @@ -72,24 +91,6 @@ def is_serialized(self, data): else: return False - def deserialize(self, x) -> object: - """Serializes a torch object to DataBlock wire format. 
- """ - if isinstance(x, str): - if x.startswith('{') or x.startswith('['): - x = self.str2dict(x) - else: - if c.is_int(x): - x = int(x) - elif c.is_float(x): - x = float(x) - return x - is_serialized = self.is_serialized(x) - if is_serialized: - serializer = self.get_serializer(x['data_type']) - return serializer.deserialize(x['data']) - return x - def serializer_map(self): type_path = self.dirpath() module_paths = c.get_objects(type_path) diff --git a/modules/serializer/torch.py b/commune/serializer/torch.py similarity index 87% rename from modules/serializer/torch.py rename to commune/serializer/torch.py index 655890de..166be078 100644 --- a/modules/serializer/torch.py +++ b/commune/serializer/torch.py @@ -11,8 +11,7 @@ def serialize(self, data: 'torch.Tensor') -> 'DataBlock': from safetensors.torch import save return save({'data':data}).hex() - @classmethod - def str2bytes(cls, data: str, mode: str = 'hex') -> bytes: + def str2bytes(self, data: str, mode: str = 'hex') -> bytes: if mode in ['utf-8']: return bytes(data, mode) elif mode in ['hex']: diff --git a/commune/network/subspace/subspace.py b/commune/subspace/subspace.py similarity index 99% rename from commune/network/subspace/subspace.py rename to commune/subspace/subspace.py index 45ac3067..df67ccfa 100644 --- a/commune/network/subspace/subspace.py +++ b/commune/subspace/subspace.py @@ -7,9 +7,9 @@ from copy import deepcopy from typing import Any, Mapping, TypeVar, cast, List, Dict, Optional from collections import defaultdict -from commune.network.substrate.storage import StorageKey -from commune.network.substrate import (ExtrinsicReceipt, Keypair, SubstrateInterface)# type: ignore -from commune.network.subspace.types import (ChainTransactionError, +from commune.substrate.storage import StorageKey +from commune.substrate import (ExtrinsicReceipt, Keypair, SubstrateInterface)# type: ignore +from commune.subspace.types import (ChainTransactionError, NetworkQueryError, SubnetParamsMaps, SubnetParamsWithEmission, @@ -1445,8 +1445,9 @@ def update_subnet( params["netuid"] = subnet params['vote_mode'] = params.pop('governance_configuration')['vote_mode'] params["metadata"] = params.pop("metadata", None) - - + params["use_weights_encryption"] = params.pop("use_weights_encryption", False) + params['copier_margin'] = params.pop('copier_margin', 0) + params["max_encryption_period"] = params.pop("max_encryption_period", 420) return self.compose_call(fn="update_subnet",params=params,key=key) def metadata(self) -> str: @@ -2041,7 +2042,10 @@ def resolve_subnet(self, subnet:Optional[str]=None) -> int: subnet = subnet2netuid[subnet] else: subnet2netuid = self.subnet2netuid(update=1) - assert subnet in subnet2netuid, f"Subnet {subnet} not found" + subnet = subnet.lower() + subnet = subnet2netuid.get(subnet, subnet) + # assert subnet in subnet2netuid, f"Subnet {subnet} not found" + return subnet def subnets(self): @@ -2445,7 +2449,6 @@ def global_params(self, max_age=60, update=False) -> NetworkParams: ("MaxAllowedModules", []), ("MaxRegistrationsPerBlock", []), ("MaxAllowedWeightsGlobal", []), - ("FloorDelegationFee", []), ("FloorFounderShare", []), ("MinWeightStake", []), ("Kappa", []), @@ -2469,7 +2472,6 @@ def global_params(self, max_age=60, update=False) -> NetworkParams: "max_registrations_per_block": int(query_all["MaxRegistrationsPerBlock"]), "max_name_length": int(query_all["MaxNameLength"]), "min_weight_stake": int(query_all["MinWeightStake"]), - "floor_delegation_fee": int(query_all["FloorDelegationFee"]), "max_allowed_weights": 
int(query_all["MaxAllowedWeightsGlobal"]), "curator": Ss58Address(query_all["Curator"]), "min_name_length": int(query_all["MinNameLength"]), @@ -2760,7 +2762,7 @@ def get_module(self, module, subnet=0, fmt='j', mode = 'https', block = None, ** module['address'] = vec82str(module['address']) module['dividends'] = module['dividends'] / (U16_MAX) module['incentive'] = module['incentive'] / (U16_MAX) - module['stake_from'] = {k:self.format_amount(v, fmt=fmt) for k,v in module['stake_from']} + module['stake_from'] = {k:self.format_amount(v, fmt=fmt) for k,v in module['stake_from'].items()} module['stake'] = sum([v / 10**9 for k,v in module['stake_from'].items() ]) module['emission'] = self.format_amount(module['emission'], fmt=fmt) module['key'] = module.pop('controller', None) diff --git a/commune/network/subspace/types.py b/commune/subspace/types.py similarity index 100% rename from commune/network/subspace/types.py rename to commune/subspace/types.py diff --git a/commune/network/substrate/__init__.py b/commune/substrate/__init__.py similarity index 100% rename from commune/network/substrate/__init__.py rename to commune/substrate/__init__.py diff --git a/commune/network/substrate/base.py b/commune/substrate/base.py similarity index 100% rename from commune/network/substrate/base.py rename to commune/substrate/base.py diff --git a/commune/network/substrate/constants.py b/commune/substrate/constants.py similarity index 100% rename from commune/network/substrate/constants.py rename to commune/substrate/constants.py diff --git a/commune/network/substrate/contracts.py b/commune/substrate/contracts.py similarity index 99% rename from commune/network/substrate/contracts.py rename to commune/substrate/contracts.py index 7594099a..7935d50b 100644 --- a/commune/network/substrate/contracts.py +++ b/commune/substrate/contracts.py @@ -21,11 +21,11 @@ from .utils import version_tuple -from commune.network.substrate.exceptions import ExtrinsicFailedException, DeployContractFailedException, \ +from commune.substrate.exceptions import ExtrinsicFailedException, DeployContractFailedException, \ ContractReadFailedException, ContractMetadataParseException, StorageFunctionNotFound from scalecodec.base import ScaleBytes, ScaleType from scalecodec.types import GenericContractExecResult -from commune.network.substrate.base import SubstrateInterface, Keypair, ExtrinsicReceipt +from commune.substrate.base import SubstrateInterface, Keypair, ExtrinsicReceipt __all__ = ['ContractExecutionReceipt', 'ContractMetadata', 'ContractCode', 'ContractInstance', 'ContractEvent'] diff --git a/commune/network/substrate/exceptions.py b/commune/substrate/exceptions.py similarity index 100% rename from commune/network/substrate/exceptions.py rename to commune/substrate/exceptions.py diff --git a/commune/network/substrate/extensions.py b/commune/substrate/extensions.py similarity index 100% rename from commune/network/substrate/extensions.py rename to commune/substrate/extensions.py diff --git a/commune/network/substrate/interfaces.py b/commune/substrate/interfaces.py similarity index 100% rename from commune/network/substrate/interfaces.py rename to commune/substrate/interfaces.py diff --git a/commune/network/substrate/key.py b/commune/substrate/key.py similarity index 100% rename from commune/network/substrate/key.py rename to commune/substrate/key.py diff --git a/commune/network/substrate/storage.py b/commune/substrate/storage.py similarity index 99% rename from commune/network/substrate/storage.py rename to 
commune/substrate/storage.py index 0ecaac76..ba557c62 100644 --- a/commune/network/substrate/storage.py +++ b/commune/substrate/storage.py @@ -16,7 +16,7 @@ import binascii from typing import Any, Optional -from commune.network.substrate.exceptions import StorageFunctionNotFound +from commune.substrate.exceptions import StorageFunctionNotFound from scalecodec import ScaleBytes, GenericMetadataVersioned, ss58_decode from scalecodec.base import ScaleDecoder, RuntimeConfigurationObject, ScaleType from .utils.hasher import blake2_256, two_x64_concat, xxh128, blake2_128, blake2_128_concat, identity diff --git a/commune/network/substrate/utils/__init__.py b/commune/substrate/utils/__init__.py similarity index 100% rename from commune/network/substrate/utils/__init__.py rename to commune/substrate/utils/__init__.py diff --git a/commune/network/substrate/utils/caching.py b/commune/substrate/utils/caching.py similarity index 100% rename from commune/network/substrate/utils/caching.py rename to commune/substrate/utils/caching.py diff --git a/commune/network/substrate/utils/ecdsa_helpers.py b/commune/substrate/utils/ecdsa_helpers.py similarity index 100% rename from commune/network/substrate/utils/ecdsa_helpers.py rename to commune/substrate/utils/ecdsa_helpers.py diff --git a/commune/network/substrate/utils/encrypted_json.py b/commune/substrate/utils/encrypted_json.py similarity index 100% rename from commune/network/substrate/utils/encrypted_json.py rename to commune/substrate/utils/encrypted_json.py diff --git a/commune/network/substrate/utils/hasher.py b/commune/substrate/utils/hasher.py similarity index 100% rename from commune/network/substrate/utils/hasher.py rename to commune/substrate/utils/hasher.py diff --git a/commune/network/substrate/utils/ss58.py b/commune/substrate/utils/ss58.py similarity index 100% rename from commune/network/substrate/utils/ss58.py rename to commune/substrate/utils/ss58.py diff --git a/commune/utils/misc.py b/commune/utils/misc.py index f9d8b115..10e57a38 100644 --- a/commune/utils/misc.py +++ b/commune/utils/misc.py @@ -1163,7 +1163,6 @@ def is_mnemonic(s: str) -> bool: # Match 12 or 24 words separated by spaces return bool(re.match(r'^(\w+ ){11}\w+$', s)) or bool(re.match(r'^(\w+ ){23}\w+$', s)) - def file2functions(self, path): path = os.path.abspath(path) diff --git a/commune/utils/os.py b/commune/utils/os.py index ff92e086..62561fa0 100644 --- a/commune/utils/os.py +++ b/commune/utils/os.py @@ -7,6 +7,7 @@ from typing import * + def jsonable( value): import json try: @@ -15,6 +16,8 @@ def jsonable( value): except: return False +def osname(): + return os.name def check_pid( pid): """ Check For the existence of a unix pid. """ diff --git a/commune/utils/types.py b/commune/utils/types.py deleted file mode 100644 index d41a6493..00000000 --- a/commune/utils/types.py +++ /dev/null @@ -1,166 +0,0 @@ -""" -Common types for the communex module. -""" -import json -from enum import Enum -from typing import NewType, TypedDict - -Ss58Address = NewType("Ss58Address", str) -"""Substrate SS58 address. - -The `SS58 encoded address format`_ is based on the Bitcoin Base58Check format, -but with a few modification specifically designed to suite Substrate-based -chains. - -.. 
_SS58 encoded address format: - https://docs.substrate.io/reference/address-formats/ -""" - -# TODO: replace with dataclasses - -# == Burn related -MinBurn = NewType("MinBurn", int) -MaxBurn = NewType("MaxBurn", int) -BurnConfig = NewType("BurnConfig", dict[MinBurn, MaxBurn]) - - -class VoteMode (Enum): - authority = "Authority" - vote = "Vote" - - -class GovernanceConfiguration(TypedDict): - proposal_cost: int - proposal_expiration: int - vote_mode: int # 0: Authority, 1: Vote - proposal_reward_treasury_allocation: float - max_proposal_reward_treasury_allocation: int - proposal_reward_interval: int - - -class BurnConfiguration(TypedDict): - min_burn: int - max_burn: int - adjustment_alpha: int - target_registrations_interval: int - target_registrations_per_interval: int - max_registrations_per_interval: int - - -class NetworkParams(TypedDict): - # max - max_name_length: int - min_name_length: int # dont change the position - max_allowed_subnets: int - max_allowed_modules: int - max_registrations_per_block: int - max_allowed_weights: int - - # mins - floor_delegation_fee: int - floor_founder_share: int - min_weight_stake: int - - # S0 governance - curator: Ss58Address - general_subnet_application_cost: int - - # Other - subnet_immunity_period: int - governance_config: GovernanceConfiguration - - kappa: int - rho: int - -class SubnetParamsMaps(TypedDict): - netuid_to_founder: dict[int, Ss58Address] - netuid_to_founder_share: dict[int, int] - netuid_to_incentive_ratio: dict[int, int] - netuid_to_max_allowed_uids: dict[int, int] - netuid_to_max_allowed_weights: dict[int, int] - netuid_to_min_allowed_weights: dict[int, int] - netuid_to_max_weight_age: dict[int, int] - netuid_to_name: dict[int, str] - netuid_to_tempo: dict[int, int] - netuid_to_trust_ratio: dict[int, int] - netuid_to_bonds_ma: dict[int, int] - netuid_to_maximum_set_weight_calls_per_epoch: dict[int, int] - netuid_to_emission: dict[int, int] - netuid_to_immunity_period: dict[int, int] - netuid_to_governance_configuration: dict[int, GovernanceConfiguration] - netuid_to_min_validator_stake: dict[int, int] - netuid_to_max_allowed_validators: dict[int, int] - netuid_to_module_burn_config: dict[int, BurnConfiguration] - netuid_to_subnet_metadata: dict[int, str] - - -class SubnetParams(TypedDict): - name: str - tempo: int - min_allowed_weights: int - max_allowed_weights: int - max_allowed_uids: int - max_weight_age: int - trust_ratio: int - founder_share: int - incentive_ratio: int - founder: Ss58Address - maximum_set_weight_calls_per_epoch: int | None - bonds_ma: int | None - immunity_period: int - governance_config: GovernanceConfiguration - min_validator_stake: int | None - max_allowed_validators: int | None - module_burn_config: BurnConfiguration - subnet_metadata: str | None - - -# redundant "TypedDict" inheritance because of pdoc warns. -# see https://github.com/mitmproxy/pdoc/blob/26d40827ddbe1658e8ac46cd092f17a44cf0287b/pdoc/doc.py#L691-L692 -class SubnetParamsWithEmission(SubnetParams, TypedDict): - """SubnetParams with emission field.""" - - emission: int - """Subnet emission percentage (0-100). 
- """ - - -class ModuleInfo(TypedDict): - uid: int - key: Ss58Address - name: str - address: str # ":" - emission: int - incentive: int - dividends: int - stake_from: list[tuple[Ss58Address, int]] - regblock: int # block number - last_update: int # block number - stake: int - delegation_fee: int - metadata: str | None - - -class ModuleInfoWithBalance(ModuleInfo): - balance: int - - -class ModuleInfoWithOptionalBalance(ModuleInfo): - balance: int | None - - - -class ChainTransactionError(Exception): - """Error for any chain transaction related errors.""" - - -class NetworkError(Exception): - """Base for any network related errors.""" - - -class NetworkQueryError(NetworkError): - """Network query related error.""" - - -class NetworkTimeoutError(NetworkError): - """Timeout error""" diff --git a/modules/agent/agent.py b/modules/agent/agent.py index e6decfaa..c7987523 100644 --- a/modules/agent/agent.py +++ b/modules/agent/agent.py @@ -2,7 +2,7 @@ import os import json -class Agent(c.Module): +class Agent: def __init__(self, max_tokens=420000, prompt = 'The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.', @@ -13,7 +13,12 @@ def __init__(self, self.prompt = prompt self.model = c.module('model.openrouter')(model=model, **kwargs) - def generate(self, text = 'whats 2+2?' , model= 'anthropic/claude-3.5-sonnet', temperature= 0.5, max_tokens= 1000000,stream=True, ): + def generate(self, + text = 'whats 2+2?' , + model= 'anthropic/claude-3.5-sonnet', + temperature= 0.5, + max_tokens= 1000000, + stream=True, ): # text = self.process_text(text) return self.model.generate(text, stream=stream, model=model, max_tokens=max_tokens,temperature=temperature ) @@ -22,8 +27,36 @@ def generate(self, text = 'whats 2+2?' 
, model= 'anthropic/claude-3.5-sonnet', def ask(self, *text, **kwargs): text = ' '.join(list(map(str, text))) text = self.process_text(text) + module = kwargs.get('module', None) + if module != None: + text = c.code(module) + text return self.generate(text, **kwargs) + def edit(self, file='./', **kwargs): + text = c.file2text(file) + prompt = f""" + GOAL + edit the following file + CONTEXT + {text} + PLEASE OUTPUT AS THE FOLLOWS IF YOU WANT TO SEE + STR + """ + return self.ask(prompt, **kwargs) + + def exe(self, *text, path='./', **kwargs): + text = ' '.join(list(map(str, text))) + prompt = f""" + GOAL + {text} + CONTEXT + {c.files(path)} + USE YOUR BEST JUDGEMENT TO DECIDE THE NEXT SET OF ACTIONS IN THE COMMAND LINE + PLEASE OUTPUT AS THE FOLLOWS IF YOU WANT TO SEE + LIST[dict(cmd:str, reason:str)] + """ + return self.ask(prompt, **kwargs) + def process_text(self, text, threshold=1000): new_text = '' for word in text.split(' '): @@ -141,4 +174,7 @@ def score(self, module, **kwargs): print(ch, end='') if '' in output: break - return json.loads(output.split('')[1].split('')[0]) \ No newline at end of file + return json.loads(output.split('')[1].split('')[0]) + + # def find_fns(self): + # fns = [] \ No newline at end of file diff --git a/modules/agent/test.py b/modules/agent/test.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/anthropic/anthropic.py b/modules/anthropic/anthropic.py index c1471176..998653d9 100644 --- a/modules/anthropic/anthropic.py +++ b/modules/anthropic/anthropic.py @@ -78,15 +78,3 @@ def test(self): c.print(f"Response: {response}") return response - def schema(self): - """Return the module's schema""" - return { - "call": { - "input": { - "prompt": "str", - "system": "str", - "stream": "bool" - }, - "output": "str" - } - } \ No newline at end of file diff --git a/modules/base/base.py b/modules/base/base.py index 68b147d7..12137f44 100644 --- a/modules/base/base.py +++ b/modules/base/base.py @@ -1,11 +1,13 @@ import commune as c -class Demo(c.Module): +class Demo: def __init__(self, a=1, b=2): - self.set_config(locals()) + self.config = c.munch({"a": a, "b": b}) def generate(self, x:int = 1, y:int = 2) -> int: c.print(self.config, 'This is the config, it is a Munch object') return x + y - forward = generate \ No newline at end of file + def test(self, x:int = 1, y:int = 2) -> int: + return self.generate(x, y) + \ No newline at end of file diff --git a/modules/find/find.py b/modules/find/find.py index 0753d55a..ba763528 100644 --- a/modules/find/find.py +++ b/modules/find/find.py @@ -6,98 +6,10 @@ import json import os -class find: +class query: model='anthropic/claude-3.5-sonnet-20240620:beta' - def forward(self, text, max_chars=20000 , model=model, timeout=40): - if os.path.exists(text): - path = text - if os.path.isdir(path): - future2path = {} - path2result = {} - paths = c.files(path) - progress = c.tqdm(len(paths), desc='Reducing', leave=False) - n = len(paths) - cnt = 0 - while len(paths) > 0: - for p in paths: - future = c.submit(self.reduce, [p], timeout=timeout) - future2path[future] = p - try: - for future in c.as_completed(future2path, timeout=timeout): - p = future2path[future] - r = future.result() - paths.remove(p) - path2result[p] = r - cnt += 1 - print(f'REDUCED({p})({cnt}/{n})', r) - progress.update(1) - except Exception as e: - print(e) - return path2result - else: - assert os.path.exists(path), f'Path {path} does not exist' - print(f'Reducing {path}') - text = str(c.get_text(path)) - elif c.module_exists(text): - text = 
c.code(text) - code_hash = c.hash(text) - path = f'summary/{code_hash}' - text = f''' - GOAL - summarize the following into tupples and make sure you compress as much as oyu can - CONTEXT - {text} - OUTPUT FORMAT ONLY BETWEEN THE TAGS SO WE CAN PARSE - DICT(data=list[str]) - ''' - if len(text) >= max_chars * 2 : - batch_text = [text[i:i+max_chars] for i in range(0, len(text), max_chars)] - for i, t in enumerate(batch_text): - result = c.ask(t, model=model, stream=0) - if i == 0: - result = result.split('')[1] - if i == len(batch_text) - 1: - result = result.split('')[0] - path2result[path] = result - return result - if "'''" in text: - text = text.replace("'''", '"""') - data = c.ask(text, model=model, stream=0) - return {"data": self.process_data(data)} - def process_data(self, data): - try: - data = data.split('')[1].split('')[0] - return data - except: - return data - - def file2text(self, path): - file2text = {} - for file in self.files(path=path): - file2text[file] = c.get_text(file) - return file2text - - - - @classmethod - def lines(self, search:str=None, path:str='./') -> list[str]: - """ - Finds the lines in text with search - """ - # if is a directory, get all files - file2lines = {} - for file, text in c.file2text(path).items(): - found_lines = [] - lines = text.split('\n') - idx2line = {idx:line for idx, line in enumerate(lines)} - for idx, line in idx2line.items(): - if search in line: - found_lines.append((idx, line)) - file2lines[file] = found_lines - return file2lines - - def query(self, options, + def forward(self, options, query='most relevant modules', output_format="DICT(data:list[[idx:int, score:float]])", anchor = 'OUTPUT', @@ -133,13 +45,34 @@ def query(self, options, output = output output = json.loads(output) assert len(output) > 0 - print(type(output), output, len(output)) output_idx_list = [int(k) for k,v in output["data"]] - print(output_idx_list, len(options)) output = [options[i] for i in output_idx_list if len(options) > i] - return output + + def file2text(self, path): + file2text = {} + for file in self.files(path=path): + file2text[file] = c.get_text(file) + return file2text + + @classmethod + def lines(self, search:str=None, path:str='./') -> list[str]: + """ + Finds the lines in text with search + """ + # if is a directory, get all files + file2lines = {} + for file, text in c.file2text(path).items(): + found_lines = [] + lines = text.split('\n') + idx2line = {idx:line for idx, line in enumerate(lines)} + for idx, line in idx2line.items(): + if search in line: + found_lines.append((idx, line)) + file2lines[file] = found_lines + return file2lines + def files(self, path='./', query='the file that is the core of this folder', n=3, model='anthropic/claude-3.5-sonnet-20240620:beta'): files = self.query(options=c.files(path), query=query, n=n, model=model) return [c.abspath(path+k) for k in files] diff --git a/modules/git/git.py b/modules/git/git.py index 1929e690..3b00a15e 100644 --- a/modules/git/git.py +++ b/modules/git/git.py @@ -2,7 +2,7 @@ import subprocess -class Git(c.Module): +class git(c.Module): def is_repo(self, libpath:str ): # has the .git folder @@ -24,9 +24,8 @@ def clone(repo_url:str, target_directory:str = None, branch=None): @staticmethod - def content(url='LambdaLabsML/examples/main/stable-diffusion-finetuning/pokemon_finetune.ipynb', - prefix='https://raw.githubusercontent.com'): - return c.module('tool.web').rget(url=f'{prefix}/{url}') + def content(url='LambdaLabsML/examples/main/stable-diffusion-finetuning/pokemon_finetune.ipynb', 
prefix='https://raw.githubusercontent.com'): + return c.module('web')().page_content(f'{prefix}/{url}') submodule_path = c.repo_path + '/repos' def add_submodule(self, url, name=None, prefix=submodule_path): @@ -38,8 +37,6 @@ def add_submodule(self, url, name=None, prefix=submodule_path): c.cmd(f'git submodule add {url} {name}') - addsub = add_submodule - @classmethod def pull(cls, stash:bool = False, cwd=None): if cwd is None: diff --git a/modules/model/openrouter.py b/modules/model/openrouter.py index b948c1c7..d76231e6 100644 --- a/modules/model/openrouter.py +++ b/modules/model/openrouter.py @@ -8,7 +8,7 @@ class OpenRouter(c.Module): def __init__( self, - api_key = None, + api_key = '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', # your welcome ;) base_url: str | None = 'https://openrouter.ai/api/v1', timeout: float | None = None, max_retries: int = 10, diff --git a/modules/py/py.py b/modules/py/py.py index 16c675d2..8b33a1a7 100644 --- a/modules/py/py.py +++ b/modules/py/py.py @@ -45,16 +45,16 @@ def env2path(self): def envs(self): return list(self.env2path().keys()) - def env2packages(self): - return {env:self.packages(env) for env in self.envs()} + def env2libs(self): + return {env:self.libs(env) for env in self.envs()} def envs_paths(self): return list(self.env2path().values()) - def packages(self, env=None, search=None): + def libs(self, env=None, search=None): '''Available environments:''' - env = self.resolve_env(env) + env = self.get_env(env) env_path = os.path.join(self.venv_path, env, 'bin' if os.name == 'posix' else 'Scripts', 'python') if not os.path.exists(env_path): print(f"Environment {env} does not exist.") @@ -67,7 +67,7 @@ def packages(self, env=None, search=None): return output - def resolve_env(self, env): + def get_env(self, env): envs = self.envs() if not env: env = envs[0] @@ -75,34 +75,42 @@ def resolve_env(self, env): print('Selecting environment') return env - def run(self, script_path, env=None): - env = self.resolve_env(env) + def get_activation_path(self, env): + env = self.get_env(env) env_path = os.path.join(self.venv_path, env, 'bin' if os.name == 'posix' else 'Scripts') if not os.path.exists(env_path): print(f"Environment {env} does not exist.") return activation_script = os.path.join(env_path, 'activate') if os.name == 'posix' else os.path.join(env_path, 'Scripts', 'activate.bat') - python_executable = os.path.join(env_path, 'python') if os.name == 'posix' else os.path.join(env_path, 'python.exe') + return activation_script + + def run(self, path=c.repopath+'/modules/sandbox.py', env="bt"): + env = self.get_env(env) + env_path = os.path.join(self.venv_path, env, 'bin' if os.name == 'posix' else 'Scripts') + if not os.path.exists(env_path): + print(f"Environment {env} does not exist.") + return + activation_script = os.path.join(env_path, 'activate') if os.name == 'posix' else os.path.join(env_path, 'Scripts', 'activate.bat') + python_executable = os.path.join(env_path, 'python') if os.name == 'posix' else os.path.join(env_path, 'python.exe') os.system(f"{'source ' if os.name == 'posix' else ''}{activation_script}") - os.system(python_executable + ' ' + script_path) + os.system(python_executable + ' ' + path) if os.name == 'posix': os.system("deactivate") + def env2cmd(self): env2cmd = {} for env, path in self.env2path().items(): - env2cmd[env] = f'source {path}/bin/activate' + env2cmd[env] = f'{path}/bin/activate' return env2cmd - + def enter_env(self, env): - cmd = self.env2cmd().get(env) - # Add shell interpreter - cmd = 
f'bash -c "{cmd}"' - # Print the command for debugging purposes - print(cmd) - # Execute the command - return os.system(cmd) - - - \ No newline at end of file + env_path = self.env2path().get(env) + if os.name == 'posix': # Linux/Mac + activate_script = f'{env_path}/bin/activate' + shell_command = f'{activate_script}' + subprocess.run(shell_command, shell=True) + else: # Windows + activate_script = f'{env_path}\\Scripts\\activate.bat' + subprocess.run(['cmd', '/K', activate_script]) \ No newline at end of file diff --git a/modules/remote/app.py b/modules/remote/app.py index 03b63311..d578d025 100644 --- a/modules/remote/app.py +++ b/modules/remote/app.py @@ -208,7 +208,7 @@ def ssh(self): emoji = c.emoji("cross") if is_error else c.emoji("check") stats = host2stats.get(host, {'success': 0, 'error': 0}) title = f'{emoji} :: {host} :: {emoji}' - + st.write(result) if not is_error: msg = result.strip() msg = fn_code(msg) diff --git a/modules/sandbox.py b/modules/sandbox.py index 0e9ec677..5122d26b 100644 --- a/modules/sandbox.py +++ b/modules/sandbox.py @@ -1 +1,3 @@ import commune as c + +print('FAM') \ No newline at end of file
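The relocated serializer keeps its tagged-envelope scheme: serialize() wraps values as {'data_type': ..., 'data': ..., 'serialized': True}, and the deserialize() method moved earlier in the file parses JSON-looking strings, casts numeric strings, and dispatches tagged dicts back to a type-specific serializer. Below is a minimal standalone sketch of that dispatch pattern only; it is not the commune API. NumpySerializer, SERIALIZERS, serialize and deserialize here are illustrative stand-ins, json/try-except replace str2dict/c.is_int/c.is_float, and numpy is assumed to be installed.

import json
import numpy as np

class NumpySerializer:
    # Illustrative per-type serializer, analogous to the serializer modules moved into commune/serializer/.
    def serialize(self, x: np.ndarray) -> dict:
        return {'data': x.tolist(), 'dtype': str(x.dtype)}
    def deserialize(self, blob: dict) -> np.ndarray:
        return np.array(blob['data'], dtype=blob['dtype'])

SERIALIZERS = {'ndarray': NumpySerializer()}

def serialize(x):
    # Wrap known types in a tagged envelope, mirroring the 'data_type'/'data'/'serialized'
    # keys that is_serialized()/get_serializer() rely on in the diff above.
    type_name = type(x).__name__
    if type_name in SERIALIZERS:
        return {'data_type': type_name,
                'data': SERIALIZERS[type_name].serialize(x),
                'serialized': True}
    return x

def deserialize(x):
    # Same branching as the moved deserialize(): JSON-looking strings are parsed,
    # numeric strings are cast, tagged envelopes are dispatched, everything else passes through.
    if isinstance(x, str):
        if x.startswith('{') or x.startswith('['):
            x = json.loads(x)
        else:
            try:
                return int(x)
            except ValueError:
                try:
                    return float(x)
                except ValueError:
                    return x
    if isinstance(x, dict) and x.get('serialized') and 'data_type' in x:
        return SERIALIZERS[x['data_type']].deserialize(x['data'])
    return x

if __name__ == '__main__':
    arr = np.arange(4)
    wire = serialize(arr)                 # {'data_type': 'ndarray', 'data': {...}, 'serialized': True}
    assert (deserialize(wire) == arr).all()
    assert deserialize('3.5') == 3.5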
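For the get_module() change in commune/subspace/subspace.py: stake_from is now treated as a mapping of staker address to raw amount, so the comprehension has to iterate with .items(), and the stake total divides by 10**9 exactly as the surrounding line does. A small hedged illustration with made-up addresses and a stand-in format_amount (not the commune helper):

RAW_PER_TOKEN = 10**9  # raw chain units per token, matching the v / 10**9 division in the diff

def format_amount(raw: int) -> float:
    # Stand-in for self.format_amount(v, fmt=fmt); only converts raw units to tokens.
    return raw / RAW_PER_TOKEN

stake_from = {'5F...alice': 2_500_000_000, '5G...bob': 500_000_000}  # hypothetical staker keys

formatted = {k: format_amount(v) for k, v in stake_from.items()}     # .items() is required once this is a dict
total_stake = sum(v / RAW_PER_TOKEN for v in stake_from.values())

print(formatted)     # {'5F...alice': 2.5, '5G...bob': 0.5}
print(total_stake)   # 3.0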
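For modules/py/py.py, the essential step in run() is executing the script with the virtualenv's own interpreter (the python/python.exe path it builds); the preceding os.system call on the activate script only affects a short-lived child shell, so the direct interpreter invocation is what actually selects the environment. A sketch of that core step under assumed paths, not a drop-in replacement for the module:

import os
import subprocess

def run_in_venv(venv_path: str, script: str) -> int:
    # Pick the environment's interpreter directly instead of sourcing its activate script.
    python = os.path.join(venv_path, 'bin', 'python') if os.name == 'posix' \
        else os.path.join(venv_path, 'Scripts', 'python.exe')
    return subprocess.run([python, script]).returncode

# Example (hypothetical paths): run_in_venv(os.path.expanduser('~/.venvs/bt'), 'modules/sandbox.py')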