From 6d17325b4b7883d16bb950db2366e42d5e2b3f1f Mon Sep 17 00:00:00 2001 From: latentvector Date: Sun, 17 Nov 2024 14:56:02 -0500 Subject: [PATCH] refactor --- commune/cli.py | 203 ++++++-------- commune/executor.py | 1 - commune/key.py | 2 - commune/module.py | 150 +++++------ commune/modules/agent/agent.py | 8 +- commune/modules/chat/chat.py | 9 +- commune/modules/selenium/selenium.py | 188 +++++++++++++ commune/{ => modules}/serializer/bytes.py | 0 commune/{ => modules}/serializer/munch.py | 0 commune/{ => modules}/serializer/numpy.py | 0 commune/{ => modules}/serializer/pandas.py | 0 .../{ => modules}/serializer/serializer.py | 0 commune/{ => modules}/serializer/torch.py | 0 commune/modules/store/store.py | 154 +++++++++++ commune/network/subspace/key.py | 67 ----- commune/network/subspace/subspace.py | 126 +++++---- commune/network/substrate/storage.py | 1 - commune/routes.json | 186 ------------- commune/server.py | 45 ++-- commune/vali.py | 5 + docs/2_key_management.md | 251 ------------------ docs/3_cli_basics.md | 179 ------------- docs/4_running_tests.md | 4 - docs/{0_install.md => _install.md} | 1 - docs/{1_what_is_a_module.md => _intro.md} | 0 docs/{modules => }/cli.md | 0 docs/{modules => }/key.md | 0 docs/{modules => }/module.md | 2 +- docs/modules/serializer.md | 37 --- docs/{modules => }/network.md | 0 docs/{modules => }/server.md | 40 +++ docs/{modules => }/vali.md | 0 scripts/REAMDE.md | 0 setup.py | 29 +- tests/test_subspace.py | 2 +- 35 files changed, 653 insertions(+), 1037 deletions(-) create mode 100644 commune/modules/selenium/selenium.py rename commune/{ => modules}/serializer/bytes.py (100%) rename commune/{ => modules}/serializer/munch.py (100%) rename commune/{ => modules}/serializer/numpy.py (100%) rename commune/{ => modules}/serializer/pandas.py (100%) rename commune/{ => modules}/serializer/serializer.py (100%) rename commune/{ => modules}/serializer/torch.py (100%) create mode 100644 commune/modules/store/store.py delete mode 100644 commune/network/subspace/key.py delete mode 100644 commune/routes.json delete mode 100644 docs/2_key_management.md delete mode 100644 docs/3_cli_basics.md delete mode 100644 docs/4_running_tests.md rename docs/{0_install.md => _install.md} (99%) rename docs/{1_what_is_a_module.md => _intro.md} (100%) rename docs/{modules => }/cli.md (100%) rename docs/{modules => }/key.md (100%) rename docs/{modules => }/module.md (90%) delete mode 100644 docs/modules/serializer.md rename docs/{modules => }/network.md (100%) rename docs/{modules => }/server.md (52%) rename docs/{modules => }/vali.md (100%) delete mode 100644 scripts/REAMDE.md diff --git a/commune/cli.py b/commune/cli.py index b0c099e4..8b9228d5 100644 --- a/commune/cli.py +++ b/commune/cli.py @@ -2,7 +2,6 @@ import sys import time import sys - print = c.print def determine_type(x): x = str(x) @@ -28,6 +27,7 @@ def determine_type(x): except: # if conversion fails, return as string return x + elif x.startswith('{') and x.endswith('}'): # this is a dictionary if len(x) == 2: @@ -41,124 +41,93 @@ def determine_type(x): return x else: # try to convert to int or float, otherwise return as string - try: - return int(x) - except ValueError: + + for type_fn in [int, float]: try: - return float(x) + return type_fn(x) except ValueError: - return x - -class cli: - """ - Create and init the CLI class, which handles the coldkey, hotkey and tao transfer - """ - def __init__(self, - base = 'module', - fn_splitters = [':', '/', '//', '::'], - helper_fns = ['code', 'schema', 'fn_schema', 'help', 
'fn_info', 'fn_hash'], - sep = '--', - ai_catch = True, - ): - self.set_kwargs(locals()) - - def set_kwargs(self, kwargs, avoid=['self']): - # remove self from kwargs - for key, value in kwargs.items(): - if key in avoid: - continue - setattr(self, key, value) - - - def forward(self, *argv): - t0 = time.time() - argv = list(*argv) - if len(argv) == 0: - argv = sys.argv[1:] - output = None - init_kwargs = {} - if any([arg.startswith(self.sep) for arg in argv]): - for arg in c.copy(argv): - if arg.startswith(self.sep): - key = arg[len(self.sep):].split('=')[0] - if key in self.helper_fns: - # is it a helper function - return self.forward([key , argv[0]]) - else: - value = arg.split('=')[-1] if '=' in arg else True - argv.remove(arg) - init_kwargs[key] = determine_type(value) - - # any of the --flags are init kwargs - fn = argv.pop(0).replace('-', '_') - module = c.module(self.base) - fs = [fs for fs in self.fn_splitters if fs in fn] - if len(fs) == 1: - module, fn = fn.split(fs[0]) - module = c.shortcuts.get(module, module) - modules = c.modules() - module_options = [] - for m in modules: - if module == m: - module_options = [m] - break - if module in m: - module_options.append(m) - if len(module_options)>0: - module = module_options[0] - module = c.module(module) - else: - raise AttributeError(f'Function {fn} not found in {module}') - if hasattr(module, 'fn2module') and not hasattr(module, fn): - c.print(f'ROUTE_ACTIVATED({fn} from {module})') - fn2module = module.fn2module() - if not fn in fn2module: - functions = c.get_functions(module) - return c.print(f'FN({fn}) not found {module}', color='red') - module = c.module(fn2module[fn]) - - fn_obj = getattr(module, fn) - - if c.is_property(fn_obj) or c.classify_fn(fn_obj) == 'self': - fn_obj = getattr(module(**init_kwargs), fn) - - - if callable(fn_obj): - args = [] - kwargs = {} - parsing_kwargs = False - for arg in argv: - if '=' in arg: - parsing_kwargs = True - key, value = arg.split('=') - kwargs[key] = determine_type(value) + pass + return x +def forward(sep = '--', + fn_splitters = [':', '/', '//', '::'], + base = 'module', + helper_fns = ['code', 'schema', 'fn_schema', 'help', 'fn_info', 'fn_hash']): + t0 = time.time() + argv = sys.argv[1:] + output = None + init_kwargs = {} + if any([arg.startswith(sep) for arg in argv]): + for arg in c.copy(argv): + if arg.startswith(sep): + key = arg[len(sep):].split('=')[0] + if key in helper_fns: + # is it a helper function + return forward([key , argv[0]]) else: - assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' - args.append(determine_type(arg)) - output = fn_obj(*args, **kwargs) - else: - output = fn_obj - buffer = '⚡️'*4 - c.print(buffer+fn+buffer, color='yellow') - latency = time.time() - t0 - is_error = c.is_error(output) - msg = f'❌Error({latency:.3f}sec)❌' if is_error else f'✅Result({latency:.3f}s)✅' - c.print(msg) - is_generator = c.is_generator(output) - if is_generator: - for item in output: - if isinstance(item, dict): - c.print(item) - else: - c.print(item, end='') - else: - c.print(output) - return output + value = arg.split('=')[-1] if '=' in arg else True + argv.remove(arg) + init_kwargs[key] = determine_type(value) - def is_property(self, obj): - return isinstance(obj, property) - - - + # any of the --flags are init kwargs + fn = argv.pop(0).replace('-', '_') + module = c.module(base) + fs = [fs for fs in fn_splitters if fs in fn] + if len(fs) == 1: + module, fn = fn.split(fs[0]) + module = c.shortcuts.get(module, module) + modules = c.modules() + 
module_options = [] + for m in modules: + if module == m: + module_options = [m] + break + if module in m: + module_options.append(m) + if len(module_options)>0: + module = module_options[0] + print('Module:', module) + module = c.module(module) + else: + raise AttributeError(f'Function {fn} not found in {module}') + if hasattr(module, 'fn2module') and not hasattr(module, fn): + c.print(f'ROUTE_ACTIVATED({fn} from {module})') + fn2module = module.fn2module() + if not fn in fn2module: + return c.print(f'FN({fn}) not found {module}', color='red') + module = c.module(fn2module[fn]) + fn_obj = getattr(module, fn) + if c.is_property(fn_obj) or c.classify_fn(fn_obj) == 'self': + fn_obj = getattr(module(**init_kwargs), fn) + if callable(fn_obj): + args = [] + kwargs = {} + parsing_kwargs = False + for arg in argv: + if '=' in arg: + parsing_kwargs = True + key, value = arg.split('=') + kwargs[key] = determine_type(value) + else: + assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' + args.append(determine_type(arg)) + output = fn_obj(*args, **kwargs) + else: + output = fn_obj + buffer = '⚡️'*4 + c.print(buffer+fn+buffer, color='yellow') + latency = time.time() - t0 + is_error = c.is_error(output) + msg = f'❌Error({latency:.3f}sec)❌' if is_error else f'✅Result({latency:.3f}s)✅' + c.print(msg) + is_generator = c.is_generator(output) + if is_generator: + for item in output: + if isinstance(item, dict): + c.print(item) + else: + c.print(item, end='') + else: + c.print(output) + return output def main(): - cli().forward() \ No newline at end of file + forward() \ No newline at end of file diff --git a/commune/executor.py b/commune/executor.py index d46d368a..55ec0ba6 100644 --- a/commune/executor.py +++ b/commune/executor.py @@ -349,4 +349,3 @@ def status(self): is_full = self.is_full ) - \ No newline at end of file diff --git a/commune/key.py b/commune/key.py index 7d120569..b079e3ae 100644 --- a/commune/key.py +++ b/commune/key.py @@ -13,8 +13,6 @@ from Crypto.Cipher import AES import nacl.bindings import nacl.public - - from eth_keys.datatypes import PrivateKey from scalecodec.utils.ss58 import ss58_encode, ss58_decode, get_ss58_format from scalecodec.base import ScaleBytes diff --git a/commune/module.py b/commune/module.py index 2308d090..d03f9fd7 100755 --- a/commune/module.py +++ b/commune/module.py @@ -17,18 +17,6 @@ class c: splitters = [':', '/', '.'] endpoints = ['ask', 'generate', 'forward'] core_features = ['module_name', 'module_class', 'filepath', 'dirpath', 'tree'] - shortcuts = { - 'openai' : 'model.openai', - 'openrouter': 'model.openrouter', - 'or' : ' model.openrouter', - 'r' : 'remote', - 's' : 'network.subspace', - 'subspace': 'network.subspace', - 'namespace': 'network', - 'local': 'network', - 'network.local': 'network', - } - lib_name = libname = lib = __file__.split('/')[-3]# the name of the library organization = org = orgname = 'commune-ai' # the organization git_host = 'https://github.com' @@ -50,7 +38,64 @@ class c: __ss58_format__ = 42 # the ss58 format for the substrate address storage_path = os.path.expanduser(f'~/.{libname}') default_tag = 'base' + shortcuts = { + 'openai' : 'model.openai', + 'openrouter': 'model.openrouter', + 'or' : ' model.openrouter', + 'r' : 'remote', + 's' : 'network.subspace', + 'subspace': 'network.subspace', + 'namespace': 'network', + 'local': 'network', + 'network.local': 'network', + } + @classmethod + def module(cls, path:str = 'module', cache=True, verbose = False, tree = None, trials=1, **_kwargs ) -> str: + if 
path == None: + path = 'module' + if path.endswith('.py'): + path = c.path2name(path) + og_path = path + path = path or 'module' + t0 = time.time() + og_path = path + if path in c.module_cache and cache: + module = c.module_cache[path] + else: + if path in ['module', 'c']: + module = c + else: + + tree = tree or c.tree() + path = c.shortcuts.get(path, path) + path = tree.get(path, path) + try: + module = c.import_object(path) + except Exception as e: + if trials == 0: + raise ValueError(f'Error in module {og_path} {e}') + return c.module(path, cache=cache, verbose=verbose, tree=tree, trials=trials-1) + if cache: + c.module_cache[path] = module + latency = c.round(time.time() - t0, 3) + if not hasattr(module, 'module_name'): + module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) + module.module_class = lambda *args, **kwargs : c.module_class(module) + module.resolve_object = lambda *args, **kwargs : c.resolve_object(module) + module.filepath = lambda *args, **kwargs : c.filepath(module) + module.dirpath = lambda *args, **kwargs : c.dirpath(module) + module.code = lambda *args, **kwargs : c.code(module) + module.schema = lambda *args, **kwargs : c.schema(module) + module.functions = module.fns = lambda *args, **kwargs : c.get_functions(module) + module.params = lambda *args, **kwargs : c.params(module) + module.key = c.get_key(module.module_name(), create_if_not_exists=True) + module.fn2code = lambda *args, **kwargs : c.fn2code(module) + module.help = lambda *args, **kwargs : c.help(*args, module=module, **kwargs) + c.print(f'Module({og_path}->{path})({latency}s)', verbose=verbose) + return module + get_module = module + def __init__(self, *args, **kwargs): pass @@ -199,8 +244,6 @@ def resolve_object(cls, obj:str = None, **kwargs): obj = cls return obj - ## - @classmethod def pwd(cls): pwd = os.getcwd() # the current wor king directory from the process starts @@ -322,7 +365,10 @@ def is_module_file(cls, module = None) -> bool: dirpath = cls.dirpath() filepath = cls.filepath() return bool(dirpath.split('/')[-1] != filepath.split('/')[-1].split('.')[0]) + + is_file_module = is_module_file + @classmethod def is_module_folder(cls, module = None) -> bool: if module != None: @@ -479,6 +525,7 @@ def get_yaml( path:str=None, default={}, **kwargs) -> Dict: @classmethod def get_routes(cls, cache=True): + if not hasattr(cls, 'routes'): if cls.route_cache is not None and cache: return cls.route_cache @@ -489,7 +536,6 @@ def get_routes(cls, cache=True): if callable(routes): routes = routes() - def add_utils(): utils = c.utils() for util in utils: @@ -498,13 +544,12 @@ def add_utils(): routes[k] = routes.get(k , []) routes[k].append(v) return routes - + add_utils() cls.route_cache = routes return routes - #### THE FINAL TOUCH , ROUTE ALL OF THE MODULES TO THE CURRENT MODULE BASED ON THE routes CONFIG - + @classmethod def fn2route(cls): routes = cls.get_routes() @@ -1865,61 +1910,7 @@ def get_tree(cls, path, depth = 10, max_age=60, update=False, **kwargs): c.put(tree_cache_path, tree) return tree - @classmethod - def module(cls, path:str = 'module', - cache=True, - verbose = False, - tree = None, - trials=1, **_kwargs ) -> str: - if path == None: - path = 'module' - if path.endswith('.py'): - path = c.path2name(path) - og_path = path - path = path or 'module' - t0 = time.time() - og_path = path - if path in c.module_cache and cache: - module = c.module_cache[path] - else: - if path in ['module', 'c']: - module = c - else: - - tree = tree or c.tree() - path = 
c.shortcuts.get(path, path) - path = tree.get(path, path) - try: - module = c.import_object(path) - except Exception as e: - if trials == 0: - raise ValueError(f'Error in module {og_path} {e}') - return c.module(path, cache=cache, verbose=verbose, tree=tree, trials=trials-1) - - if cache: - c.module_cache[path] = module - latency = c.round(time.time() - t0, 3) - # if - if not hasattr(module, 'module_name'): - - module.module_name = module.name = lambda *args, **kwargs : c.module_name(module) - module.module_class = lambda *args, **kwargs : c.module_class(module) - module.resolve_object = lambda *args, **kwargs : c.resolve_object(module) - module.filepath = lambda *args, **kwargs : c.filepath(module) - module.dirpath = lambda *args, **kwargs : c.dirpath(module) - module.code = lambda *args, **kwargs : c.code(module) - module.schema = lambda *args, **kwargs : c.schema(module) - module.functions = module.fns = lambda *args, **kwargs : c.get_functions(module) - module.params = lambda *args, **kwargs : c.params(module) - module.key = c.get_key(module.module_name(), create_if_not_exists=True) - module.fn2code = lambda *args, **kwargs : c.fn2code(module) - module.help = lambda *args, **kwargs : c.help(*args, module=module, **kwargs) - - c.print(f'Module({og_path}->{path})({latency}s)', verbose=verbose) - return module - get_module = module - _tree = None @classmethod def tree(cls, search=None, max_age=60,update=False, **kwargs): @@ -2091,6 +2082,10 @@ def set_api_key(self, api_key:str, cache:bool = True): self.add_api_key(api_key) assert isinstance(api_key, str) + + def add_repo(self, repo:str, path:str=None, **kwargs): + return c.cmd(f'git clone {repo} {path}', **kwargs) + def add_api_key(self, api_key:str): assert isinstance(api_key, str) path = self.resolve_path('api_keys') @@ -2174,13 +2169,14 @@ def resolve_extension( filename:str, extension = '.py') -> str: return filename return filename + extension - def help(self, *text, module=None, global_context=f'{rootpath}/docs', **kwargs): - if self.module_name() == 'module': + @classmethod + def help(cls, *text, module=None, global_context=f'{rootpath}/docs', **kwargs): + if cls.module_name() == 'module': return c.module('docs')().help(*text) text = ' '.join(map(str, text)) if global_context != None: text = text + str(c.file2text(global_context)) - module = module or self.module_name() + module = module or cls.module_name() context = c.code(module) return c.ask(f'{context} {text} \n') @@ -2241,7 +2237,7 @@ def install(self, path ): def epoch(self, *args, **kwargs): return c.run_epoch(*args, **kwargs) - + c.routes = { "vali": [ @@ -2409,7 +2405,7 @@ def epoch(self, *args, **kwargs): "global_params", "balance", "get_balance", - "get_stak", + "get_stake", "get_stake_to", "get_stake_from", "my_stake_to", diff --git a/commune/modules/agent/agent.py b/commune/modules/agent/agent.py index d736236a..10fa2836 100644 --- a/commune/modules/agent/agent.py +++ b/commune/modules/agent/agent.py @@ -19,12 +19,8 @@ def build(self, *args, path=c.docs_path): for ch in c.ask(prompt): output += output if front_anchor in output: - output.split(front_anchor)[1] - yield ch - - - - + content = output.split(front_anchor)[1] + def args2text(self, args): return ' '.join(list(map(str, args))) diff --git a/commune/modules/chat/chat.py b/commune/modules/chat/chat.py index 8c76666e..302b7062 100644 --- a/commune/modules/chat/chat.py +++ b/commune/modules/chat/chat.py @@ -11,9 +11,8 @@ def __init__(self, model = None, history_path='history', **kwargs): - + self.max_tokens = 
max_tokens - self.set_module(model, password = password, name = name, @@ -60,14 +59,16 @@ def generate(self, stream=True, ): context = context or path - text = self.process_text(text, context=context) + # text = self.process_text(text, context=context) + print(text) output = self.model.generate(text, stream=stream, model=model, max_tokens=max_tokens,temperature=temperature ) for token in output: yield token forward = generate def ask(self, *text, **kwargs): - return self.generate(' '.join(list(map(str, text))), **kwargs) + text = ' '.join(list(map(str, text))) + return self.generate(text, **kwargs) def process_text(self, text, context=None): if context != None: diff --git a/commune/modules/selenium/selenium.py b/commune/modules/selenium/selenium.py new file mode 100644 index 00000000..40f0f5d1 --- /dev/null +++ b/commune/modules/selenium/selenium.py @@ -0,0 +1,188 @@ +from selenium import webdriver +from selenium.webdriver.common.by import By +from selenium.webdriver.common.keys import Keys +from selenium.webdriver.support.ui import WebDriverWait +from selenium.webdriver.support import expected_conditions as EC +from selenium.common.exceptions import TimeoutException + +class GoogleAgent: + def __init__(self): + # Initialize the Chrome driver + self.driver = webdriver.Chrome() + self.wait = WebDriverWait(self.driver, 10) + + def start(self): + """Start the browser and go to Google""" + self.driver.get("https://www.google.com") + + def search(self, query): + """Perform a Google search""" + try: + # Find the search box + search_box = self.wait.until( + EC.presence_of_element_located((By.NAME, "q")) + ) + + # Clear any existing text and enter the search query + search_box.clear() + search_box.send_keys(query) + search_box.send_keys(Keys.RETURN) + + # Wait for results to load + self.wait.until( + EC.presence_of_element_located((By.ID, "search")) + ) + + return True + except TimeoutException: + print("Timeout while searching") + return False + + def get_search_results(self, num_results=5): + """Get search results""" + try: + # Wait for results to be present + results = self.wait.until( + EC.presence_of_all_elements_located((By.CSS_SELECTOR, "div.g")) + ) + + # Extract and return results + search_results = [] + for i, result in enumerate(results): + if i >= num_results: + break + + try: + title = result.find_element(By.CSS_SELECTOR, "h3").text + link = result.find_element(By.CSS_SELECTOR, "a").get_attribute("href") + search_results.append({"title": title, "link": link}) + except: + continue + + return search_results + + except TimeoutException: + print("Timeout while getting results") + return [] + + def click_result(self, index): + """Click on a search result by index""" + try: + results = self.wait.until( + EC.presence_of_all_elements_located((By.CSS_SELECTOR, "div.g h3")) + ) + + if 0 <= index < len(results): + results.click() + return True + return False + + except TimeoutException: + print("Timeout while clicking result") + return False + + def go_back(self): + """Go back to the previous page""" + self.driver.back() + + def close(self): + """Close the browser""" + self.driver.quit() + +# Example usage: +if __name__ == "__main__": + # Create an instance of the GoogleAgent + agent = GoogleAgent() + + try: + # Start the browser + agent.start() + + # Perform a search + agent.search("Python programming") + + # Get search results + results = agent.get_search_results(3) + + # Print results + for i, result in enumerate(results): + print(f"Result {i + 1}:") + print(f"Title: {result['title']}") + 
print(f"Link: {result['link']}")
+            print("---")
+
+        # Click the first result
+        agent.click_result(0)
+
+        # Wait a few seconds to see the page
+        import time
+        time.sleep(3)
+
+        # Go back to search results
+        agent.go_back()
+
+        # Wait a few more seconds
+        time.sleep(3)
+
+    finally:
+        # Close the browser
+        agent.close()
+
+class EnhancedGoogleAgent(GoogleAgent):
+    def scroll_down(self):
+        """Scroll down the page"""
+        self.driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
+
+    def scroll_up(self):
+        """Scroll up the page"""
+        self.driver.execute_script("window.scrollTo(0, 0);")
+
+    def take_screenshot(self, filename):
+        """Take a screenshot of the current page"""
+        self.driver.save_screenshot(filename)
+
+    def get_page_text(self):
+        """Get all text from the current page"""
+        return self.driver.find_element(By.TAG_NAME, "body").text
+
+    def navigate_to(self, url):
+        """Navigate to a specific URL"""
+        self.driver.get(url)
+
+    def get_current_url(self):
+        """Get the current URL"""
+        return self.driver.current_url
+
+    def run(self):
+        # Example usage with enhanced features
+        import time  # time is otherwise only imported inside the __main__ block above
+        agent = EnhancedGoogleAgent()
+
+        try:
+            # Start the browser
+            agent.start()
+
+            # Perform a search
+            agent.search("Python web scraping")
+
+            # Take a screenshot of search results
+            agent.take_screenshot("search_results.png")
+
+            # Scroll down and up
+            agent.scroll_down()
+            time.sleep(1)
+            agent.scroll_up()
+
+            # Get current URL
+            current_url = agent.get_current_url()
+            print(f"Current URL: {current_url}")
+
+            # Navigate to a specific website
+            agent.navigate_to("https://python.org")
+
+            # Get page text
+            page_text = agent.get_page_text()
+            print("First 200 characters of page text:")
+            print(page_text[:200])
+
+        finally:
+            agent.close()
\ No newline at end of file
diff --git a/commune/serializer/bytes.py b/commune/modules/serializer/bytes.py
similarity index 100%
rename from commune/serializer/bytes.py
rename to commune/modules/serializer/bytes.py
diff --git a/commune/serializer/munch.py b/commune/modules/serializer/munch.py
similarity index 100%
rename from commune/serializer/munch.py
rename to commune/modules/serializer/munch.py
diff --git a/commune/serializer/numpy.py b/commune/modules/serializer/numpy.py
similarity index 100%
rename from commune/serializer/numpy.py
rename to commune/modules/serializer/numpy.py
diff --git a/commune/serializer/pandas.py b/commune/modules/serializer/pandas.py
similarity index 100%
rename from commune/serializer/pandas.py
rename to commune/modules/serializer/pandas.py
diff --git a/commune/serializer/serializer.py b/commune/modules/serializer/serializer.py
similarity index 100%
rename from commune/serializer/serializer.py
rename to commune/modules/serializer/serializer.py
diff --git a/commune/serializer/torch.py b/commune/modules/serializer/torch.py
similarity index 100%
rename from commune/serializer/torch.py
rename to commune/modules/serializer/torch.py
diff --git a/commune/modules/store/store.py b/commune/modules/store/store.py
new file mode 100644
index 00000000..34b435ca
--- /dev/null
+++ b/commune/modules/store/store.py
@@ -0,0 +1,154 @@
+import commune as c
+import sys
+import time
+
+# Third-party dependencies used by the class below (assumed to be installed):
+from typing import Dict, List, Optional, Union
+import numpy as np
+import torch
+import faiss
+from PIL import Image
+from transformers import AutoModel, AutoTokenizer, AutoFeatureExtractor
+from sentence_transformers import SentenceTransformer
+
+
+class MultiModalVectorStore:
+    def __init__(
+        self,
+        shared_model: bool = False,
+        text_model_name: str = "sentence-transformers/all-MiniLM-L6-v2",
+        image_model_name: str = "openai/clip-vit-base-patch32",
+        audio_model_name: str = "speechbrain/spkrec-ecapa-voxceleb",
+        vector_dim: int = 384
+    ):
+        self.shared_model = shared_model
+        self.vector_dim = vector_dim
+
+        # 
Initialize models based on shared_model flag
+        if shared_model:
+            self.model = SentenceTransformer(text_model_name)
+        else:
+            self.text_model = AutoModel.from_pretrained(text_model_name)
+            self.text_tokenizer = AutoTokenizer.from_pretrained(text_model_name)
+
+            self.image_model = AutoModel.from_pretrained(image_model_name)
+            self.image_processor = AutoFeatureExtractor.from_pretrained(image_model_name)
+
+            self.audio_model = AutoModel.from_pretrained(audio_model_name)
+
+        # Initialize FAISS indexes for each modality
+        self.text_index = faiss.IndexFlatL2(vector_dim)
+        self.image_index = faiss.IndexFlatL2(vector_dim)
+        self.audio_index = faiss.IndexFlatL2(vector_dim)
+
+        # Storage for items and their metadata, keyed by modality then item id
+        self.items: Dict = {
+            'text': {},
+            'image': {},
+            'audio': {}
+        }
+
+    def _encode_text(self, text: str) -> np.ndarray:
+        if self.shared_model:
+            embeddings = self.model.encode(text)
+        else:
+            inputs = self.text_tokenizer(text, return_tensors="pt", padding=True, truncation=True)
+            with torch.no_grad():
+                outputs = self.text_model(**inputs)
+            embeddings = outputs.last_hidden_state.mean(dim=1).numpy()
+        return embeddings
+
+    def _encode_image(self, image: Image.Image) -> np.ndarray:
+        if self.shared_model:
+            embeddings = self.model.encode(image)
+        else:
+            inputs = self.image_processor(image, return_tensors="pt")
+            with torch.no_grad():
+                outputs = self.image_model(**inputs)
+            embeddings = outputs.last_hidden_state.mean(dim=1).numpy()
+        return embeddings
+
+    def _encode_audio(self, audio: np.ndarray) -> np.ndarray:
+        if self.shared_model:
+            embeddings = self.model.encode(audio)
+        else:
+            # Convert audio to appropriate format for the model
+            with torch.no_grad():
+                outputs = self.audio_model(torch.from_numpy(audio).unsqueeze(0))
+            embeddings = outputs.last_hidden_state.mean(dim=1).numpy()
+        return embeddings
+
+    def add_item(
+        self,
+        item: Union[str, Image.Image, np.ndarray],
+        modality: str,
+        metadata: Optional[Dict] = None
+    ):
+        """Add an item to the vector store."""
+        if modality not in ['text', 'image', 'audio']:
+            raise ValueError(f"Unsupported modality: {modality}")
+
+        # Generate embedding based on modality
+        if modality == 'text':
+            embedding = self._encode_text(item)
+        elif modality == 'image':
+            embedding = self._encode_image(item)
+        else:  # audio
+            embedding = self._encode_audio(item)
+
+        # Add to appropriate FAISS index (FAISS expects a 2D float32 array)
+        index = getattr(self, f"{modality}_index")
+        item_id = index.ntotal
+        index.add(np.asarray(embedding, dtype="float32").reshape(1, -1))
+
+        # Store the item and metadata under its index id so search can look it up
+        self.items[modality][item_id] = {
+            'item': item,
+            'metadata': metadata or {}
+        }
+
+    def search(
+        self,
+        query: Union[str, Image.Image, np.ndarray],
+        modality: str,
+        k: int = 5,
+        cross_modal: bool = False
+    ) -> List[Dict]:
+        """Search for similar items in the vector store."""
+        if modality == 'text':
+            query_embedding = self._encode_text(query)
+        elif modality == 'image':
+            query_embedding = self._encode_image(query)
+        else:  # audio
+            query_embedding = self._encode_audio(query)
+
+        results = []
+
+        # Determine which indexes to search: every modality if cross_modal, otherwise only the query's
+        indexes_to_search = ['text', 'image', 'audio'] if cross_modal else [modality]
+
+        for idx_type in indexes_to_search:
+            index = getattr(self, f"{idx_type}_index")
+            if index.ntotal > 0:  # Only search if index has items
+                distances, indices = index.search(np.asarray(query_embedding, dtype="float32").reshape(1, -1), min(k, index.ntotal))
+
+                for dist, idx in zip(distances[0], indices[0]):
+                    results.append({
+                        'modality': idx_type,
+                        'item': self.items[idx_type][int(idx)]['item'],
+                        'metadata': self.items[idx_type][int(idx)]['metadata'],
+                        'distance': float(dist)
+                    })
+
+        # Sort results 
by distance + results.sort(key=lambda x: x['distance']) + return results[:k] + + def get_stats(self) -> Dict: + """Get statistics about the vector store.""" + return { + 'text_items': self.text_index.ntotal, + 'image_items': self.image_index.ntotal, + 'audio_items': self.audio_index.ntotal, + 'total_items': sum([ + self.text_index.ntotal, + self.image_index.ntotal, + self.audio_index.ntotal + ]) + } + + diff --git a/commune/network/subspace/key.py b/commune/network/subspace/key.py deleted file mode 100644 index c1cb6ebe..00000000 --- a/commune/network/subspace/key.py +++ /dev/null @@ -1,67 +0,0 @@ -# Python Substrate Interface Library -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import re -from hashlib import blake2b -from math import ceil - -from scalecodec.types import Bytes - -RE_JUNCTION = r'(\/\/?)([^/]+)' -JUNCTION_ID_LEN = 32 - - -class DeriveJunction: - def __init__(self, chain_code, is_hard=False): - self.chain_code = chain_code - self.is_hard = is_hard - - @classmethod - def from_derive_path(cls, path: str, is_hard=False): - - if path.isnumeric(): - byte_length = ceil(int(path).bit_length() / 8) - chain_code = int(path).to_bytes(byte_length, 'little').ljust(32, b'\x00') - - else: - path_scale = Bytes() - path_scale.encode(path) - - if len(path_scale.data) > JUNCTION_ID_LEN: - chain_code = blake2b(path_scale.data.data, digest_size=32).digest() - else: - chain_code = bytes(path_scale.data.data.ljust(32, b'\x00')) - - return cls(chain_code=chain_code, is_hard=is_hard) - - -def extract_derive_path(derive_path: str): - - path_check = '' - junctions = [] - paths = re.findall(RE_JUNCTION, derive_path) - - if paths: - path_check = ''.join(''.join(path) for path in paths) - - for path_separator, path_value in paths: - junctions.append(DeriveJunction.from_derive_path( - path=path_value, is_hard=path_separator == '//') - ) - - if path_check != derive_path: - raise ValueError('Reconstructed path "{}" does not match input'.format(path_check)) - - return junctions - diff --git a/commune/network/subspace/subspace.py b/commune/network/subspace/subspace.py index 51f3c972..10aab9de 100644 --- a/commune/network/subspace/subspace.py +++ b/commune/network/subspace/subspace.py @@ -33,7 +33,6 @@ class Subspace(c.Module): tempo = 60 blocktime = block_time = 8 blocks_per_day = 24*60*60/block_time - url_map = { "main": [ "api.communeai.net" @@ -786,6 +785,7 @@ def query_map( extract_value: bool = True, max_age=0, update=False, + block = None, block_hash: str | None = None, ) -> dict[Any, Any]: """ @@ -1054,7 +1054,7 @@ def compose_call_multisig( ) return response - + def transfer( self, key: Keypair, @@ -1434,6 +1434,14 @@ def metadata(self) -> str: metadata = sorted(metadata.items(), key=lambda x: x[0]) return {k: v for k, v in metadata} + def subnet2metadata(self) -> str: + netuids = self.netuids() + metadata = self.query_map('SubnetMetadata') + metadata = {i : metadata.get(i, None) for i in netuids} + metadata = sorted(metadata.items(), key=lambda x: x[0]) + netuid2subnet = self.netuid2subnet() + 
return {netuid2subnet.get(k): v for k, v in metadata} + # def topup_miners(self, subnet): def transfer_stake( @@ -1620,7 +1628,7 @@ def add_subnet_proposal( # general_params["burn_config"] = json.dumps(general_params["burn_config"]) response = self.compose_call( - fn="add_subnet_params_proposal", + fn="add_params_proposal", params=general_params, key=key, module="GovernanceModule", @@ -1870,27 +1878,6 @@ def curator_applications(self) -> dict[str, dict[str, str]]: ) return applications - def proposals( - self, extract_value: bool = False - ) -> dict[int, dict[str, Any]]: - """ - Retrieves a mappping of proposals from the network. - - Queries the network and returns a mapping of proposal IDs to - their respective parameters. - - Returns: - A dictionary mapping proposal IDs - to dictionaries of their parameters. - - Raises: - QueryError: If the query to the network fails or is invalid. - """ - - return self.query_map( - "Proposals", extract_value=extract_value, module="GovernanceModule" - ) - def weights(self, subnet: int = 0, extract_value: bool = False ) -> dict[int, list[tuple[int, int]]] | None: """ Retrieves a mapping of weights for keys on the network. @@ -1926,7 +1913,7 @@ def addresses( def state(self, timeout=42): futures = [] - fns = ['subnet_params', 'global_params', 'modules'] + fns = ['params', 'global_params', 'modules'] futures = [c.submit(getattr(self,fn), kwargs=dict(update=1), timeout=timeout) for fn in fns] return dict(zip(fns, c.wait(futures, timeout=timeout))) @@ -1937,7 +1924,7 @@ def subnet(self,subnet=0, update=False, max_age=60): state = self.get(path, max_age=max_age, update=update) if state == None: c.print(f"subnet_state: {path} not found") - futures = [c.submit(self.subnet_params, kwargs=dict(subnet=subnet, max_age=max_age, update=update)), + futures = [c.submit(self.params, kwargs=dict(subnet=subnet, max_age=max_age, update=update)), c.submit(self.modules, kwargs=dict(subnet=subnet, max_age=max_age, update=update))] params, modules = c.wait(futures) state = {'params': params, 'modules': modules} @@ -1986,19 +1973,19 @@ def legit_whitelist( """ return self.query_map( "LegitWhitelist", module="GovernanceModule", extract_value=extract_value) - def subnet_names(self, extract_value: bool = False, max_age=60, update=False) -> dict[int, str]: + def subnet_names(self, extract_value: bool = False, max_age=60, update=False, block=None) -> dict[int, str]: """ Retrieves a mapping of subnet names within the network. """ - subnet_names = self.query_map("SubnetNames", extract_value=extract_value, max_age=max_age, update=update) + subnet_names = self.query_map("SubnetNames", extract_value=extract_value, max_age=max_age, update=update, block=block) return {int(k):v for k,v in subnet_names.items()} - def subnet_map(self, max_age=10, update=False) -> dict[int, str]: + def subnet_map(self, max_age=10, update=False, **kwargs) -> dict[int, str]: """ Retrieves a mapping of subnet names within the network. """ - return {v:k for k,v in self.subnet_names(max_age=max_age, update=update).items()} + return {v:k for k,v in self.subnet_names(max_age=max_age, update=update, **kwargs).items()} def netuid2subnet(self, *args, **kwargs): return {v:k for k,v in self.subnet_map(*args, **kwargs).items()} @@ -2063,7 +2050,32 @@ def names( names = dict(sorted(names.items(), key=lambda x: x[0])) return names - #  == QUERY FUNCTIONS == # + + def proposal(self, proposal_id: int = 0): + """ + Queries the network for a specific proposal. 
+ """ + + return self.query( + "Proposals", + params=[proposal_id], + ) + + def proposals( + self, extract_value: bool = False + ) -> dict[int, dict[str, Any]]: + """ + Retrieves a mappping of proposals from the network. + + Returns: + A dictionary mapping proposal IDs + to dictionaries of their parameters. + + Raises: + QueryError: If the query to the network fails or is invalid. + """ + + return self.query_map( "Proposals", extract_value=extract_value, module="GovernanceModule") def dao_treasury_address(self) -> Ss58Address: return self.query("DaoTreasuryAddress", module="GovernanceModule") @@ -2097,13 +2109,6 @@ def n(self, subnet: int = 0, max_age=60, update=False ) -> int: subnet = str(subnet) return n[subnet] - def total_free_issuance(self, block_hash: str | None = None) -> int: - """ - Queries the network for the total free issuance. - """ - - return self.query("TotalIssuance", module="Balances", block_hash=block_hash) - def total_stake(self, block_hash: str | None = None) -> int: """ Retrieves a mapping of total stakes for keys on the network. @@ -2120,15 +2125,6 @@ def registrations_per_block(self): "RegistrationsPerBlock", ) - def proposal(self, proposal_id: int = 0): - """ - Queries the network for a specific proposal. - """ - - return self.query( - "Proposals", - params=[proposal_id], - ) def unit_emission(self) -> int: """ @@ -2290,7 +2286,7 @@ def params(self, subnet = None, block_hash: str | None = None, max_age=tempo, u """ Gets all subnets info on the network """ - path = f'{self.network}/subnet_params_map' + path = f'{self.network}/params_map' results = self.get(path,None, max_age=max_age, update=update) if results == None: print("Updating Subnet Params") @@ -2362,14 +2358,13 @@ def params(self, subnet = None, block_hash: str | None = None, max_age=tempo, u results[_netuid] = subnet_result self.put(path, results) results = {int(k):v for k,v in results.items()} - - if subnet != None: subnet = self.resolve_subnet(subnet) - print(subnet, results) - return results[subnet] + results = results[subnet] return results + subnet_params = params + def global_params(self, max_age=60, update=False) -> NetworkParams: """ Returns global parameters of the whole commune ecosystem @@ -2433,7 +2428,6 @@ def global_params(self, max_age=60, update=False) -> NetworkParams: self.put(path, result) return result - subnet_params = params def clean_feature_name(self, x): new_x = '' @@ -2465,14 +2459,22 @@ def my_subnets(self, key=None): # group by founder return c.df(results).sort_values('subnet') - def my_modules(self, subnet=0, max_age=60, features=['name', 'key', 'address', 'emission', 'weights', 'stake'], update=False): + + def my_modules(self, subnet="all", + max_age=60, + keys=None, + features=['name', 'key', 'address', 'emission', 'weights', 'stake'], + df = False, + update=False): + if subnet == "all": + return {sn: self.my_modules(subnet=sn, keys=ks, df=df) for sn, ks in self.keys_map().items()} subnet = self.resolve_subnet(subnet) - path = f'my_modules/{subnet}' + path = f'my_modules/{self.network}/{subnet}' modules = self.get(path, None, max_age=max_age, update=update) namespace = c.namespace() if modules == None: address2key = c.address2key() - keys = self.keys(subnet) + keys = keys or self.keys(subnet) my_keys = [] for k in keys: if k in address2key: @@ -2485,6 +2487,8 @@ def my_modules(self, subnet=0, max_age=60, features=['name', 'key', 'address', ' modules[i] = m features += ['serving'] modules = [{f:m[f] for f in features} for m in modules] + if df: + modules = c.df(modules) return 
modules def my_valis(self, subnet=0): @@ -2639,7 +2643,11 @@ def block_number(self) -> int: def keys(self, subnet=0, max_age=60) -> List[str]: subnet = self.resolve_subnet(subnet) - return list(self.query_map('Keys', params=[subnet], max_age=max_age).values()) + return self.keys_map(max_age=max_age)[int(subnet)] + + def keys_map(self, max_age=60): + return {int(k):list(v.values()) for k,v in self.query_map('Keys', params=[], max_age=max_age).items()} + def key2uid(self, subnet=0) -> int: subnet = self.resolve_subnet(subnet) return {v:k for k,v in self.query_map('Keys', params=[subnet]).items()} @@ -2685,8 +2693,8 @@ def netuids(self, update=False, block=None) -> Dict[int, str]: return list(self.netuid2subnet( update=update, block=block).keys()) def netuid2emission(self , fmt='j', **kwargs) -> Dict[str, int]: - subnet_params = self.subnet_params(**kwargs) - subnet2emission = {v:params['emission'] * self.blocks_per_day for v,params in subnet_params.items()} + params = self.params(**kwargs) + subnet2emission = {v:params['emission'] * self.blocks_per_day for v,params in params.items()} return self.format_amount(subnet2emission, fmt=fmt) def subnet2emission(self, **kwargs ) -> Dict[str, str]: diff --git a/commune/network/substrate/storage.py b/commune/network/substrate/storage.py index 370bfde2..0ecaac76 100644 --- a/commune/network/substrate/storage.py +++ b/commune/network/substrate/storage.py @@ -17,7 +17,6 @@ from typing import Any, Optional from commune.network.substrate.exceptions import StorageFunctionNotFound - from scalecodec import ScaleBytes, GenericMetadataVersioned, ss58_decode from scalecodec.base import ScaleDecoder, RuntimeConfigurationObject, ScaleType from .utils.hasher import blake2_256, two_x64_concat, xxh128, blake2_128, blake2_128_concat, identity diff --git a/commune/routes.json b/commune/routes.json deleted file mode 100644 index 58124169..00000000 --- a/commune/routes.json +++ /dev/null @@ -1,186 +0,0 @@ -{ - "vali": [ - "run_epoch", - "setup_vali", - "from_module" - ], - "py": [ - "envs", - "env2cmd", - "create_env", - "env2path" - ], - "cli": [ - "parse_args" - ], - "streamlit": [ - "set_page_config", - "load_style", - "st_load_css" - ], - "docker": [ - "containers", - "dlogs", - "images" - ], - "client": [ - "call", - "call_search", - "connect" - ], - "repo": [ - "is_repo", - "repos" - ], - "serializer": [ - "serialize", - "deserialize", - "serializer_map", - ], - "key": [ - "rename_key", - "ss58_encode", - "ss58_decode", - "key2mem", - "key_info_map", - "key_info", - "valid_ss58_address", - "valid_h160_address", - "add_key", - "from_password", - "str2key", - "pwd2key", - "getmem", - "mem", - "mems", - "switch_key", - "module_info", - "rename_kefy", - "mv_key", - "add_keys", - "key_exists", - "ls_keys", - "rm_key", - "key_encrypted", - "encrypt_key", - "get_keys", - "rm_keys", - "key2address", - "key_addresses", - "address2key", - "is_key", - "new_key", - "save_keys", - "load_key", - "load_keys", - "get_signer", - "encrypt_file", - "decrypt_file", - "get_key_for_address", - "resolve_key_address", - "ticket" - ], - "remote": [ - "host2ssh" - ], - "network": [ - "networks", - "register_server", - "deregister_server", - "server_exists", - "add_server", - "has_server", - "add_servers", - "rm_servers", - "rm_server", - "namespace", - "namespace", - "infos", - "get_address", - "servers", - "name2address" - ], - "app": [ - "start_app", - "app", - "apps", - "app2info", - "kill_app" - ], - "user": [ - "role2users", - "is_user", - "get_user", - "update_user", - "get_role", - 
"refresh_users", - "user_exists", - "is_admin", - "admins", - "add_admin", - "rm_admin", - "num_roles", - "rm_user" - ], - "server": [ - "serve", - "wait_for_server", - "endpoint", - "is_endpoint", - "fleet", - "processes", - "kill", - "kill_many", - "kill_all", - "kill_all_processes", - "logs" - ], - - "subspace": [ - "transfer_stake", - "stake_trnsfer", - "switch", - "switchnet", - "subnet", - "update_module", - "subnet_params_map", - "staketo", - "network", - "get_staketo", - "stakefrom", - "get_stakefrom", - "switch_network", - "key2balance", - "subnets", - "send", - "my_keys", - "key2value", - "transfer", - "multistake", - "stake", - "unstake", - "register", - "subnet_params", - "global_params", - "balance", - "get_balance", - "get_stak", - "get_stake_to", - "get_stake_from", - "my_stake_to", - "netuid2subnet", - "subnet2netuid", - "is_registered", - "update_subnet", - "my_subnets", - "my_netuids", - "register_subnet", - "registered_subnets", - "registered_netuids" - ], - "model.openrouter": [ - "generate", - "models" - ], - "chat": ["ask", "models", "pricing", "model2info"] -} \ No newline at end of file diff --git a/commune/server.py b/commune/server.py index 98cf057d..72db0f7e 100644 --- a/commune/server.py +++ b/commune/server.py @@ -13,14 +13,13 @@ class Server(c.Module): tag_seperator:str='::' pm2_dir = os.path.expanduser('~/.pm2') - functions_attributes : List[str] =['helper_functions', 'whitelist','endpoints','functions', 'fns', 'server_functions', 'public'] - user_functions = ['user_count', 'user_paths','user_data','user2count', 'user_path2latency','user_path2time', 'remove_user_data', 'users'] - helper_functions : List[str] = ['info', 'metadata', 'schema', 'name', 'functions','key_address', 'crypto_type','fns', 'forward', 'rate_limit'] + functions_attributes =['helper_functions', 'whitelist','endpoints','functions', 'fns', 'server_functions', 'public'] + helper_functions = ['info', 'metadata', 'schema', 'name', 'functions','key_address', 'crypto_type','fns', 'forward', 'rate_limit'] max_bytes:int = 10 * 1024 * 1024 # max bytes within the request (bytes) - allow_origins: List[str] = ["*"] # allowed origins - allow_credentials: bool =True # allow credentials - allow_methods: List[str] = ["*"] # allowed methods - allow_headers: List[str] = ["*"] # allowed headers + allow_origins = ["*"] # allowed origins + allow_credentials =True # allow credentials + allow_methods = ["*"] # allowed methods + allow_headers = ["*"] # allowed headers period : int = 3600 # the period for max_request_staleness : int = 4 # (in seconds) the time it takes for the request to be too old max_network_staleness: int = 60 # (in seconds) the time it takes for. 
the network to refresh @@ -76,8 +75,6 @@ def __init__( schema[fn] = c.fn_schema(fn_obj)['input'] else: schema[fn] = {'type': str(type(fn_obj)).split("'")[1]} - for fn in self.user_functions: - setattr(module, fn, getattr(self, fn)) if port in [None, 'None']: namespace = c.namespace() if name in namespace: @@ -94,7 +91,7 @@ def __init__( module.address = f"{module.ip}:{module.port}" module.functions = functions module.schema = dict(sorted(schema.items())) - module.info = self.get_info(module) + module.info = self.info self.network = network self.network_path = self.resolve_path(f'networks/{self.network}/state.json') self.users_path = self.resolve_path(f'users/{name}') @@ -145,6 +142,7 @@ def forward(self, fn:str, request: Request, catch_exception:bool=True) -> dict: except Exception as e: return c.detailed_error(e) module = self.module + headers = dict(request.headers.items()) address = headers.get('key', headers.get('address', None)) assert address, 'No key or address in headers' @@ -156,14 +154,14 @@ def forward(self, fn:str, request: Request, catch_exception:bool=True) -> dict: auth={'data': c.hash(data), 'time': headers['time']} signature = headers.get('signature', None) assert c.verify(auth=auth,signature=signature, address=address), 'Invalid signature' - server_signature = self.module.key.sign(headers) + server_signature = module.key.sign(headers) kwargs = dict(data.get('kwargs', {})) args = list(data.get('args', [])) data = {'args': args, 'kwargs': kwargs} is_admin = bool(c.is_admin(address) or address == self.module.key.ss58_address) if not is_admin: assert not bool(fn.startswith('__') or fn.startswith('_')), f'Function {fn} is private' - assert fn in self.module.functions , f"Function {fn} not in endpoints={self.module.functions}" + assert fn in module.functions , f"Function {fn} not in endpoints={module.functions}" count = self.user_count(address) rate_limit = self.rate_limit(fn=fn, address=address) assert count <= rate_limit, f'rate limit exceeded {count} > {rate_limit}' @@ -200,12 +198,11 @@ def generator_wrapper(generator): 'latency': latency, # the latency 'time': start_time, # the time of the request 'user_key': address, # the key of the user - 'server_key': self.module.key.ss58_address, # the key of the server + 'server_key': module.key.ss58_address, # the key of the server 'user_signature': signature, # the signature of the user 'server_signature': server_signature, # the signature of the server - 'cost': self.module.fn2cost.get(fn, 1), # the cost of the function + 'cost': module.fn2cost.get(fn, 1), # the cost of the function } - user_path = self.user_path(user_data["user_key"]) + f'/{user_data["fn"]}/{c.time()}.json' # get the user info path c.put(user_path, user_data) return result @@ -264,7 +261,7 @@ def wait_for_server(cls, time_waiting = 0 # rotating status thing - c.print(f'Waiting for {name} to start', color='cyan') + c.print(f'WAITING_FOR_SERVER(module{name})', color='cyan') while time_waiting < timeout: namespace = c.namespace(network=network, max_age=max_age) @@ -282,8 +279,9 @@ def wait_for_server(cls, # c.kill(name) raise TimeoutError(f'Waited for {timeout} seconds for {name} to start') - def get_info(self, module): + def info(self): info = {} + module = self.module info['schema'] = module.schema info['name'] = module.name info['address'] = module.address @@ -498,13 +496,15 @@ def processes(cls, search=None, **kwargs) -> List[str]: module_list = [m for m in module_list if search in m] module_list = sorted(list(set(module_list))) return module_list - + pm2ls = 
pids = procs = processes @classmethod def process_exists(cls, name:str, **kwargs) -> bool: return name in cls.processes(**kwargs) + + @classmethod def serve(cls, module: Any = None, @@ -512,13 +512,18 @@ def serve(cls, port :Optional[int] = None, # name of the server if None, it will be the module name name = None, # name of the server if None, it will be the module name remote:bool = True, # runs the server remotely (pm2, ray) - functions = None, - key = None, + functions = None, # list of functions to serve, if none, it will be the endpoints of the module + key = None, # the key for the server **extra_kwargs ): module = module or 'module' name = name or module kwargs = {**(kwargs or {}), **extra_kwargs} + + c.print(f'Serving(module={module} params={kwargs} name={name} function={functions})') + + if not isinstance(module, str): + remote = False if remote: rkwargs = {k : v for k, v in c.locals2kwargs(locals()).items() if k not in ['extra_kwargs', 'response', 'namespace']} rkwargs['remote'] = False diff --git a/commune/vali.py b/commune/vali.py index dfd6aa2a..d60e62c3 100644 --- a/commune/vali.py +++ b/commune/vali.py @@ -34,6 +34,11 @@ def __init__(self, c.thread(self.run_loop) init_vali = __init__ + + def set_key(self, key): + self.key = key or c.get_key() + return {'success': True, 'msg': 'Key set', 'key': self.key} + def set_executor(self, max_workers:int, batch_size:int, timeout:int): self.timeout = timeout or 3 self.max_workers = max_workers or c.cpu_count() * 5 diff --git a/docs/2_key_management.md b/docs/2_key_management.md deleted file mode 100644 index 78602c67..00000000 --- a/docs/2_key_management.md +++ /dev/null @@ -1,251 +0,0 @@ - -# KEY MANAGEMENT - -The key is a sr25519 key that is used to sign, encrypt, decrypt and verify any string or messege. -We can also replicate the key to other chains through using the same seed to generate the other keys. This means you can have one key instead of multiple keys for each chain, which is more convient and secure. - -![Alt text](./images/image_key.png) - -c add_key alice # c.add_key("alice") - -# Crypto Types -The crypto type is the type of key that is used. The default is 1, which is sr25519. The other type is 2, which is ed25519. -Key - -# Refreshing existing key - -c add_key alice refresh=True - - - -To list all the keys you can do so like this. - -c.keys("alice") - -or - -c keys alice - -[ - 'alice', -] - -To search for your keys you can do so like this. The search term finds all of the keys that contain the search term. - -c keys ali -[ - 'alice', - 'alice2', - 'alice3', -] - - -# Save Keys - -To save the keys to a file you can do so like this. - -c save_keys - -This saves the keys to a specific path in the config file. You can also specify the path like this. - -To sign a message you can do so like this. - -key = c.get_key("alice") - - - -Original (Substrate) signature output : - -key.sign("hello") - -hexadecimal (bytes): - -b'\xd6RV\xf4)\x88\x9aC\x99$\xe5E\xa5N=\xcf\xf4\x7f\xc7\\\xfe\xa1V\xdd\xc0 -\xfc\x1bz:\x17\xa1$[\x84Al\xb0\xee\x0b\xedg\xc2\xe7\x93\x00\xf1~}\xd2r;\x -f2\xb4.\x90\xf2k\xd1\x10\xd9\xd5\x8f\x9d\x85' - -dictionary - -{"data":"hello", -"signature":"0x7e7","public_key":"0x7cd0e327f4f6649719158892dafe766a5efd0185cb5fe17548d294f00f12661b"} - - -String Output - -This is a string that cotainers the data and signature. The seperator is used to mainly distinguish the data from the signature. 
- -{DATA}{SEPERATOR}{SIGNATURE} - - - -Signature Tickets for Temporary Tokens - -In the ticket the timestamp is taken, and the seperator is "::ticket::". - -such that the format is -timestamp::ticket::signature - -by calling - -c.ticket("alice") - -the alice key signs the current timestamp and returns the ticket. - -1713500654.659339::ticket::e0559b535129037a62947c65af35f17c50d29b4a5c31df86b069d8ada5bcbb230f4c1e996393e6721f78d88f9b512b -6493b5ca743d027091585366875c6bea8e - -now to verify the ticket you can do so like this. - -c.verify_ticket("1713500654.659339::ticket::e0559b535129037a62947c65af35f17c50d29b4a5c31df86b069d8ada5bcbb230f4c1e996393e6721f78d88f9b512b6493b5ca743d027091585366875c6bea8e") - -to get the signer - -c.ticket2signer("1713500654.659339::ticket::e0559b535129037a62947c65af35f17c50d29b4a5c31df86b069d8ada5bcbb230f4c1e996393e6721f78d88f9b512b6493b5ca743d027091585366875c6bea8e") - -To create a temperary token you can do so like this. - -Temporary Tokens using Time Stampted Signaturs: Verification Without Your Keys - -This allows for anyone to sign a timestamp, and vendors can verify the signature. This does not require the seed to be exposed, and can be used to identify key likley to be the same person. The only issue is if the staleness of the timestamp is too old. This can be adjusted by the vendor. - - - -```markdown -# Key Management - -In this tutorial, we'll explore the usage of the `commune` Python package for managing keys, balances, stakes, and key statistics. - -## Listing Keys - -To start, let's list all the available keys using the `keys()` function: - - -```bash -c keys -``` -or -```python -c.keys() -``` - -``` -[ - 'model.openrouter::replica.1', - 'model.openrouter::replica.2', - 'model.openrouter::replica.3', - 'model.openrouter::replica.4', - 'model.openrouter::replica.5', - 'model.openrouter::replica.6', - 'model.openrouter::replica.7', - 'model.openrouter::replica.8', - 'model.openrouter::replica.9' -] -``` - -## Adding and Removing Keys - -You can add and remove keys with the following steps: - -### Adding a New Key - -To add a new key, use the `add_key()` function: - -```python -c.add_key('fam') -``` -or - -```bash -c add_key fam -``` - -## Getting Key Info - -You can also retrieve key info using the `key_info()` function: - -```python -c.key_info('fam') # Replace 'fam' with the key name - -``` -{ - 'crypto_type': 1, - 'seed_hex': '6a363df4c2b7eaeb0b13efedbd37308d2bda3be8bc8aa758ecc00eb3089f7b97', - 'derive_path': None, - 'path': 'fam', - 'ss58_format': 42, - 'public_key': '38199493328ca2224364c77204ee61008a9cab5a8246906201357ef056b82142', - 'key': '5DLG8wM2beoHcveKEXxuh2NRgh55vRRx8b1PE4Ch3ZE8fndL', - 'private_key': -'d8e1c3d46f813eafac0d44481737e87b06241ba9cb5d6f760f8d62df48be450d2a84dcdfe506f218bc6646fe8 -9daa1c1d1fd7af3a64ea0f3e8a73cc766743aa1', - 'mnemonic': 'typical piece chair oven lift trap case current tomorrow wrap motor -light' -} -``` - - - -### Removing a Key - -To remove a key, use the `rm_key()` function: - -```python -c.rm_key('demo') # Replace 'demo' with the key you want to remove -``` - -## Saving and Loading Keys - -You can save and load keys for future use: - -### Saving Keys - -To save the keys, use the `save_keys()` function: - -```python -c.save_keys(path='./keys.json') # save the key mnemonics to this file -``` - -### Loading Keys - -To load the saved keys, use the `load_keys()` function: - -```python -c.load_keys('./keys.json') -``` - - -# SUBSPACE # - - -## Retrieving Balances and Stakes - -You can retrieve balance and stake 
information for a specific key: - -### Balance - -To get the balance for a key, use the `get_balance()` function: - -```python -c.get_balance('fam') # Replace 'fam' with the key name -``` -or -```bash -c balance fam -``` - -### Get stake of the Key - -To get the stake for a key, use the `get_stake()` function: - -```bash -c get_stake fam # Replace 'fam' with the key name or the address -``` - -```python -c.get_stake('fam', netuid='text') # Replace 'fam' with the key name -``` - -### Get Registered Keys - - - diff --git a/docs/3_cli_basics.md b/docs/3_cli_basics.md deleted file mode 100644 index e0b8a3c9..00000000 --- a/docs/3_cli_basics.md +++ /dev/null @@ -1,179 +0,0 @@ -# CLI BASICS - -We have a pythonic cli for commune, which is a wrapper around the `c.Module` library. This is a simple way to interact with the commune library. This does not need to be formated like argparse, and is more like a pythonic cli, where you can test out the functions and modules. - -```bash -c {module_name}/{function_name} *args **kwargs -# positional arguments and keyword arguments are accepted as if you were calling a function in python -``` - -so - -module = c.module("module") -module.ls("./") - -is the same as - -```bash -c module/ls ./ -``` - -## Pythoni -You do not need to specify the module when calling the root (name=module) module. -```bash -c {function_name} *args **kwargs -``` -Example - - -For example, the following command: -```bash -c ls ./ # -``` -is the same as -```bash -c module/ls ./ # calls the ls fn=ls module=module -``` - -and -```python -import commune as c -c.ls('./') -``` - -To make a new module -``` -c new_module agi -``` -```python -c.new_module("agi") -``` - - -This will create a new module called `agi` in the `modules` directory. -This will be located in - -to get the config of the model.agi module, you can use the following command: - -```bash -c agi/config -``` -if you dont have a config or yaml file, the key word arguments will be used as the config. - -This is the same as the following python code: -```python - -import commune as c -c.module("agi").config() -``` - - -To get the code -```bash -c agi/code -``` - -```python - -import commune as c - -class Agi(c.Module): - def __init__(self, a=1, b=2): - self.set_config(locals()) - - def call(self, x:int = 1, y:int = 2) -> int: - c.print(self.config) - c.print(self.config, 'This is the config, it is a Munch object') - return x + y - - -``` - -to get the config, which is a yaml, or the key word arguments of the __init__ -```bash -c agi/config -``` - - - - - - - - - - -The template for the cli is as follows: -```bash -c model.openai/forward text="sup" -``` -or -```bash - -c {module_name}/ -``` - -For example, the following command: - - -```python -import commune as c -c.modules("model") -``` - -is the same as - -```bash -c modules model -``` - -## Using the cli to interact with your modules - -You can use the cli to interact with your modules. For example, if you have a module called `demo`, you can use the cli to interact with it. - -For instance, to get the config of the model.openai module, you can use the following command: - -```bash -c model.openai/config -``` - -This is the same as the following python code: - -```python -import commune as c -c.module("model.openai").config() -``` - - -## Serving - -You can also serve your modules using the cli. 
## Serving

You can also serve your modules using the CLI. For example, if you have a module called `demo`, you can serve it using the following command:

```bash
c demo/serve tag=latest
```

This is the same as the following Python code:

```python
import commune as c
c.module("demo").serve(tag="latest")
```

# Misc

`c run-epoch` is the same as `c run_epoch`: every `-` in a function name is converted to `_`.

To serve a module,

```bash
c serve "model.openai"
```

is the same as

```python
c.serve("model.openai")
```

diff --git a/docs/4_running_tests.md b/docs/4_running_tests.md
deleted file mode 100644
index fd40910d..00000000
--- a/docs/4_running_tests.md
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
diff --git a/docs/0_install.md b/docs/_install.md
similarity index 99%
rename from docs/0_install.md
rename to docs/_install.md
index 2030715e..abfdc4b0 100644
--- a/docs/0_install.md
+++ b/docs/_install.md
@@ -116,4 +116,3 @@ c port_range
 
 c set_port_range 8000 9000 # set the port range to 8000-9000
 ```
-```
diff --git a/docs/1_what_is_a_module.md b/docs/_intro.md
similarity index 100%
rename from docs/1_what_is_a_module.md
rename to docs/_intro.md
diff --git a/docs/modules/cli.md b/docs/cli.md
similarity index 100%
rename from docs/modules/cli.md
rename to docs/cli.md
diff --git a/docs/modules/key.md b/docs/key.md
similarity index 100%
rename from docs/modules/key.md
rename to docs/key.md
diff --git a/docs/modules/module.md b/docs/module.md
similarity index 90%
rename from docs/modules/module.md
rename to docs/module.md
index 40936fda..db1e6f00 100644
--- a/docs/modules/module.md
+++ b/docs/module.md
@@ -1,4 +1,4 @@
-A module is a colleciton of functions as well as data or state
+xA module is a colleciton of functions as well as data or state
 variables. The functions can be called from the command line interface or through the API. The data or state variables can be accessed and modified through the API. There is no blockchain or database in the module. The module is a simple way to organize code and data. To define a module, just define a class To make a module create a name
diff --git a/docs/modules/serializer.md b/docs/modules/serializer.md
deleted file mode 100644
index f8244f77..00000000
--- a/docs/modules/serializer.md
+++ /dev/null
@@ -1,37 +0,0 @@

The serializer is responsible for making sure an object is JSON serializable.

The rules are simple:

- If the object is a dictionary, we iterate over the keys and values and serialize them.
- If a value is a dictionary, we recursively put that dictionary through the serializer.
- If a value is not a dictionary, we check whether it is JSON serializable.

Default JSON serializable types are:
- str
- int
- float
- bool
- None

Adding a new type is simple. Just add the type to the `SERIALIZABLE_TYPES` list in the `Serializer` class.

If a value is not JSON serializable, we raise a `NotSerializableError` exception.

The serializer is used in the `Commune` class to serialize an object before it is saved to the database.

```python
# File: commune/serializer/serializer.py
# '{type}' is a placeholder for the concrete type name being registered.
from typing import Any, Dict, Union

def serialize_{type}(obj: {type}) -> Dict[str, Any]:
    return {{"value": obj.value}}

def deserialize_{type}(data: Dict[str, Any]) -> {type}:
    return {type}(data["value"])
```

Now when that type is encountered, the serializer will use the `serialize_{type}` and `deserialize_{type}` functions to serialize and deserialize the object.
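To make the template above concrete, here is a minimal sketch of what a registered pair could look like for Python's built-in `complex` type; the type choice is only an illustration, and the `{"value": ...}` payload follows the template:

```python
from typing import Any, Dict

def serialize_complex(obj: complex) -> Dict[str, Any]:
    # store the real and imaginary parts as plain JSON-serializable floats
    return {"value": [obj.real, obj.imag]}

def deserialize_complex(data: Dict[str, Any]) -> complex:
    real, imag = data["value"]
    return complex(real, imag)
```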
diff --git a/docs/modules/network.md b/docs/network.md
similarity index 100%
rename from docs/modules/network.md
rename to docs/network.md
diff --git a/docs/modules/server.md b/docs/server.md
similarity index 52%
rename from docs/modules/server.md
rename to docs/server.md
index 98effa7a..51da63f0 100644
--- a/docs/modules/server.md
+++ b/docs/server.md
@@ -65,3 +65,43 @@ c.restart('demo') # Restart the module which will run back on the same port
 
 ```
+
+SERIALIZER
+
+The serializer is responsible for making sure an object is JSON serializable.
+
+The rules are simple:
+
+- If the object is a dictionary, we iterate over the keys and values and serialize them.
+- If a value is a dictionary, we recursively put that dictionary through the serializer.
+- If a value is not a dictionary, we check whether it is JSON serializable.
+
+Default JSON serializable types are:
+- str
+- int
+- float
+- bool
+- None
+
+Adding a new type is simple. Just add the type to the `SERIALIZABLE_TYPES` list in the `Serializer` class.
+
+If a value is not JSON serializable, we raise a `NotSerializableError` exception.
+
+The serializer is used in the `Commune` class to serialize an object before it is saved to the database.
+
+```python
+# File: commune/serializer/serializer.py
+# '{type}' is a placeholder for the concrete type name being registered.
+from typing import Any, Dict, Union
+
+def serialize_{type}(obj: {type}) -> Dict[str, Any]:
+    return {{"value": obj.value}}
+
+def deserialize_{type}(data: Dict[str, Any]) -> {type}:
+    return {type}(data["value"])
+```
+
+Now when that type is encountered, the serializer will use the `serialize_{type}` and `deserialize_{type}` functions to serialize and deserialize the object.
diff --git a/docs/modules/vali.md b/docs/vali.md
similarity index 100%
rename from docs/modules/vali.md
rename to docs/vali.md
diff --git a/scripts/REAMDE.md b/scripts/REAMDE.md
deleted file mode 100644
index e69de29b..00000000
diff --git a/setup.py b/setup.py
index b6e6397a..d3478895 100644
--- a/setup.py
+++ b/setup.py
@@ -3,19 +3,16 @@
 from os import path
 from io import open
 from pkg_resources import parse_requirements
-libname = 'commune'
 here = path.abspath(path.dirname(__file__))
-
+libname = here.split('/')[-1] # name of the package is assumed to be the name of the directory
 with open(f'{here}/README.md', encoding='utf-8') as f:
     long_description = f.read()
-
 with open(f'{here}/requirements.txt') as requirements_file:
     install_requires = [str(requirement) for requirement in parse_requirements(requirements_file)]
-
 setup(
     name=libname,
-    version='0.0.1',
-    description='A package for building and deploying modules',
+    version='1.0.0',
+    description='a way for connecting and verifying tools for the global toolbox',
     long_description=long_description,
     long_description_content_type='text/markdown',
     url='https://github.com/commune-ai/commune',
@@ -23,22 +20,8 @@
     packages=find_packages(),
     include_package_data=True,
     author_email='',
-    license='AGIDOESNTCAREABOUTYOURLISCENCES',
+    license='IDGAF License, Do What You Want, I wont sue you',
     install_requires=install_requires,
-    entry_points={
-        'console_scripts': [
-            f'{libname[0]}={libname}.cli:main'
-        ],
-    },
-    classifiers=[
-        'Intended Audience :: Developers',
-        'Topic :: Software Development :: Build Tools',
-        "AGIDOESNTCAREABOUTYOURLISCENCES"
-        # Pick your license as you wish
-        'License :: IDGAF License, Do What You Want, I wont sue you',
-        'Programming Language :: Python :: 3.11',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.8',
-    ],
+    entry_points={'console_scripts': [f'{libname[0]}={libname}.cli:main']},
+    classifiers=['FUCK SHIT UP'],
     python_requires='>=3.8')
\ No newline at end of file
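The new `libname` line in setup.py derives the package name from the checkout directory. Here is a minimal sketch of what it evaluates to, assuming the repository sits in a directory named `commune` on a POSIX path (the path value below is hypothetical):

```python
from os import path

here = '/home/user/commune'             # hypothetical value of path.abspath(path.dirname(__file__))
libname = here.split('/')[-1]           # -> 'commune'
assert libname == path.basename(here)   # basename gives the same result without assuming '/' separators
```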
diff --git a/tests/test_subspace.py b/tests/test_subspace.py
index bf700a59..a7365cf8 100644
--- a/tests/test_subspace.py
+++ b/tests/test_subspace.py
@@ -9,7 +9,7 @@ def test_global_params():
 def test_subnet_params(subnet=0):
     self = c.module('subspace')()
     subnet_params = self.subnet_params(subnet=subnet)
-    assert isinstance(subnet_params, dict)
+    assert isinstance(subnet_params, dict), f'{subnet_params} is not a dict'
     return {'msg': 'subnet_params test passed', 'success': True}
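A minimal way to exercise the updated assertion locally, assuming `pytest` and the package requirements are installed and the command is run from the repository root:

```bash
pytest tests/test_subspace.py -q
```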