diff --git a/Makefile b/Makefile index bb3ad7984677..43055ffa0e1d 100644 --- a/Makefile +++ b/Makefile @@ -743,7 +743,11 @@ update-wss-proxy-version: cd plugins/wss-proxy && $(MAKE) upgrade-version update-poetry-lock: - poetry update clnrest wss-proxy pyln-client pyln-proto pyln-testing + poetry update clnrest wss-proxy pyln-client pyln-proto pyln-testing update-reckless-version + +update-reckless-version: + @if [ -z "$(NEW_VERSION)" ]; then echo "Set NEW_VERSION!" >&2; exit 1; fi + @sed -i "s/__VERSION__ = '\([.-z]*\)'/__VERSION__ = '$(NEW_VERSION)'/" tools/reckless update-mocks: $(ALL_TEST_PROGRAMS:%=update-mocks/%.c) diff --git a/tests/test_reckless.py b/tests/test_reckless.py index bca922e013f7..8f32447b014b 100644 --- a/tests/test_reckless.py +++ b/tests/test_reckless.py @@ -111,7 +111,8 @@ def get_reckless_node(node_factory): def check_stderr(stderr): def output_okay(out): for warning in ['[notice]', 'WARNING:', 'npm WARN', - 'npm notice', 'DEPRECATION:', 'Creating virtualenv']: + 'npm notice', 'DEPRECATION:', 'Creating virtualenv', + 'config file not found:', 'press [Y]']: if out.startswith(warning): return True return False diff --git a/tools/reckless b/tools/reckless index ac6247673b96..f029daafd53b 100755 --- a/tools/reckless +++ b/tools/reckless @@ -21,16 +21,89 @@ from urllib.error import HTTPError import venv +__VERSION__ = '24.08' + logging.basicConfig( - level=logging.DEBUG, + level=logging.INFO, format='[%(asctime)s] %(levelname)s: %(message)s', handlers=[logging.StreamHandler(stream=sys.stdout)], ) +LAST_FOUND = None + + +class Logger: + """Redirect logging output to a json object or stdout as appropriate.""" + def __init__(self, capture: bool = False): + self.json_output = {"result": [], + "log": []} + self.capture = capture + + def str_esc(self, raw_string: str) -> str: + assert isinstance(raw_string, str) + return json.dumps(raw_string)[1:-1] + + def debug(self, to_log: str): + assert isinstance(to_log, str) or hasattr(to_log, "__repr__") + if 
logging.root.level > logging.DEBUG: + return + if self.capture: + self.json_output['log'].append(self.str_esc(f"DEBUG: {to_log}")) + else: + logging.debug(to_log) + + def info(self, to_log: str): + assert isinstance(to_log, str) or hasattr(to_log, "__repr__") + if logging.root.level > logging.INFO: + return + if self.capture: + self.json_output['log'].append(self.str_esc(f"INFO: {to_log}")) + else: + print(to_log) + + def warning(self, to_log: str): + assert isinstance(to_log, str) or hasattr(to_log, "__repr__") + if logging.root.level > logging.WARNING: + return + if self.capture: + self.json_output['log'].append(self.str_esc(f"WARNING: {to_log}")) + else: + logging.warning(to_log) + + def error(self, to_log: str): + assert isinstance(to_log, str) or hasattr(to_log, "__repr__") + if logging.root.level > logging.ERROR: + return + if self.capture: + self.json_output['log'].append(self.str_esc(f"ERROR: {to_log}")) + else: + logging.error(to_log) + + def add_result(self, result: Union[str, None]): + assert json.dumps(result), "result must be json serializable" + self.json_output["result"].append(result) + + def reply_json(self): + """json output to stdout with accumulated result.""" + if len(log.json_output["result"]) == 1 and \ + isinstance(log.json_output["result"][0], list): + # unpack sources output + log.json_output["result"] = log.json_output["result"][0] + print(json.dumps(log.json_output, indent=3)) + + +log = Logger() repos = ['https://github.com/lightningd/plugins'] +def reckless_abort(err: str): + log.error(err) + log.add_result(None) + log.reply_json() + sys.exit(1) + + def py_entry_guesses(name) -> list: return [name, f'{name}.py', '__init__.py'] @@ -52,13 +125,12 @@ class Installer: The identification of a plugin language, compiler or interpreter availability, and the install procedures. 
''' - def __init__(self, name: str, mimetype: str, + def __init__(self, name: str, exe: Union[str, None] = None, compiler: Union[str, None] = None, manager: Union[str, None] = None, entry: Union[str, None] = None): self.name = name - self.mimetype = mimetype self.entries = [] if entry: self.entries.append(entry) @@ -69,7 +141,7 @@ class Installer: self.dependency_call = None def __repr__(self): - return (f'') def executable(self) -> bool: @@ -123,12 +195,12 @@ class InstInfo: def __init__(self, name: str, location: str, git_url: str): self.name = name self.source_loc = str(location) # Used for 'git clone' - self.git_url = git_url # API access for github repos - self.srctype = Source.get_type(location) - self.entry = None # relative to source_loc or subdir - self.deps = None - self.subdir = None - self.commit = None + self.git_url: str = git_url # API access for github repos + self.srctype: Source = Source.get_type(location) + self.entry: SourceFile = None # relative to source_loc or subdir + self.deps: str = None + self.subdir: str = None + self.commit: str = None def __repr__(self): return (f'InstInfo({self.name}, {self.source_loc}, {self.git_url}, ' @@ -185,7 +257,7 @@ class InstInfo: self.entry = found_entry.name self.deps = found_dep.name return sub - logging.debug(f"missing dependency for {self}") + log.debug(f"missing dependency for {self}") found_entry = None for file in sub.contents: if isinstance(file, SourceDir): @@ -206,9 +278,9 @@ class InstInfo: # clone source to reckless dir target = copy_remote_git_source(self) if not target: - logging.warning(f"could not clone github source {self}") + log.warning(f"could not clone github source {self}") return False - logging.debug(f"falling back to cloning remote repo {self}") + log.debug(f"falling back to cloning remote repo {self}") # Update to reflect use of a local clone self.source_loc = target.location self.srctype = target.srctype @@ -246,9 +318,10 @@ def remove_dir(directory: str) -> bool: 
shutil.rmtree(directory) return True except NotADirectoryError: - print(f"Tried to remove directory {directory} that does not exist.") + log.warning(f"Tried to remove directory {directory} that " + "does not exist.") except PermissionError: - print(f"Permission denied removing dir: {directory}") + log.warning(f"Permission denied removing dir: {directory}") return False @@ -309,7 +382,6 @@ class SourceDir(): return if not self.srctype: self.srctype = Source.get_type(self.location) - # logging.debug(f"populating {self.srctype} {self.location}") if self.srctype == Source.DIRECTORY: self.contents = populate_local_dir(self.location) elif self.srctype in [Source.LOCAL_REPO, Source.GIT_LOCAL_CLONE]: @@ -394,7 +466,7 @@ def populate_local_repo(path: str, parent=None) -> list: This populates all intermediate directories and the file.""" parentdir = parent if mypath == '.': - logging.debug(' asked to populate root dir') + log.debug(' asked to populate root dir') return # reverse the parents pdirs = mypath @@ -431,7 +503,7 @@ def populate_local_repo(path: str, parent=None) -> list: proc = run(['git', '-C', path, 'submodule', 'status'], stdout=PIPE, stderr=PIPE, text=True, timeout=5) if proc.returncode != 0: - logging.debug(f"'git submodule status' of repo {path} failed") + log.debug(f"'git submodule status' of repo {path} failed") return None submodules = [] for sub in proc.stdout.splitlines(): @@ -443,7 +515,7 @@ def populate_local_repo(path: str, parent=None) -> list: '--name-only', ver] proc = run(git_call, stdout=PIPE, stderr=PIPE, text=True, timeout=5) if proc.returncode != 0: - logging.debug(f'ls-tree of repo {path} failed') + log.debug(f'ls-tree of repo {path} failed') return None for filepath in proc.stdout.splitlines(): @@ -482,7 +554,7 @@ def source_element_from_repo_api(member: dict): # git_url with /tree/ presents results a little differently elif 'type' in member and 'path' in member and 'url' in member: if member['type'] not in ['tree', 'blob']: - 
logging.debug(f' skipping {member["path"]} type={member["type"]}') + log.debug(f' skipping {member["path"]} type={member["type"]}') if member['type'] == 'tree': return SourceDir(member['url'], srctype=Source.GITHUB_REPO, name=member['path']) @@ -531,7 +603,7 @@ def populate_github_repo(url: str) -> list: git_url = api_url if "api.github.com" in git_url: # This lets us redirect to handle blackbox testing - logging.debug(f'fetching from gh API: {git_url}') + log.debug(f'fetching from gh API: {git_url}') git_url = (API_GITHUB_COM + git_url.split("api.github.com")[-1]) # Ratelimiting occurs for non-authenticated GH API calls at 60 in 1 hour. r = urlopen(git_url, timeout=5) @@ -552,14 +624,14 @@ def copy_remote_git_source(github_source: InstInfo): """clone or fetch & checkout a local copy of a remote git repo""" user, repo = Source.get_github_user_repo(github_source.source_loc) if not user or not repo: - logging.warning('could not extract github user and repo ' - f'name for {github_source.source_loc}') + log.warning('could not extract github user and repo ' + f'name for {github_source.source_loc}') return None local_path = RECKLESS_DIR / '.remote_sources' / user create_dir(RECKLESS_DIR / '.remote_sources') if not create_dir(local_path): - logging.warning(f'could not provision dir {local_path} to ' - f'clone remote source {github_source.source_loc}') + log.warning(f'could not provision dir {local_path} to ' + f'clone remote source {github_source.source_loc}') return None local_path = local_path / repo if local_path.exists(): @@ -586,13 +658,17 @@ class Config(): with open(config_path, 'r+') as f: config_content = f.readlines() return config_content + # redirecting the prompts to stderr is kinder for json consumers + tmp = sys.stdout + sys.stdout = sys.stderr print(f'config file not found: {config_path}') if warn: confirm = input('press [Y] to create one now.\n').upper() == 'Y' else: confirm = True + sys.stdout = tmp if not confirm: - sys.exit(1) + reckless_abort(f"config 
file required: {config_path}") parent_path = Path(config_path).parent # Create up to one parent in the directory tree. if create_dir(parent_path): @@ -601,8 +677,8 @@ class Config(): # FIXME: Handle write failure return default_text else: - logging.debug('could not create the parent directory ' + - parent_path) + log.warning('could not create the parent directory ' + f'{parent_path}') raise FileNotFoundError('invalid parent directory') def editConfigFile(self, addline: Union[str, None], @@ -699,6 +775,10 @@ class LightningBitcoinConfig(Config): default_text=default_text, warn=warn) +class NotFoundError(Exception): + """Raised by InferInstall when a source/entrypoint cannot be located.""" + + class InferInstall(): """Once a plugin is installed, we may need its directory and entrypoint""" def __init__(self, name: str): @@ -727,7 +807,8 @@ class InferInstall(): actual_name = reck_contents_lower[name.lower()] self.dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name) else: - raise Exception(f"Could not find a reckless directory for {name}") + raise NotFoundError("Could not find a reckless directory " + f"for {name}") plug_dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name) for guess in entry_guesses(actual_name): for content in plug_dir.iterdir(): @@ -735,7 +816,7 @@ class InferInstall(): self.entry = str(content) self.name = actual_name return - raise Exception(f'plugin entrypoint not found in {self.dir}') + raise NotFoundError(f'plugin entrypoint not found in {self.dir}') class InstallationFailure(Exception): @@ -746,16 +827,12 @@ def create_python3_venv(staged_plugin: InstInfo) -> InstInfo: "Create a virtual environment, install dependencies and test plugin."
env_path = Path('.venv') env_path_full = Path(staged_plugin.source_loc) / env_path - plugin_path = Path(staged_plugin.source_loc) / 'source' - - # subdir should always be None at this point - if staged_plugin.subdir: - logging.warning("cloned plugin contains subdirectory") - plugin_path = plugin_path / staged_plugin.subdir + assert staged_plugin.subdir # relative dir of original source + plugin_path = Path(staged_plugin.source_loc) / staged_plugin.subdir if shutil.which('poetry') and staged_plugin.deps == 'pyproject.toml': - logging.debug('configuring a python virtual environment (poetry) in ' - f'{env_path_full}') + log.debug('configuring a python virtual environment (poetry) in ' + f'{env_path_full}') # The virtual environment should be located with the plugin. # This installs it to .venv instead of in the global location. mod_poetry_env = os.environ @@ -774,7 +851,8 @@ def create_python3_venv(staged_plugin: InstInfo) -> InstInfo: # Avoid redirecting stdout in order to stream progress. # Timeout excluded as armv7 grpcio build/install can take 1hr. 
pip = run(['poetry', 'install', '--no-root'], check=False, - cwd=staged_plugin.source_loc, env=mod_poetry_env) + cwd=staged_plugin.source_loc, env=mod_poetry_env, + stdout=stdout_redirect, stderr=stderr_redirect) (Path(staged_plugin.source_loc) / 'pyproject.toml').unlink() (Path(staged_plugin.source_loc) / 'poetry.lock').unlink() @@ -782,25 +860,25 @@ def create_python3_venv(staged_plugin: InstInfo) -> InstInfo: else: builder = venv.EnvBuilder(with_pip=True) builder.create(env_path_full) - logging.debug('configuring a python virtual environment (pip) in ' - f'{env_path_full}') - logging.debug(f'virtual environment created in {env_path_full}.') + log.debug('configuring a python virtual environment (pip) in ' + f'{env_path_full}') + log.debug(f'virtual environment created in {env_path_full}.') if staged_plugin.deps == 'pyproject.toml': pip = run(['bin/pip', 'install', str(plugin_path)], check=False, cwd=plugin_path) elif staged_plugin.deps == 'requirements.txt': pip = run([str(env_path_full / 'bin/pip'), 'install', '-r', str(plugin_path / 'requirements.txt')], - check=False, cwd=plugin_path) + check=False, cwd=plugin_path, + stdout=stdout_redirect, stderr=stderr_redirect) else: - logging.debug("no python dependency file") + log.debug("no python dependency file") if pip and pip.returncode != 0: - logging.debug("install to virtual environment failed") - print('error encountered installing dependencies') + log.error('error encountered installing dependencies') raise InstallationFailure staged_plugin.venv = env_path - print('dependencies installed successfully') + log.info('dependencies installed successfully') return staged_plugin @@ -813,8 +891,10 @@ def create_wrapper(plugin: InstInfo): wrapper.write((f"#!{venv_full_path}/bin/python\n" "import sys\n" "import runpy\n\n" - f"if '{plugin.source_loc}/source' not in sys.path:\n" - f" sys.path.append('{plugin.source_loc}/source')\n" + f"if '{plugin.source_loc}/{plugin.subdir}' not in " + "sys.path:\n" + f" 
sys.path.append('{plugin.source_loc}/" + f"{plugin.subdir}')\n" f"if '{plugin.source_loc}' in sys.path:\n" f" sys.path.remove('{plugin.source_loc}')\n" f"runpy.run_module(\"{plugin.name}\", " @@ -827,34 +907,67 @@ def install_to_python_virtual_environment(cloned_plugin: InstInfo): '''Called during install in place of a subprocess.run list''' # Delete symlink so that a venv wrapper can take it's place (Path(cloned_plugin.source_loc) / cloned_plugin.entry).unlink() - # The original entrypoint is imported as a python module - ensure - # it has a .py extension. The wrapper can keep the original naming. - entry = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.entry - entry.rename(entry.with_suffix('.py')) create_python3_venv(cloned_plugin) if not hasattr(cloned_plugin, 'venv'): raise InstallationFailure - logging.debug('virtual environment for cloned plugin: ' - f'{cloned_plugin.venv}') + log.debug('virtual environment for cloned plugin: ' + f'{cloned_plugin.venv}') create_wrapper(cloned_plugin) return cloned_plugin -python3venv = Installer('python3venv', 'text/x-python', exe='python3', +def cargo_installation(cloned_plugin: InstInfo): + call = ['cargo', 'build', '--release', '-vv'] + # FIXME: the symlinked Cargo.toml allows the installer to identify a valid + # plugin directory, but is unneeded, and actually confuses cargo if not + # removed prior to installing. 
+ cargo_toml_path = Path(cloned_plugin.source_loc) / 'Cargo.toml' + if cargo_toml_path.exists(): + cargo_toml_path.unlink() + + # source_loc now contains a symlink to the entrypoint and 'source/plugin/' + source = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.name + log.debug(f'cargo installing from {source}') + if logging.root.level < logging.INFO and not log.capture: + cargo = run(call, cwd=str(source), text=True) + else: + cargo = run(call, cwd=str(source), stdout=PIPE, + stderr=PIPE, text=True) + + if cargo.returncode == 0: + log.debug('rust project compiled successfully') + else: + log.error(cargo.stderr if cargo.stderr else + 'error encountered during build, cargo exited with return ' + f'code {cargo.returncode}') + + log.debug(f'removing {cloned_plugin.source_loc}') + remove_dir(cloned_plugin.source_loc) + raise InstallationFailure + + # We do need to symlink to the executable binary though. + (Path(cloned_plugin.source_loc) / cloned_plugin.name).\ + symlink_to(source / f'target/release/{cloned_plugin.name}') + cloned_plugin.entry = cloned_plugin.name + + return cloned_plugin + + +python3venv = Installer('python3venv', exe='python3', manager='pip', entry='{name}.py') python3venv.add_entrypoint('{name}') python3venv.add_entrypoint('__init__.py') python3venv.add_dependency_file('requirements.txt') python3venv.dependency_call = install_to_python_virtual_environment -poetryvenv = Installer('poetryvenv', 'text/x-python', exe='python3', +poetryvenv = Installer('poetryvenv', exe='python3', manager='poetry', entry='{name}.py') poetryvenv.add_entrypoint('{name}') poetryvenv.add_entrypoint('__init__.py') poetryvenv.add_dependency_file('pyproject.toml') poetryvenv.dependency_call = install_to_python_virtual_environment -pyprojectViaPip = Installer('pyprojectViaPip', 'text/x-python', exe='python3', +pyprojectViaPip = Installer('pyprojectViaPip', exe='python3', manager='pip', entry='{name}.py') pyprojectViaPip.add_entrypoint('{name}') 
pyprojectViaPip.add_entrypoint('__init__.py') @@ -863,20 +976,25 @@ pyprojectViaPip.dependency_call = install_to_python_virtual_environment # Nodejs plugin installer -nodejs = Installer('nodejs', 'application/javascript', exe='node', +nodejs = Installer('nodejs', exe='node', manager='npm', entry='{name}.js') nodejs.add_entrypoint('{name}') nodejs.add_dependency_call(['npm', 'install', '--omit=dev']) nodejs.add_dependency_file('package.json') -INSTALLERS = [python3venv, poetryvenv, pyprojectViaPip, nodejs] +# This entrypoint is used to identify a candidate directory, don't call it. +rust_cargo = Installer('rust', manager='cargo', entry='Cargo.toml') +rust_cargo.add_dependency_file('Cargo.toml') +rust_cargo.dependency_call = cargo_installation + +INSTALLERS = [python3venv, poetryvenv, pyprojectViaPip, nodejs, rust_cargo] def help_alias(targets: list): if len(targets) == 0: parser.print_help(sys.stdout) else: - print('try "reckless {} -h"'.format(' '.join(targets))) + log.info('try "reckless {} -h"'.format(' '.join(targets))) sys.exit(1) @@ -897,8 +1015,8 @@ def _source_search(name: str, src: str) -> Union[InstInfo, None]: if local_clone_location.exists(): # Make sure it's the correct remote source and fetch any updates. 
if _git_update(source, local_clone_location): - logging.debug(f"Using local clone of {src}: " - f"{local_clone_location}") + log.debug(f"Using local clone of {src}: " + f"{local_clone_location}") source.source_loc = local_clone_location source.srctype = Source.GIT_LOCAL_CLONE @@ -908,7 +1026,7 @@ def _source_search(name: str, src: str) -> Union[InstInfo, None]: def _git_clone(src: InstInfo, dest: Union[PosixPath, str]) -> bool: - print(f'cloning {src.srctype} {src}') + log.info(f'cloning {src.srctype} {src}') if src.srctype == Source.GITHUB_REPO: assert 'github.com' in src.source_loc source = f"{GITHUB_COM}" + src.source_loc.split("github.com")[-1] @@ -918,13 +1036,13 @@ def _git_clone(src: InstInfo, dest: Union[PosixPath, str]) -> bool: else: return False git = run(['git', 'clone', '--recurse-submodules', source, str(dest)], - stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=60) + stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=180) if git.returncode != 0: for line in git.stderr.splitlines(): - logging.debug(line) + log.debug(line) if Path(dest).exists(): remove_dir(str(dest)) - print('Error: Failed to clone repo') + log.error('Failed to clone repo') return False return True @@ -955,8 +1073,8 @@ def _git_update(github_source: InstInfo, local_copy: PosixPath): return False default_branch = git.stdout.splitlines()[0] if default_branch != 'origin/master': - logging.debug(f'UNUSUAL: fetched default branch {default_branch} for ' - f'{github_source.source_loc}') + log.debug(f'UNUSUAL: fetched default branch {default_branch} for ' + f'{github_source.source_loc}') # Checkout default branch git = run(['git', 'checkout', default_branch], @@ -1002,31 +1120,31 @@ def _checkout_commit(orig_src: InstInfo, if orig_src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO, Source.OTHER_URL, Source.GIT_LOCAL_CLONE]: if orig_src.commit: - logging.debug(f"Checking out {orig_src.commit}") + log.debug(f"Checking out {orig_src.commit}") checkout = Popen(['git', 
'checkout', orig_src.commit], cwd=str(cloned_path), stdout=PIPE, stderr=PIPE) checkout.wait() if checkout.returncode != 0: - print('failed to checkout referenced ' - f'commit {orig_src.commit}') + log.warning('failed to checkout referenced ' + f'commit {orig_src.commit}') return None else: - logging.debug("using latest commit of default branch") + log.debug("using latest commit of default branch") # Log the commit we actually used (for installation metadata) git = run(['git', 'rev-parse', 'HEAD'], cwd=str(cloned_path), stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=60) if git.returncode == 0: head_commit = git.stdout.splitlines()[0] - logging.debug(f'checked out HEAD: {head_commit}') + log.debug(f'checked out HEAD: {head_commit}') cloned_src.commit = head_commit else: - logging.debug(f'unable to collect commit: {git.stderr}') + log.debug(f'unable to collect commit: {git.stderr}') else: if orig_src.commit: - logging.warning("unable to checkout commit/tag on non-repository " - "source") + log.warning("unable to checkout commit/tag on non-repository " + "source") return cloned_path if cloned_src.subdir is not None: @@ -1036,29 +1154,29 @@ def _checkout_commit(orig_src: InstInfo, def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: """make sure the repo exists and clone it.""" - logging.debug(f'Install requested from {src}.') + log.debug(f'Install requested from {src}.') if RECKLESS_CONFIG is None: - print('error: reckless install directory unavailable') - sys.exit(2) + log.error('reckless install directory unavailable') + return None # Use a unique directory for each cloned repo. tmp_path = get_temp_reckless_dir() if not create_dir(tmp_path): - logging.debug(f'failed to create {tmp_path}') + log.debug(f'failed to create {tmp_path}') return None clone_path = tmp_path / 'clone' if not create_dir(tmp_path): - logging.debug(f'failed to create {clone_path}') + log.debug(f'failed to create {clone_path}') return None # we rename the original repo here. 
plugin_path = clone_path / src.name inst_path = Path(RECKLESS_CONFIG.reckless_dir) / src.name if Path(clone_path).exists(): - logging.debug(f'{clone_path} already exists - deleting') + log.debug(f'{clone_path} already exists - deleting') shutil.rmtree(clone_path) if src.srctype == Source.DIRECTORY: - logging.debug(("copying local directory contents from" - f" {src.source_loc}")) + log.debug(("copying local directory contents from" + f" {src.source_loc}")) create_dir(clone_path) shutil.copytree(src.source_loc, plugin_path) elif src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO, @@ -1070,9 +1188,9 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: # Depending on how we accessed the original source, there may be install # details missing. Searching the cloned repo makes sure we have it. cloned_src = _source_search(src.name, str(clone_path)) - logging.debug(f'cloned_src: {cloned_src}') + log.debug(f'cloned_src: {cloned_src}') if not cloned_src: - logging.debug('failed to find plugin after cloning repo.') + log.warning('failed to find plugin after cloning repo.') return None # If a specific commit or tag was requested, check it out now. @@ -1088,11 +1206,12 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: if inst_method.dependency_file is not None: if inst_method.dependency_file not in os.listdir(plugin_path): continue - logging.debug(f"using installer {inst_method.name}") + log.debug(f"using installer {inst_method.name}") INSTALLER = inst_method break if not INSTALLER: - logging.debug('Could not find a suitable installer method.') + log.warning('Could not find a suitable installer method for ' + f'{src.name}') return None if not cloned_src.entry: # The plugin entrypoint may not be discernable prior to cloning. 
@@ -1100,17 +1219,28 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: cloned_src.source_loc = plugin_path # Relocate plugin to a staging directory prior to testing - staging_path = inst_path / 'source' + if not Path(inst_path).exists(): + log.debug(f'creating {inst_path}') + create_dir(inst_path) + if not Path(inst_path / 'source').exists(): + log.debug(f'creating {inst_path / "source"}') + create_dir(inst_path / 'source') + staging_path = inst_path / 'source' / src.name + log.debug(f'copying {plugin_path} tree to {staging_path}') shutil.copytree(str(plugin_path), staging_path) staged_src = cloned_src # Because the source files are copied to a 'source' directory, the # get_inst_details function no longer works. (dir must match plugin name) # Set these manually instead. - staged_src.source_loc = str(staging_path.parent) + staged_src.source_loc = str(inst_path) staged_src.srctype = Source.DIRECTORY - staged_src.subdir = None + # Use subdir to redirect the symlink to the actual executable location + staged_src.subdir = f'source/{src.name}' # Create symlink in staging tree to redirect to the plugins entrypoint - Path(staging_path.parent / cloned_src.entry).\ + log.debug(f"linking source {staging_path / cloned_src.entry} to " + f"{Path(staged_src.source_loc) / cloned_src.entry}") + log.debug(staged_src) + (Path(staged_src.source_loc) / cloned_src.entry).\ symlink_to(staging_path / cloned_src.entry) # try it out @@ -1122,8 +1252,8 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: return None else: for call in INSTALLER.dependency_call: - logging.debug(f"Install: invoking '{' '.join(call)}'") - if logging.root.level < logging.WARNING: + log.debug(f"Install: invoking '{' '.join(call)}'") + if logging.root.level < logging.INFO: pip = Popen(call, cwd=staging_path, text=True) else: pip = Popen(call, cwd=staging_path, stdout=PIPE, @@ -1132,14 +1262,15 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: # FIXME: handle output of multiple 
calls if pip.returncode == 0: - print('dependencies installed successfully') + log.info('dependencies installed successfully') else: - print('error encountered installing dependencies') + log.error('error encountered installing dependencies') if pip.stdout: - logging.debug(pip.stdout.read()) + log.debug(pip.stdout.read()) remove_dir(clone_path) remove_dir(inst_path) return None + staged_src.subdir = None test_log = [] try: test = run([Path(staged_src.source_loc).joinpath(staged_src.entry)], @@ -1152,39 +1283,47 @@ def _install_plugin(src: InstInfo) -> Union[InstInfo, None]: # If the plugin is still running, it's assumed to be okay. returncode = 0 if returncode != 0: - logging.debug("plugin testing error:") + log.debug("plugin testing error:") for line in test_log: - logging.debug(f' {line}') - print('plugin testing failed') + log.debug(f' {line}') + log.error('plugin testing failed') remove_dir(clone_path) remove_dir(inst_path) return None add_installation_metadata(staged_src, src) - print(f'plugin installed: {inst_path}') + log.info(f'plugin installed: {inst_path}') remove_dir(clone_path) return staged_src -def install(plugin_name: str): - """downloads plugin from source repos, installs and activates plugin""" +def install(plugin_name: str) -> Union[str, None]: + """Downloads plugin from source repos, installs and activates plugin. 
+ Returns the location of the installed plugin or "None" in the case of + failure.""" assert isinstance(plugin_name, str) # Specify a tag or commit to checkout by adding @ to plugin name if '@' in plugin_name: - logging.debug("testing for a commit/tag in plugin name") + log.debug("testing for a commit/tag in plugin name") name, commit = plugin_name.split('@', 1) else: name = plugin_name commit = None - logging.debug(f"Searching for {name}") - src = search(name) - if src: + log.debug(f"Searching for {name}") + if search(name): + global LAST_FOUND + src = LAST_FOUND src.commit = commit - logging.debug(f'Retrieving {src.name} from {src.source_loc}') - installed = _install_plugin(src) + log.debug(f'Retrieving {src.name} from {src.source_loc}') + try: + installed = _install_plugin(src) + except FileExistsError as err: + log.error(f'File exists: {err.filename}') + return None + LAST_FOUND = None if not installed: - print('installation aborted') - sys.exit(1) + log.warning(f'{plugin_name}: installation aborted') + return None # Match case of the containing directory for dirname in os.listdir(RECKLESS_CONFIG.reckless_dir): @@ -1193,24 +1332,34 @@ def install(plugin_name: str): inst_path = inst_path / dirname / installed.entry RECKLESS_CONFIG.enable_plugin(inst_path) enable(installed.name) - return - print(('dynamic activation failed: ' - f'{installed.name} not found in reckless directory')) - sys.exit(1) + return f"{installed.source_loc}" + log.error(('dynamic activation failed: ' + f'{installed.name} not found in reckless directory')) + return None + return None -def uninstall(plugin_name: str): - """disables plugin and deletes the plugin's reckless dir""" +def uninstall(plugin_name: str) -> str: + """Disables plugin and deletes the plugin's reckless dir. 
Returns the + status of the uninstall attempt.""" assert isinstance(plugin_name, str) - logging.debug(f'Uninstalling plugin {plugin_name}') + log.debug(f'Uninstalling plugin {plugin_name}') disable(plugin_name) - inst = InferInstall(plugin_name) + try: + inst = InferInstall(plugin_name) + except NotFoundError as err: + log.error(err) + return "uninstall failed" if not Path(inst.entry).exists(): - print(f'cannot find installed plugin at expected path {inst.entry}') - sys.exit(1) - logging.debug(f'looking for {str(Path(inst.entry).parent)}') + log.error("cannot find installed plugin at expected path" + f"{inst.entry}") + return "uninstall failed" + log.debug(f'looking for {str(Path(inst.entry).parent)}') if remove_dir(str(Path(inst.entry).parent)): - print(f"{inst.name} uninstalled successfully.") + log.info(f"{inst.name} uninstalled successfully.") + else: + return "uninstall failed" + return "uninstalled" def search(plugin_name: str) -> Union[InstInfo, None]: @@ -1231,19 +1380,22 @@ def search(plugin_name: str) -> Union[InstInfo, None]: for source in ordered_sources: srctype = Source.get_type(source) if srctype == Source.UNKNOWN: - logging.debug(f'cannot search {srctype} {source}') + log.debug(f'cannot search {srctype} {source}') continue if srctype in [Source.DIRECTORY, Source.LOCAL_REPO, Source.GITHUB_REPO, Source.OTHER_URL]: found = _source_search(plugin_name, source) if not found: continue - print(f"found {found.name} in source: {found.source_loc}") - logging.debug(f"entry: {found.entry}") + log.info(f"found {found.name} in source: {found.source_loc}") + log.debug(f"entry: {found.entry}") if found.subdir: - logging.debug(f'sub-directory: {found.subdir}') - return found - logging.debug("Search exhausted all sources") + log.debug(f'sub-directory: {found.subdir}') + global LAST_FOUND + # Stashing the search result saves install() a call to _source_search. 
+ LAST_FOUND = found + return str(found.source_loc) + log.info("Search exhausted all sources") return None @@ -1293,50 +1445,63 @@ def lightning_cli(*cli_args, timeout: int = 15) -> dict: def enable(plugin_name: str): """dynamically activates plugin and adds to config (persistent)""" assert isinstance(plugin_name, str) - inst = InferInstall(plugin_name) + try: + inst = InferInstall(plugin_name) + except NotFoundError as err: + log.error(err) + return None path = inst.entry if not Path(path).exists(): - print(f'cannot find installed plugin at expected path {path}') - sys.exit(1) - logging.debug(f'activating {plugin_name}') + log.error(f'cannot find installed plugin at expected path {path}') + return None + log.debug(f'activating {plugin_name}') try: lightning_cli('plugin', 'start', path) except CLIError as err: if 'already registered' in err.message: - logging.debug(f'{inst.name} is already running') + log.debug(f'{inst.name} is already running') + return None else: - print(f'reckless: {inst.name} failed to start!') - raise err + log.error(f'reckless: {inst.name} failed to start!') + log.error(err) + return None except RPCError: - logging.debug(('lightningd rpc unavailable. ' - 'Skipping dynamic activation.')) + log.debug(('lightningd rpc unavailable. 
' + 'Skipping dynamic activation.')) RECKLESS_CONFIG.enable_plugin(path) - print(f'{inst.name} enabled') + log.info(f'{inst.name} enabled') + return 'enabled' def disable(plugin_name: str): """reckless disable deactivates an installed plugin""" assert isinstance(plugin_name, str) - inst = InferInstall(plugin_name) + try: + inst = InferInstall(plugin_name) + except NotFoundError as err: + log.warning(f'failed to disable: {err}') + return None path = inst.entry if not Path(path).exists(): sys.stderr.write(f'Could not find plugin at {path}\n') - sys.exit(1) - logging.debug(f'deactivating {plugin_name}') + return None + log.debug(f'deactivating {plugin_name}') try: lightning_cli('plugin', 'stop', path) except CLIError as err: if err.code == -32602: - logging.debug('plugin not currently running') + log.debug('plugin not currently running') else: - print('lightning-cli plugin stop failed') - raise err + log.error('lightning-cli plugin stop failed') + logging.error(err) + return None except RPCError: - logging.debug(('lightningd rpc unavailable. ' - 'Skipping dynamic deactivation.')) + log.debug(('lightningd rpc unavailable. 
' + 'Skipping dynamic deactivation.')) RECKLESS_CONFIG.disable_plugin(path) - print(f'{inst.name} disabled') + log.info(f'{inst.name} disabled') + return 'disabled' def load_config(reckless_dir: Union[str, None] = None, @@ -1363,10 +1528,9 @@ def load_config(reckless_dir: Union[str, None] = None, reck_conf_path = Path(reckless_dir) / f'{network}-reckless.conf' if net_conf: if str(network_path) != net_conf.conf_fp: - print('error: reckless configuration does not match lightningd:\n' - f'reckless network config path: {network_path}\n' - f'lightningd active config: {net_conf.conf_fp}') - sys.exit(1) + reckless_abort('reckless configuration does not match lightningd:\n' + f'reckless network config path: {network_path}\n' + f'lightningd active config: {net_conf.conf_fp}') else: # The network-specific config file (bitcoin by default) net_conf = LightningBitcoinConfig(path=network_path) @@ -1374,13 +1538,11 @@ def load_config(reckless_dir: Union[str, None] = None, try: reckless_conf = RecklessConfig(path=reck_conf_path) except FileNotFoundError: - print('Error: reckless config file could not be written: ', - str(reck_conf_path)) - sys.exit(1) + reckless_abort('reckless config file could not be written: ' + + str(reck_conf_path)) if not net_conf: - print('Error: could not load or create the network specific lightningd' - ' config (default .lightning/bitcoin)') - sys.exit(1) + reckless_abort('Error: could not load or create the network specific lightningd' + ' config (default .lightning/bitcoin)') net_conf.editConfigFile(f'include {reckless_conf.conf_fp}', None) return reckless_conf @@ -1404,7 +1566,7 @@ def load_sources() -> list: sources_file = get_sources_file() # This would have been created if possible if not Path(sources_file).exists(): - logging.debug('Warning: Reckless requires write access') + log.debug('Warning: Reckless requires write access') Config(path=str(sources_file), default_text='https://github.com/lightningd/plugins') return 
['https://github.com/lightningd/plugins'] @@ -1416,18 +1578,17 @@ def add_source(src: str): assert isinstance(src, str) # Is it a file? maybe_path = os.path.realpath(src) + sources = Config(path=str(get_sources_file()), + default_text='https://github.com/lightningd/plugins') if Path(maybe_path).exists(): if os.path.isdir(maybe_path): - default_repo = 'https://github.com/lightningd/plugins' - my_file = Config(path=str(get_sources_file()), - default_text=default_repo) - my_file.editConfigFile(src, None) + sources.editConfigFile(src, None) elif 'github.com' in src or 'http://' in src or 'https://' in src: - my_file = Config(path=str(get_sources_file()), - default_text='https://github.com/lightningd/plugins') - my_file.editConfigFile(src, None) + sources.editConfigFile(src, None) else: - print(f'failed to add source {src}') + log.warning(f'failed to add source {src}') + return None + return sources_from_file() def remove_source(src: str): @@ -1437,40 +1598,76 @@ def remove_source(src: str): my_file = Config(path=get_sources_file(), default_text='https://github.com/lightningd/plugins') my_file.editConfigFile(None, src) - print('plugin source removed') + log.info('plugin source removed') else: - print(f'source not found: {src}') + log.warning(f'source not found: {src}') + return sources_from_file() def list_source(): """Provide the user with all stored source repositories.""" for src in sources_from_file(): - print(src) + log.info(src) + return sources_from_file() + + +def report_version() -> str: + """return reckless version""" + log.info(__VERSION__) + log.add_result(__VERSION__) + + +def unpack_json_arg(json_target: str) -> list: + """validate json for any command line targets passed as a json array""" + try: + targets = json.loads(json_target) + except json.decoder.JSONDecodeError: + return None + if isinstance(targets, list): + return targets + log.warning(f'input {json_target} is not a json array') + return None + + +class StoreIdempotent(argparse.Action): + 
"""Make the option idempotent. This adds a secondary argument that doesn't + get reinitialized. The downside is it""" + def __init__(self, option_strings, dest, nargs=None, **kwargs): + super().__init__(option_strings, dest, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + if option_string: + setattr(namespace, self.dest, values) + setattr(namespace, f'{self.dest}_idempotent', values) + + +class StoreTrueIdempotent(argparse._StoreConstAction): + """Make the option idempotent""" + def __init__(self, option_strings, dest, default=False, + required=False, nargs=None, const=None, help=None): + super().__init__(option_strings=option_strings, dest=dest, + const=const, help=help) + + def __call__(self, parser, namespace, values, option_string=None): + if option_string: + setattr(namespace, self.dest, True) + setattr(namespace, f'{self.dest}_idempotent', True) + + +def process_idempotent_args(args): + """Swap idempotently set arguments back in for the default arg names.""" + original_args = dict(vars(args)) + for arg, value in original_args.items(): + if f"{arg}_idempotent" in vars(args): + setattr(args, f"{arg}", vars(args)[f"{arg}_idempotent"]) + delattr(args, f"{arg}_idempotent") + return args if __name__ == '__main__': parser = argparse.ArgumentParser() - # This default depends on the .lightning directory - parser.add_argument('-d', '--reckless-dir', - help='specify a data directory for reckless to use', - type=str, default=None) - parser.add_argument('-l', '--lightning', - help='lightning data directory (default:~/.lightning)', - type=str, - default=Path.home().joinpath('.lightning')) - parser.add_argument('-c', '--conf', - help=' config file used by lightningd', - type=str, - default=None) - parser.add_argument('-r', '--regtest', action='store_true') - parser.add_argument('--network', - help="specify a network to use (default: bitcoin)", - type=str) - parser.add_argument('-v', '--verbose', action="store_const", - dest="loglevel", 
const=logging.DEBUG, - default=logging.WARNING) cmd1 = parser.add_subparsers(dest='cmd1', help='command', - required=True) + required=False) install_cmd = cmd1.add_parser('install', help='search for and install a ' 'plugin, then test and activate') @@ -1514,17 +1711,63 @@ if __name__ == '__main__': '"reckless -h"') help_cmd.add_argument('targets', type=str, nargs='*') help_cmd.set_defaults(func=help_alias) + parser.add_argument('-V', '--version', + action=StoreTrueIdempotent, const=None, + help='print version and exit') + + all_parsers = [parser, install_cmd, uninstall_cmd, search_cmd, enable_cmd, + disable_cmd, list_parse, source_add, source_rem, help_cmd] + for p in all_parsers: + # This default depends on the .lightning directory + p.add_argument('-d', '--reckless-dir', action=StoreIdempotent, + help='specify a data directory for reckless to use', + type=str, default=None) + p.add_argument('-l', '--lightning', type=str, action=StoreIdempotent, + help='lightning data directory ' + '(default:~/.lightning)', + default=Path.home().joinpath('.lightning')) + p.add_argument('-c', '--conf', action=StoreIdempotent, + help=' config file used by lightningd', + type=str, + default=None) + p.add_argument('-r', '--regtest', action=StoreTrueIdempotent) + p.add_argument('--network', action=StoreIdempotent, + help="specify a network to use (default: bitcoin)", + type=str) + p.add_argument('-v', '--verbose', action=StoreTrueIdempotent, + const=None) + p.add_argument('-j', '--json', action=StoreTrueIdempotent, + help='output in json format') args = parser.parse_args() + args = process_idempotent_args(args) + + if args.json: + log.capture = True + stdout_redirect = PIPE + stderr_redirect = PIPE + else: + stdout_redirect = None + stderr_redirect = None + + if args.verbose: + logging.root.setLevel(logging.DEBUG) + else: + logging.root.setLevel(logging.INFO) NETWORK = 'regtest' if args.regtest else 'bitcoin' SUPPORTED_NETWORKS = ['bitcoin', 'regtest', 'liquid', 'liquid-regtest', 
'litecoin', 'signet', 'testnet'] + if args.version: + report_version() + elif args.cmd1 is None: + parser.print_help(sys.stdout) + sys.exit(1) if args.network: if args.network in SUPPORTED_NETWORKS: NETWORK = args.network else: - print(f"Error: {args.network} network not supported") + log.error(f"{args.network} network not supported") LIGHTNING_DIR = Path(args.lightning) # This env variable is set under CI testing LIGHTNING_CLI_CALL = [os.environ.get('LIGHTNING_CLI')] @@ -1549,7 +1792,6 @@ if __name__ == '__main__': API_GITHUB_COM = os.environ['REDIR_GITHUB_API'] if 'REDIR_GITHUB' in os.environ: GITHUB_COM = os.environ['REDIR_GITHUB'] - logging.root.setLevel(args.loglevel) GITHUB_API_FALLBACK = False if 'GITHUB_API_FALLBACK' in os.environ: @@ -1561,6 +1803,15 @@ if __name__ == '__main__': args.func(args.targets) sys.exit(0) for target in args.targets: - args.func(target) - else: - args.func() + # Accept single item arguments, or a json array + target_list = unpack_json_arg(target) + if target_list: + for tar in target_list: + log.add_result(args.func(tar)) + else: + log.add_result(args.func(target)) + elif 'func' in args: + log.add_result(args.func()) + + if log.capture: + log.reply_json()