#!/usr/bin/env python3
import sys
import argparse
import copy
import datetime
from enum import Enum
import json
import logging
import os
from pathlib import Path, PosixPath
import shutil
from subprocess import Popen, PIPE, TimeoutExpired, run
import tempfile
import time
import types
from typing import Union
from urllib.parse import urlparse
from urllib.request import urlopen
from urllib.error import HTTPError
import venv

__VERSION__ = '24.08'

logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s: %(message)s',
    handlers=[logging.StreamHandler(stream=sys.stdout)],
)

LAST_FOUND = None


class Logger:
    """Redirect logging output to a json object or stdout as appropriate."""
    def __init__(self, capture: bool = False):
        self.json_output = {"result": [], "log": []}
        self.capture = capture

    def str_esc(self, raw_string: str) -> str:
        assert isinstance(raw_string, str)
        return json.dumps(raw_string)[1:-1]

    def debug(self, to_log: str):
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.DEBUG:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"DEBUG: {to_log}"))
        else:
            logging.debug(to_log)

    def info(self, to_log: str):
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.INFO:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"INFO: {to_log}"))
        else:
            print(to_log)

    def warning(self, to_log: str):
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.WARNING:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"WARNING: {to_log}"))
        else:
            logging.warning(to_log)

    def error(self, to_log: str):
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.ERROR:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"ERROR: {to_log}"))
        else:
            logging.error(to_log)

    def add_result(self, result: Union[str, None]):
        assert json.dumps(result), "result must be json serializable"
        self.json_output["result"].append(result)

    def reply_json(self):
        """json output to stdout with accumulated result."""
        if len(self.json_output["result"]) == 1 and \
                isinstance(self.json_output["result"][0], list):
            # unpack sources output
            self.json_output["result"] = self.json_output["result"][0]
        print(json.dumps(self.json_output, indent=3))


log = Logger()
repos = ['https://github.com/lightningd/plugins']


def reckless_abort(err: str):
    log.error(err)
    log.add_result(None)
    log.reply_json()
    sys.exit(1)


def py_entry_guesses(name) -> list:
    return [name, f'{name}.py', '__init__.py']


def unsupported_entry(name) -> list:
    return [f'{name}.go', f'{name}.sh']


def entry_guesses(name: str) -> list:
    guesses = []
    for inst in INSTALLERS:
        for entry in inst.entries:
            guesses.append(entry.format(name=name))
    return guesses


class Installer:
    '''The identification of a plugin language, compiler or interpreter
    availability, and the install procedures.
    '''
    def __init__(self, name: str, exe: Union[str, None] = None,
                 compiler: Union[str, None] = None,
                 manager: Union[str, None] = None,
                 entry: Union[str, None] = None):
        self.name = name
        self.entries = []
        if entry:
            self.entries.append(entry)
        self.exe = exe            # interpreter (if required)
        self.compiler = compiler  # compiler bin (if required)
        self.manager = manager    # dependency manager (if required)
        self.dependency_file = None
        self.dependency_call = None

    def __repr__(self):
        return (f'<Installer: {self.name} exe: {self.exe} '
                f'compiler: {self.compiler} manager: {self.manager}>')

    def executable(self) -> bool:
        '''Validate the necessary bins are available to execute the plugin.'''
        if self.exe:
            if shutil.which(self.exe):
                # This should arguably not be checked here.
                if self.manager:
                    if shutil.which(self.manager):
                        return True
                    return False
                return True
            return False
        return True

    def installable(self) -> bool:
        '''Validate the necessary compiler and package manager executables
        are available to install. If these are defined, they are considered
        mandatory even though the user may have the requisite packages
        already installed.'''
        if self.compiler and not shutil.which(self.compiler):
            return False
        if self.manager and not shutil.which(self.manager):
            return False
        return True

    def add_entrypoint(self, entry: str):
        assert isinstance(entry, str)
        self.entries.append(entry)

    def get_entrypoints(self, name: str):
        guesses = []
        for entry in self.entries:
            guesses.append(entry.format(name=name))
        return guesses

    def add_dependency_file(self, dep: str):
        assert isinstance(dep, str)
        self.dependency_file = dep

    def add_dependency_call(self, call: list):
        if self.dependency_call is None:
            self.dependency_call = []
        self.dependency_call.append(call)

    def copy(self):
        return copy.deepcopy(self)


class InstInfo:
    def __init__(self, name: str, location: str, git_url: str):
        self.name = name
        self.source_loc = str(location)  # Used for 'git clone'
        self.git_url: str = git_url      # API access for github repos
        self.srctype: Source = Source.get_type(location)
        self.entry: SourceFile = None    # relative to source_loc or subdir
        self.deps: str = None
        self.subdir: str = None
        self.commit: str = None

    def __repr__(self):
        return (f'InstInfo({self.name}, {self.source_loc}, {self.git_url}, '
                f'{self.entry}, {self.deps}, {self.subdir})')

    def get_inst_details(self) -> bool:
        """Search the source_loc for plugin install details.
        This may be necessary if a contents api is unavailable.
        Extracts entrypoint and dependencies if searchable, otherwise
        matches a directory to the plugin name and stops."""
        if self.srctype == Source.DIRECTORY:
            assert Path(self.source_loc).exists()
            assert os.path.isdir(self.source_loc)
        target = SourceDir(self.source_loc, srctype=self.srctype)
        # Set recursion for how many directories deep we should search
        depth = 0
        if self.srctype in [Source.DIRECTORY, Source.LOCAL_REPO,
                            Source.GIT_LOCAL_CLONE]:
            depth = 5
        elif self.srctype == Source.GITHUB_REPO:
            depth = 1

        def search_dir(self, sub: SourceDir, subdir: bool,
                       recursion: int) -> Union[SourceDir, None]:
            assert isinstance(recursion, int)
            # carveout for archived plugins in lightningd/plugins. Other
            # repos are only searched by API at the top level.
            if recursion == 0 and 'archive' in sub.name.lower():
                pass
            # If unable to search deeper, resort to matching directory name
            elif recursion < 1:
                if sub.name.lower() == self.name.lower():
                    # Partial success (can't check for entrypoint)
                    self.name = sub.name
                    return sub
                return None
            sub.populate()
            if sub.name.lower() == self.name.lower():
                # Directory matches the name we're trying to install, so
                # check for entrypoint and dependencies.
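                # Illustrative sketch (not executed): for a plugin named
                # 'summary', the guesses tried below amount to roughly
                # ['summary.py', 'summary', '__init__.py', 'summary.js',
                # 'Cargo.toml'] - each installer's entry formats with
                # {name} filled in.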
                for inst in INSTALLERS:
                    for g in inst.get_entrypoints(self.name):
                        found_entry = sub.find(g, ftype=SourceFile)
                        if found_entry:
                            break
                    # FIXME: handle a list of dependencies
                    found_dep = sub.find(inst.dependency_file,
                                         ftype=SourceFile)
                    if found_entry:
                        # Success!
                        if found_dep:
                            self.name = sub.name
                            self.entry = found_entry.name
                            self.deps = found_dep.name
                            return sub
                        log.debug(f"missing dependency for {self}")
                        found_entry = None
            for file in sub.contents:
                if isinstance(file, SourceDir):
                    assert file.relative
                    success = search_dir(self, file, True, recursion - 1)
                    if success:
                        return success
            return None

        try:
            result = search_dir(self, target, False, depth)
        # Using the rest API of github.com may result in a
        # "Error 403: rate limit exceeded" or other access issues.
        # Fall back to cloning and searching the local copy instead.
        except HTTPError:
            result = None
            if self.srctype == Source.GITHUB_REPO:
                # clone source to reckless dir
                target = copy_remote_git_source(self)
                if not target:
                    log.warning(f"could not clone github source {self}")
                    return False
                log.debug(f"falling back to cloning remote repo {self}")
                # Update to reflect use of a local clone
                self.source_loc = target.location
                self.srctype = target.srctype
                result = search_dir(self, target, False, 5)
        if not result:
            return False
        if result != target:
            if result.relative:
                self.subdir = result.relative
            else:
                # populate() should always assign a relative path
                # if not in the top-level source directory
                assert self.subdir == result.name
        return True


def create_dir(directory: PosixPath) -> bool:
    try:
        Path(directory).mkdir(parents=False, exist_ok=True)
        return True
    # Okay if directory already exists
    except FileExistsError:
        return True
    # Parent directory missing
    except FileNotFoundError:
        return False


def remove_dir(directory: str) -> bool:
    try:
        shutil.rmtree(directory)
        return True
    except NotADirectoryError:
        log.warning(f"Tried to remove directory {directory} that "
                    "does not exist.")
    except PermissionError:
        log.warning(f"Permission denied removing dir: {directory}")
    return False


class Source(Enum):
    DIRECTORY = 1
    LOCAL_REPO = 2
    GITHUB_REPO = 3
    OTHER_URL = 4
    UNKNOWN = 5
    # Cloned from remote source before searching (rather than github API)
    GIT_LOCAL_CLONE = 6

    @classmethod
    def get_type(cls, source: str):
        if Path(os.path.realpath(source)).exists():
            if os.path.isdir(os.path.realpath(source)):
                # returns 0 if git repository
                proc = run(['git', '-C', source, 'rev-parse'],
                           cwd=os.path.realpath(source), stdout=PIPE,
                           stderr=PIPE, text=True, timeout=5)
                if proc.returncode == 0:
                    return cls(2)
            return cls(1)
        if 'github.com' in source.lower():
            return cls(3)
        if 'http://' in source.lower() or 'https://' in source.lower():
            return cls(4)
        return cls(5)

    @classmethod
    def get_github_user_repo(cls, source: str) -> (str, str):
        'extract a github username and repository name'
        if 'github.com/' not in source.lower():
            return None, None
        trailing = Path(source.lower().partition('github.com/')[2]).parts
        if len(trailing) < 2:
            return None, None
        return trailing[0], trailing[1]


class SourceDir():
    """Structure to search source contents."""
    def __init__(self, location: str, srctype: Source = None,
                 name: str = None, relative: str = None):
        self.location = str(location)
        if name:
            self.name = name
        else:
            self.name = Path(location).name
        self.contents = []
        self.srctype = srctype
        self.prepopulated = False
        self.relative = relative  # location relative to source

    def populate(self):
        """populates contents of the directory at least one level"""
        if self.prepopulated:
            return
        if not self.srctype:
            self.srctype = Source.get_type(self.location)
        if self.srctype == Source.DIRECTORY:
            self.contents = populate_local_dir(self.location)
        elif self.srctype in [Source.LOCAL_REPO, Source.GIT_LOCAL_CLONE]:
            self.contents = populate_local_repo(self.location)
        elif self.srctype == Source.GITHUB_REPO:
            self.contents = populate_github_repo(self.location)
        else:
            raise Exception(f"populate method undefined for {self.srctype}")
        # Ensure the relative path of the contents is inherited.
        for c in self.contents:
            if self.relative is None:
                c.relative = c.name
            else:
                c.relative = str(Path(self.relative) / c.name)

    def find(self, name: str, ftype: type = None):
        """Match a SourceFile or SourceDir to the provided name
        (case insensitive) and return it, or None."""
        assert isinstance(name, str)
        if len(self.contents) == 0:
            return None
        for c in self.contents:
            if ftype and not isinstance(c, ftype):
                continue
            if c.name.lower() == name.lower():
                return c
        return None

    def __repr__(self):
        return f"<SourceDir: {self.location} ({self.srctype})>"

    def __eq__(self, compared):
        if isinstance(compared, str):
            return self.name == compared
        if isinstance(compared, SourceDir):
            return (self.name == compared.name and
                    self.location == compared.location)
        return False


class SourceFile():
    def __init__(self, location: str):
        self.location = str(location)
        self.name = Path(location).name

    def __repr__(self):
        return f"<SourceFile: {self.location}>"

    def __eq__(self, compared):
        if isinstance(compared, str):
            return self.name == compared
        if isinstance(compared, SourceFile):
            return (self.name == compared.name and
                    self.location == compared.location)
        return False


def populate_local_dir(path: str) -> list:
    assert Path(os.path.realpath(path)).exists()
    contents = []
    for c in os.listdir(path):
        fullpath = Path(path) / c
        if os.path.isdir(fullpath):
            # Inheriting type saves a call to test if it's a git repo
            contents.append(SourceDir(fullpath, srctype=Source.DIRECTORY))
        else:
            contents.append(SourceFile(fullpath))
    return contents


def populate_local_repo(path: str, parent=None) -> list:
    assert Path(os.path.realpath(path)).exists()
    if parent is None:
        basedir = SourceDir('base')
    else:
        assert isinstance(parent, SourceDir)
        basedir = parent

    def populate_source_path(parent: SourceDir, mypath: PosixPath,
                             relative: str = None):
        """`git ls-tree` lists all files with their full path. This
        populates all intermediate directories and the file."""
        parentdir = parent
        if mypath == '.':
            log.debug(' asked to populate root dir')
            return
        # reverse the parents
        pdirs = mypath
        revpath = []
        child = parentdir
        while pdirs.parent.name != '':
            revpath.append(pdirs.parent.name)
            pdirs = pdirs.parent
        for p in reversed(revpath):
            child = parentdir.find(p)
            if child:
                parentdir = child
            else:
                if p == revpath[-1]:
                    relative_path = None
                    if parentdir.relative:
                        relative_path = parentdir.relative
                elif parentdir.relative:
                    relative_path = str(Path(parentdir.relative) /
                                        parentdir.name)
                else:
                    relative_path = parentdir.name
                child = SourceDir(p, srctype=Source.LOCAL_REPO,
                                  relative=relative_path)
                # ls-tree lists every file in the repo with full path.
                # No need to populate each directory individually.
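                # For illustration, `git ls-tree --full-tree -r --name-only
                # HEAD` emits one full path per line (e.g.
                # 'summary/summary.py'), so each intermediate directory is
                # created here exactly once and marked prepopulated.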
                child.prepopulated = True
                parentdir.contents.append(child)
                parentdir = child
        newfile = SourceFile(mypath.name)
        child.contents.append(newfile)

    # Submodules contents are populated separately
    proc = run(['git', '-C', path, 'submodule', 'status'],
               stdout=PIPE, stderr=PIPE, text=True, timeout=5)
    if proc.returncode != 0:
        log.debug(f"'git submodule status' of repo {path} failed")
        return None
    submodules = []
    for sub in proc.stdout.splitlines():
        submodules.append(sub.split()[1])

    # FIXME: Pass in tag or commit hash
    ver = 'HEAD'
    git_call = ['git', '-C', path, 'ls-tree', '--full-tree', '-r',
                '--name-only', ver]
    proc = run(git_call, stdout=PIPE, stderr=PIPE, text=True, timeout=5)
    if proc.returncode != 0:
        log.debug(f'ls-tree of repo {path} failed')
        return None
    for filepath in proc.stdout.splitlines():
        if filepath in submodules:
            if parent is None:
                relative_path = filepath
            elif basedir.relative:
                relative_path = str(Path(basedir.relative) / filepath)
            assert relative_path
            submodule_dir = SourceDir(filepath, srctype=Source.LOCAL_REPO,
                                      relative=relative_path)
            populate_local_repo(Path(path) / filepath,
                                parent=submodule_dir)
            submodule_dir.prepopulated = True
            basedir.contents.append(submodule_dir)
        else:
            populate_source_path(basedir, Path(filepath))
    return basedir.contents


def source_element_from_repo_api(member: dict):
    # api accessed via /contents/
    if 'type' in member and 'name' in member and 'git_url' in member:
        if member['type'] == 'dir':
            return SourceDir(member['git_url'], srctype=Source.GITHUB_REPO,
                             name=member['name'])
        elif member['type'] == 'file':
            # Likely a submodule
            if member['size'] == 0:
                return SourceDir(None, srctype=Source.GITHUB_REPO,
                                 name=member['name'])
            return SourceFile(member['name'])
        elif member['type'] == 'commit':
            # No path is given by the api here
            return SourceDir(None, srctype=Source.GITHUB_REPO,
                             name=member['name'])
    # git_url with /tree/ presents results a little differently
    elif 'type' in member and 'path' in member and 'url' in member:
        if member['type'] not in ['tree', 'blob']:
            log.debug(f' skipping {member["path"]} type={member["type"]}')
        if member['type'] == 'tree':
            return SourceDir(member['url'], srctype=Source.GITHUB_REPO,
                             name=member['path'])
        elif member['type'] == 'blob':
            # This can be a submodule
            if member['size'] == 0:
                return SourceDir(member['url'], srctype=Source.GITHUB_REPO,
                                 name=member['path'])
            return SourceFile(member['path'])
        elif member['type'] == 'commit':
            # No path is given by the api here
            return SourceDir(None, srctype=Source.GITHUB_REPO,
                             name=member['path'])
    return None


def populate_github_repo(url: str) -> list:
    """populate one level of a github repository via REST API"""
    # Forces search to clone remote repos (for blackbox testing)
    if GITHUB_API_FALLBACK:
        with tempfile.NamedTemporaryFile() as tmp:
            raise HTTPError(url, 403, 'simulated ratelimit', {}, tmp)
    # FIXME: This probably contains leftover cruft.
    repo = url.split('/')
    while '' in repo:
        repo.remove('')
    repo_name = None
    parsed_url = urlparse(url)
    if 'github.com' not in parsed_url.netloc:
        return None
    if len(parsed_url.path.split('/')) < 2:
        return None
    start = 1
    # Maybe we were passed an api.github.com/repo/ url
    if 'api' in parsed_url.netloc:
        start += 1
    repo_user = parsed_url.path.split('/')[start]
    repo_name = parsed_url.path.split('/')[start + 1]

    # Get details from the github API.
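    # Rough sketch of the two URL shapes handled below ('lightningd' and
    # 'plugins' are example values):
    #   contents API: api.github.com/repos/lightningd/plugins/contents/
    #   tree API:     api.github.com/repos/lightningd/plugins/git/trees/HEAD
    # The contents API returns a json array directly, while the tree API
    # wraps it in a 'tree' member (hence the 'git/tree' check below).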
    if API_GITHUB_COM in url:
        api_url = url
    else:
        api_url = f'{API_GITHUB_COM}/repos/{repo_user}/{repo_name}/contents/'
    git_url = api_url
    if "api.github.com" in git_url:
        # This lets us redirect to handle blackbox testing
        log.debug(f'fetching from gh API: {git_url}')
        git_url = (API_GITHUB_COM + git_url.split("api.github.com")[-1])
    # Ratelimiting occurs for non-authenticated GH API calls at 60 in 1 hour.
    r = urlopen(git_url, timeout=5)
    if r.status != 200:
        return False
    if 'git/tree' in git_url:
        tree = json.loads(r.read().decode())['tree']
    else:
        tree = json.loads(r.read().decode())
    contents = []
    for sub in tree:
        element = source_element_from_repo_api(sub)
        if element:
            contents.append(element)
    return contents


def copy_remote_git_source(github_source: InstInfo):
    """clone or fetch & checkout a local copy of a remote git repo"""
    user, repo = Source.get_github_user_repo(github_source.source_loc)
    if not user or not repo:
        log.warning('could not extract github user and repo '
                    f'name for {github_source.source_loc}')
        return None
    local_path = RECKLESS_DIR / '.remote_sources' / user
    create_dir(RECKLESS_DIR / '.remote_sources')
    if not create_dir(local_path):
        log.warning(f'could not provision dir {local_path} to '
                    f'clone remote source {github_source.source_loc}')
        return None
    local_path = local_path / repo
    if local_path.exists():
        # Fetch the latest
        assert _git_update(github_source, local_path)
    else:
        _git_clone(github_source, local_path)
    return SourceDir(local_path, srctype=Source.GIT_LOCAL_CLONE)


class Config():
    """A generic class for procuring, reading and editing config files"""
    def obtain_config(self, config_path: str, default_text: str,
                      warn: bool = False) -> str:
        """Return a config file from the desired location. Create one with
        default_text if it cannot be found."""
        if isinstance(config_path, type(None)):
            raise Exception("Generic config must be passed a config_path.")
        assert isinstance(config_path, str)
        # FIXME: warn if reckless dir exists, but conf not found
        if Path(config_path).exists():
            with open(config_path, 'r+') as f:
                config_content = f.readlines()
            return config_content
        # redirecting the prompts to stderr is kinder for json consumers
        tmp = sys.stdout
        sys.stdout = sys.stderr
        print(f'config file not found: {config_path}')
        if warn:
            confirm = input('press [Y] to create one now.\n').upper() == 'Y'
        else:
            confirm = True
        sys.stdout = tmp
        if not confirm:
            reckless_abort(f"config file required: {config_path}")
        parent_path = Path(config_path).parent
        # Create up to one parent in the directory tree.
        if create_dir(parent_path):
            with open(self.conf_fp, 'w') as f:
                f.write(default_text)
            # FIXME: Handle write failure
            return default_text
        log.warning(f'could not create the parent directory {parent_path}')
        raise FileNotFoundError('invalid parent directory')

    def editConfigFile(self, addline: Union[str, None],
                       removeline: Union[str, None]):
        """Idempotent function to add and/or remove a single line each."""
        remove_these_lines = []
        with open(self.conf_fp, 'r') as reckless_conf:
            original = reckless_conf.readlines()
        empty_lines = []
        write_required = False
        for n, l in enumerate(original):
            if removeline and l.strip() == removeline.strip():
                write_required = True
                remove_these_lines.append(n)
                continue
            if l.strip() == '':
                empty_lines.append(n)
                if n - 1 in empty_lines:
                    # The white space is getting excessive.
                    remove_these_lines.append(n)
                    continue
        if not addline and not write_required:
            return
        # No write necessary if addline is already in config.
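        # Illustrative semantics (hypothetical path): enable_plugin() calls
        #   editConfigFile('plugin=/path/summary.py',
        #                  'disable-plugin=/path/summary.py')
        # which strips every copy of the removeline and appends the addline
        # only if absent, so repeated calls leave a single copy behind.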
        if addline and not write_required:
            for line in original:
                if line.strip() == addline.strip():
                    return
        with open(self.conf_fp, 'w') as conf_write:
            # no need to write if passed 'None'
            line_exists = not bool(addline)
            for n, l in enumerate(original):
                if n not in remove_these_lines:
                    if n > 0:
                        conf_write.write(f'\n{l.strip()}')
                    else:
                        conf_write.write(l.strip())
                    if addline and addline.strip() == l.strip():
                        # addline is idempotent
                        line_exists = True
            if not line_exists:
                conf_write.write(f'\n{addline}')

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None,
                 warn: bool = False):
        assert path is not None
        assert default_text is not None
        self.conf_fp = path
        self.content = self.obtain_config(self.conf_fp, default_text,
                                          warn=warn)


class RecklessConfig(Config):
    """Reckless config (by default, specific to the bitcoin network only.)
    This is inherited by the lightningd config and contains all reckless
    maintained plugins."""

    def enable_plugin(self, plugin_path: str):
        """Handle persistent plugin loading via config"""
        self.editConfigFile(f'plugin={plugin_path}',
                            f'disable-plugin={plugin_path}')

    def disable_plugin(self, plugin_path: str):
        """Handle persistent plugin disabling via config"""
        self.editConfigFile(f'disable-plugin={plugin_path}',
                            f'plugin={plugin_path}')

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None):
        if path is None:
            path = Path(LIGHTNING_DIR) / 'reckless' / 'bitcoin-reckless.conf'
        if default_text is None:
            default_text = (
                '# This configuration file is managed by reckless to '
                'activate and disable\n# reckless-installed plugins\n\n'
            )
        Config.__init__(self, path=str(path), default_text=default_text)
        self.reckless_dir = Path(path).parent


class LightningBitcoinConfig(Config):
    """lightningd config specific to the bitcoin network.
    This is inherited by the main lightningd config and in turn,
    inherits bitcoin-reckless.conf."""

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None,
                 warn: bool = True):
        if path is None:
            path = Path(LIGHTNING_DIR).joinpath('bitcoin', 'config')
        if default_text is None:
            default_text = "# This config was autopopulated by reckless\n\n"
        Config.__init__(self, path=str(path), default_text=default_text,
                        warn=warn)


class NotFoundError(Exception):
    """Raised by InferInstall when a source/entrypoint cannot be located."""


class InferInstall():
    """Once a plugin is installed, we may need its directory and entrypoint"""
    def __init__(self, name: str):
        reck_contents = os.listdir(RECKLESS_CONFIG.reckless_dir)
        reck_contents_lower = {}
        for f in reck_contents:
            reck_contents_lower.update({f.lower(): f})

        def match_name(name) -> str:
            for tier in range(0, 10):
                # Look for each installer's preferred entrypoint format first
                for inst in INSTALLERS:
                    if tier >= len(inst.entries):
                        continue
                    fmt = inst.entries[tier]
                    if '{name}' in fmt:
                        pre = fmt.split('{name}')[0]
                        post = fmt.split('{name}')[-1]
                        if name.startswith(pre) and name.endswith(post):
                            # strip the literal prefix/suffix (lstrip/rstrip
                            # would treat them as character sets)
                            if post:
                                return name[len(pre):-len(post)]
                            return name[len(pre):]
                    else:
                        if fmt == name:
                            return name
            return name

        name = match_name(name)
        if name.lower() in reck_contents_lower.keys():
            actual_name = reck_contents_lower[name.lower()]
            self.dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name)
        else:
            raise NotFoundError("Could not find a reckless directory "
                                f"for {name}")
        plug_dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name)
        for guess in entry_guesses(actual_name):
            for content in plug_dir.iterdir():
                if content.name == guess:
                    self.entry = str(content)
                    self.name = actual_name
                    return
        raise NotFoundError(f'plugin entrypoint not found in {self.dir}')


class InstallationFailure(Exception):
    "raised when pip fails to complete dependency installation"


def create_python3_venv(staged_plugin: InstInfo) -> InstInfo:
    "Create a virtual environment, install dependencies and test plugin."
    env_path = Path('.venv')
    env_path_full = Path(staged_plugin.source_loc) / env_path
    assert staged_plugin.subdir  # relative dir of original source
    plugin_path = Path(staged_plugin.source_loc) / staged_plugin.subdir
    pip = None
    if shutil.which('poetry') and staged_plugin.deps == 'pyproject.toml':
        log.debug('configuring a python virtual environment (poetry) in '
                  f'{env_path_full}')
        # The virtual environment should be located with the plugin.
        # This installs it to .venv instead of in the global location.
        mod_poetry_env = os.environ
        mod_poetry_env['POETRY_VIRTUALENVS_IN_PROJECT'] = 'true'
        # This ensures poetry installs to a new venv even though one may
        # already be active (i.e., under CI)
        if 'VIRTUAL_ENV' in mod_poetry_env:
            del mod_poetry_env['VIRTUAL_ENV']
        # to avoid relocating and breaking the venv, symlink pyproject.toml
        # to the location of poetry's .venv dir
        (Path(staged_plugin.source_loc) / 'pyproject.toml') \
            .symlink_to(plugin_path / 'pyproject.toml')
        (Path(staged_plugin.source_loc) / 'poetry.lock') \
            .symlink_to(plugin_path / 'poetry.lock')
        # Avoid redirecting stdout in order to stream progress.
        # Timeout excluded as armv7 grpcio build/install can take 1hr.
        pip = run(['poetry', 'install', '--no-root'], check=False,
                  cwd=staged_plugin.source_loc, env=mod_poetry_env,
                  stdout=stdout_redirect, stderr=stderr_redirect)
        (Path(staged_plugin.source_loc) / 'pyproject.toml').unlink()
        (Path(staged_plugin.source_loc) / 'poetry.lock').unlink()
    else:
        builder = venv.EnvBuilder(with_pip=True)
        builder.create(env_path_full)
        log.debug('configuring a python virtual environment (pip) in '
                  f'{env_path_full}')
        log.debug(f'virtual environment created in {env_path_full}.')
        if staged_plugin.deps == 'pyproject.toml':
            pip = run([str(env_path_full / 'bin' / 'pip'), 'install',
                       str(plugin_path)], check=False, cwd=plugin_path)
        elif staged_plugin.deps == 'requirements.txt':
            pip = run([str(env_path_full / 'bin/pip'), 'install', '-r',
                       str(plugin_path / 'requirements.txt')],
                      check=False, cwd=plugin_path,
                      stdout=stdout_redirect, stderr=stderr_redirect)
        else:
            log.debug("no python dependency file")
    if pip and pip.returncode != 0:
        log.error('error encountered installing dependencies')
        raise InstallationFailure
    staged_plugin.venv = env_path
    log.info('dependencies installed successfully')
    return staged_plugin


def create_wrapper(plugin: InstInfo):
    '''The wrapper will activate the virtual environment for this plugin
    and then run the plugin from within the same process.'''
    assert hasattr(plugin, 'venv')
    venv_full_path = Path(plugin.source_loc) / plugin.venv
    with open(Path(plugin.source_loc) / plugin.entry, 'w') as wrapper:
        wrapper.write((f"#!{venv_full_path}/bin/python\n"
                       "import sys\n"
                       "import runpy\n\n"
                       f"if '{plugin.source_loc}/{plugin.subdir}' not in "
                       "sys.path:\n"
                       f"    sys.path.append('{plugin.source_loc}/"
                       f"{plugin.subdir}')\n"
                       f"if '{plugin.source_loc}' in sys.path:\n"
                       f"    sys.path.remove('{plugin.source_loc}')\n"
                       f"runpy.run_module(\"{plugin.name}\", "
                       "{}, \"__main__\")"))
    wrapper_file = Path(plugin.source_loc) / plugin.entry
    wrapper_file.chmod(0o755)


def install_to_python_virtual_environment(cloned_plugin: InstInfo):
    '''Called during install in place of a subprocess.run list'''
    # Delete symlink so that a venv wrapper can take its place
    (Path(cloned_plugin.source_loc) / cloned_plugin.entry).unlink()
    create_python3_venv(cloned_plugin)
    if not hasattr(cloned_plugin, 'venv'):
        raise InstallationFailure
    log.debug('virtual environment for cloned plugin: '
              f'{cloned_plugin.venv}')
    create_wrapper(cloned_plugin)
    return cloned_plugin


def cargo_installation(cloned_plugin: InstInfo):
    call = ['cargo', 'build', '--release', '-vv']
    # FIXME: the symlinked Cargo.toml allows the installer to identify a
    # valid plugin directory, but is unneeded, and actually confuses cargo
    # if not removed prior to installing.
    cargo_toml_path = Path(cloned_plugin.source_loc) / 'Cargo.toml'
    if cargo_toml_path.exists():
        cargo_toml_path.unlink()
    # source_loc now contains a symlink to the entrypoint and 'source/plugin/'
    source = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.name
    log.debug(f'cargo installing from {source}')
    if logging.root.level < logging.INFO and not log.capture:
        cargo = run(call, cwd=str(source), text=True)
    else:
        cargo = run(call, cwd=str(source), stdout=PIPE, stderr=PIPE,
                    text=True)
    if cargo.returncode == 0:
        log.debug('rust project compiled successfully')
    else:
        log.error(cargo.stderr if cargo.stderr else
                  'error encountered during build, cargo exited with return '
                  f'code {cargo.returncode}')
        log.debug(f'removing {cloned_plugin.source_loc}')
        remove_dir(cloned_plugin.source_loc)
        raise InstallationFailure
    # We do need to symlink to the executable binary though.
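    # Sketch of the resulting layout ('summary' is a hypothetical plugin):
    #   <reckless_dir>/summary/summary
    #       -> source/summary/target/release/summary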
    (Path(cloned_plugin.source_loc) / cloned_plugin.name). \
        symlink_to(source / f'target/release/{cloned_plugin.name}')
    cloned_plugin.entry = cloned_plugin.name
    return cloned_plugin


python3venv = Installer('python3venv', exe='python3', manager='pip',
                        entry='{name}.py')
python3venv.add_entrypoint('{name}')
python3venv.add_entrypoint('__init__.py')
python3venv.add_dependency_file('requirements.txt')
python3venv.dependency_call = install_to_python_virtual_environment

poetryvenv = Installer('poetryvenv', exe='python3', manager='poetry',
                       entry='{name}.py')
poetryvenv.add_entrypoint('{name}')
poetryvenv.add_entrypoint('__init__.py')
poetryvenv.add_dependency_file('pyproject.toml')
poetryvenv.dependency_call = install_to_python_virtual_environment

pyprojectViaPip = Installer('pyprojectViaPip', exe='python3', manager='pip',
                            entry='{name}.py')
pyprojectViaPip.add_entrypoint('{name}')
pyprojectViaPip.add_entrypoint('__init__.py')
pyprojectViaPip.add_dependency_file('pyproject.toml')
pyprojectViaPip.dependency_call = install_to_python_virtual_environment

# Nodejs plugin installer
nodejs = Installer('nodejs', exe='node', manager='npm', entry='{name}.js')
nodejs.add_entrypoint('{name}')
nodejs.add_dependency_call(['npm', 'install', '--omit=dev'])
nodejs.add_dependency_file('package.json')

# This entrypoint is used to identify a candidate directory, don't call it.
rust_cargo = Installer('rust', manager='cargo', entry='Cargo.toml')
rust_cargo.add_dependency_file('Cargo.toml')
rust_cargo.dependency_call = cargo_installation

INSTALLERS = [python3venv, poetryvenv, pyprojectViaPip, nodejs, rust_cargo]


def help_alias(targets: list):
    if len(targets) == 0:
        parser.print_help(sys.stdout)
    else:
        log.info('try "reckless {} -h"'.format(' '.join(targets)))
    sys.exit(1)


def _source_search(name: str, src: str) -> Union[InstInfo, None]:
    """Identify source type, retrieve contents, and populate InstInfo
    if the relevant contents are found."""
    root_dir = SourceDir(src)
    source = InstInfo(name, root_dir.location, None)
    # If a local clone of a github source already exists, prefer searching
    # that instead of accessing the github API.
    if source.srctype == Source.GITHUB_REPO:
        # Do we have a local copy already? Use that.
        user, repo = Source.get_github_user_repo(src)
        assert user
        assert repo
        local_clone_location = RECKLESS_DIR / '.remote_sources' / user / repo
        if local_clone_location.exists():
            # Make sure it's the correct remote source and fetch any
            # updates.
            if _git_update(source, local_clone_location):
                log.debug(f"Using local clone of {src}: "
                          f"{local_clone_location}")
                source.source_loc = local_clone_location
                source.srctype = Source.GIT_LOCAL_CLONE
    if source.get_inst_details():
        return source
    return None


def _git_clone(src: InstInfo, dest: Union[PosixPath, str]) -> bool:
    log.info(f'cloning {src.srctype} {src}')
    if src.srctype == Source.GITHUB_REPO:
        assert 'github.com' in src.source_loc
        source = f"{GITHUB_COM}" + src.source_loc.split("github.com")[-1]
    elif src.srctype in [Source.LOCAL_REPO, Source.OTHER_URL,
                         Source.GIT_LOCAL_CLONE]:
        source = src.source_loc
    else:
        return False
    git = run(['git', 'clone', '--recurse-submodules', source, str(dest)],
              stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=180)
    if git.returncode != 0:
        for line in git.stderr.splitlines():
            log.debug(line)
        if Path(dest).exists():
            remove_dir(str(dest))
        log.error('Failed to clone repo')
        return False
    return True


def _git_update(github_source: InstInfo, local_copy: PosixPath):
    # Ensure this is the correct source
    git = run(['git', 'remote', 'set-url', 'origin',
               github_source.source_loc],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    # Fetch the latest from the remote
    git = run(['git', 'fetch', 'origin', '--recurse-submodules=on-demand'],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    # Find default branch
    git = run(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD', '--short'],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    default_branch = git.stdout.splitlines()[0]
    if default_branch != 'origin/master':
        log.debug(f'UNUSUAL: fetched default branch {default_branch} for '
                  f'{github_source.source_loc}')
    # Checkout default branch
    git = run(['git', 'checkout', default_branch], cwd=str(local_copy),
              stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=60)
    if git.returncode != 0:
        return False
    return True


def get_temp_reckless_dir() -> PosixPath:
    random_dir = 'reckless-{}'.format(str(hash(os.times()))[-9:])
    new_path = Path(tempfile.mkdtemp(prefix=random_dir))
    return new_path


def add_installation_metadata(installed: InstInfo,
                              original_request: InstInfo):
    """Document the install request and installation details for use when
    updating the plugin."""
    install_dir = Path(installed.source_loc)
    assert install_dir.is_dir()
    data = ('installation date\n'
            f'{datetime.date.today().isoformat()}\n'
            'installation time\n'
            f'{int(time.time())}\n'
            'original source\n'
            f'{original_request.source_loc}\n'
            'requested commit\n'
            f'{original_request.commit}\n'
            'installed commit\n'
            f'{installed.commit}\n')
    with open(install_dir / '.metadata', 'w') as metadata:
        metadata.write(data)


def _checkout_commit(orig_src: InstInfo, cloned_src: InstInfo,
                     cloned_path: PosixPath):
    # Check out and verify commit/tag if source was a repository
    if orig_src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO,
                            Source.OTHER_URL, Source.GIT_LOCAL_CLONE]:
        if orig_src.commit:
            log.debug(f"Checking out {orig_src.commit}")
            checkout = Popen(['git', 'checkout', orig_src.commit],
                             cwd=str(cloned_path), stdout=PIPE, stderr=PIPE)
            checkout.wait()
            if checkout.returncode != 0:
                log.warning('failed to checkout referenced '
                            f'commit {orig_src.commit}')
                return None
        else:
            log.debug("using latest commit of default branch")
        # Log the commit we actually used (for installation metadata)
        git = run(['git', 'rev-parse', 'HEAD'], cwd=str(cloned_path),
                  stdout=PIPE, stderr=PIPE, text=True, check=False,
                  timeout=60)
        if git.returncode == 0:
            head_commit = git.stdout.splitlines()[0]
            log.debug(f'checked out HEAD: {head_commit}')
            cloned_src.commit = head_commit
        else:
            log.debug(f'unable to collect commit: {git.stderr}')
    else:
        if orig_src.commit:
            log.warning("unable to checkout commit/tag on non-repository "
                        "source")
            return cloned_path
    if cloned_src.subdir is not None:
        return Path(cloned_src.source_loc) / cloned_src.subdir
    return cloned_path


def _install_plugin(src: InstInfo) -> Union[InstInfo, None]:
    """make sure the repo exists and clone it."""
    log.debug(f'Install requested from {src}.')
    if RECKLESS_CONFIG is None:
        log.error('reckless install directory unavailable')
        return None
    # Use a unique directory for each cloned repo.
    tmp_path = get_temp_reckless_dir()
    if not create_dir(tmp_path):
        log.debug(f'failed to create {tmp_path}')
        return None
    clone_path = tmp_path / 'clone'
    if not create_dir(clone_path):
        log.debug(f'failed to create {clone_path}')
        return None
    # we rename the original repo here.
    plugin_path = clone_path / src.name
    inst_path = Path(RECKLESS_CONFIG.reckless_dir) / src.name
    if Path(clone_path).exists():
        log.debug(f'{clone_path} already exists - deleting')
        shutil.rmtree(clone_path)
    if src.srctype == Source.DIRECTORY:
        log.debug(("copying local directory contents from"
                   f" {src.source_loc}"))
        create_dir(clone_path)
        shutil.copytree(src.source_loc, plugin_path)
    elif src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO,
                         Source.OTHER_URL, Source.GIT_LOCAL_CLONE]:
        # clone git repository to /tmp/reckless-...
        if not _git_clone(src, plugin_path):
            return None
    # FIXME: Validate path was cloned successfully.
    # Depending on how we accessed the original source, there may be install
    # details missing. Searching the cloned repo makes sure we have it.
    cloned_src = _source_search(src.name, str(clone_path))
    log.debug(f'cloned_src: {cloned_src}')
    if not cloned_src:
        log.warning('failed to find plugin after cloning repo.')
        return None
    # If a specific commit or tag was requested, check it out now.
    plugin_path = _checkout_commit(src, cloned_src, plugin_path)
    if not plugin_path:
        return None
    # Find a suitable installer
    INSTALLER = None
    for inst_method in INSTALLERS:
        if not (inst_method.installable() and inst_method.executable()):
            continue
        if inst_method.dependency_file is not None:
            if inst_method.dependency_file not in os.listdir(plugin_path):
                continue
        log.debug(f"using installer {inst_method.name}")
        INSTALLER = inst_method
        break
    if not INSTALLER:
        log.warning('Could not find a suitable installer method for '
                    f'{src.name}')
        return None
    if not cloned_src.entry:
        # The plugin entrypoint may not be discernable prior to cloning.
        # Need to search the newly cloned directory, not the original
        cloned_src.source_loc = plugin_path
    # Relocate plugin to a staging directory prior to testing
    if not Path(inst_path).exists():
        log.debug(f'creating {inst_path}')
        create_dir(inst_path)
    if not Path(inst_path / 'source').exists():
        log.debug(f'creating {inst_path / "source"}')
        create_dir(inst_path / 'source')
    staging_path = inst_path / 'source' / src.name
    log.debug(f'copying {plugin_path} tree to {staging_path}')
    shutil.copytree(str(plugin_path), staging_path)
    staged_src = cloned_src
    # Because the source files are copied to a 'source' directory, the
    # get_inst_details function no longer works (dir must match plugin
    # name). Set these manually instead.
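    # Rough sketch of the staging layout for a hypothetical plugin named
    # 'summary':
    #   <reckless_dir>/summary/             <- staged_src.source_loc
    #   <reckless_dir>/summary/summary.py   <- symlink to the entrypoint
    #   <reckless_dir>/summary/source/summary/  <- copy of the cloned tree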
    staged_src.source_loc = str(inst_path)
    staged_src.srctype = Source.DIRECTORY
    # Use subdir to redirect the symlink to the actual executable location
    staged_src.subdir = f'source/{src.name}'
    # Create symlink in staging tree to redirect to the plugin's entrypoint
    log.debug(f"linking source {staging_path / cloned_src.entry} to "
              f"{Path(staged_src.source_loc) / cloned_src.entry}")
    log.debug(staged_src)
    (Path(staged_src.source_loc) / cloned_src.entry). \
        symlink_to(staging_path / cloned_src.entry)
    # try it out
    if INSTALLER.dependency_call:
        if isinstance(INSTALLER.dependency_call, types.FunctionType):
            try:
                staged_src = INSTALLER.dependency_call(staged_src)
            except InstallationFailure:
                return None
        else:
            for call in INSTALLER.dependency_call:
                log.debug(f"Install: invoking '{' '.join(call)}'")
                if logging.root.level < logging.INFO:
                    pip = Popen(call, cwd=staging_path, text=True)
                else:
                    pip = Popen(call, cwd=staging_path, stdout=PIPE,
                                stderr=PIPE, text=True)
                pip.wait()
            # FIXME: handle output of multiple calls
            if pip.returncode == 0:
                log.info('dependencies installed successfully')
            else:
                log.error('error encountered installing dependencies')
                if pip.stdout:
                    log.debug(pip.stdout.read())
                remove_dir(clone_path)
                remove_dir(inst_path)
                return None
    staged_src.subdir = None
    test_log = []
    try:
        test = run([Path(staged_src.source_loc).joinpath(staged_src.entry)],
                   cwd=str(staging_path), stdout=PIPE, stderr=PIPE,
                   text=True, timeout=10)
        for line in test.stderr.splitlines():
            test_log.append(line)
        returncode = test.returncode
    except TimeoutExpired:
        # If the plugin is still running, it's assumed to be okay.
        returncode = 0
    if returncode != 0:
        log.debug("plugin testing error:")
        for line in test_log:
            log.debug(f' {line}')
        log.error('plugin testing failed')
        remove_dir(clone_path)
        remove_dir(inst_path)
        return None

    add_installation_metadata(staged_src, src)
    log.info(f'plugin installed: {inst_path}')
    remove_dir(clone_path)
    return staged_src


def install(plugin_name: str) -> Union[str, None]:
    """Downloads plugin from source repos, installs and activates plugin.
    Returns the location of the installed plugin or "None" in the case of
    failure."""
    assert isinstance(plugin_name, str)
    # Specify a tag or commit to checkout by adding @<tag> to the plugin name
    if '@' in plugin_name:
        log.debug("testing for a commit/tag in plugin name")
        name, commit = plugin_name.split('@', 1)
    else:
        name = plugin_name
        commit = None
    log.debug(f"Searching for {name}")
    if search(name):
        global LAST_FOUND
        src = LAST_FOUND
        src.commit = commit
        log.debug(f'Retrieving {src.name} from {src.source_loc}')
        try:
            installed = _install_plugin(src)
        except FileExistsError as err:
            log.error(f'File exists: {err.filename}')
            return None
        LAST_FOUND = None
        if not installed:
            log.warning(f'{plugin_name}: installation aborted')
            return None
        # Match case of the containing directory
        for dirname in os.listdir(RECKLESS_CONFIG.reckless_dir):
            if dirname.lower() == installed.name.lower():
                inst_path = Path(RECKLESS_CONFIG.reckless_dir)
                inst_path = inst_path / dirname / installed.entry
                RECKLESS_CONFIG.enable_plugin(inst_path)
                enable(installed.name)
                return f"{installed.source_loc}"
        log.error(('dynamic activation failed: '
                   f'{installed.name} not found in reckless directory'))
        return None
    return None


def uninstall(plugin_name: str) -> str:
    """Disables plugin and deletes the plugin's reckless dir.
    Returns the status of the uninstall attempt."""
    assert isinstance(plugin_name, str)
    log.debug(f'Uninstalling plugin {plugin_name}')
    disable(plugin_name)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.error(err)
        return "uninstall failed"
    if not Path(inst.entry).exists():
        log.error("cannot find installed plugin at expected path "
                  f"{inst.entry}")
        return "uninstall failed"
    log.debug(f'looking for {str(Path(inst.entry).parent)}')
    if remove_dir(str(Path(inst.entry).parent)):
        log.info(f"{inst.name} uninstalled successfully.")
    else:
        return "uninstall failed"
    return "uninstalled"


def search(plugin_name: str) -> Union[str, None]:
    """searches plugin index for plugin"""
    ordered_sources = RECKLESS_SOURCES.copy()
    for src in RECKLESS_SOURCES:
        # Search repos named after the plugin before collections
        if Source.get_type(src) == Source.GITHUB_REPO:
            if src.split('/')[-1].lower() == plugin_name.lower():
                ordered_sources.remove(src)
                ordered_sources.insert(0, src)
    # Check locally before reaching out to remote repositories
    for src in RECKLESS_SOURCES:
        if Source.get_type(src) in [Source.DIRECTORY, Source.LOCAL_REPO]:
            ordered_sources.remove(src)
            ordered_sources.insert(0, src)
    for source in ordered_sources:
        srctype = Source.get_type(source)
        if srctype == Source.UNKNOWN:
            log.debug(f'cannot search {srctype} {source}')
            continue
        if srctype in [Source.DIRECTORY, Source.LOCAL_REPO,
                       Source.GITHUB_REPO, Source.OTHER_URL]:
            found = _source_search(plugin_name, source)
            if not found:
                continue
            log.info(f"found {found.name} in source: {found.source_loc}")
            log.debug(f"entry: {found.entry}")
            if found.subdir:
                log.debug(f'sub-directory: {found.subdir}')
            global LAST_FOUND
            # Stashing the search result saves install() a call to
            # _source_search.
            LAST_FOUND = found
            return str(found.source_loc)
    log.info("Search exhausted all sources")
    return None


class RPCError(Exception):
    """lightning-cli fails to connect to lightningd RPC"""
    def __init__(self, err):
        self.err = err

    def __str__(self):
        return f'RPCError({self.err})'


class CLIError(Exception):
    """lightningd error response"""
    def __init__(self, code, message):
        self.code = code
        self.message = message

    def __str__(self):
        return f'CLIError({self.code} {self.message})'


def lightning_cli(*cli_args, timeout: int = 15) -> dict:
    """Interfaces with Core-Lightning via CLI using any configured options."""
    cmd = LIGHTNING_CLI_CALL.copy()
    cmd.extend(cli_args)
    clncli = run(cmd, stdout=PIPE, stderr=PIPE, check=False, timeout=timeout)
    out = clncli.stdout.decode()
    if len(out) > 0 and out[0] == '{':
        # If all goes well, a json object is typically returned
        out = json.loads(out.replace('\n', ''))
    else:
        # help, -V, etc. may not return json, so stash it here.
        out = {'content': out}
    if clncli.returncode == 0:
        return out
    if clncli.returncode == 1:
        # RPC doesn't like our input
        # output contains 'code' and 'message'
        raise CLIError(out['code'], out['message'])
    # RPC may not be available - i.e., lightningd not running, using
    # alternate config.
    err = clncli.stderr.decode()
    raise RPCError(err)


def enable(plugin_name: str):
    """dynamically activates plugin and adds to config (persistent)"""
    assert isinstance(plugin_name, str)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.error(err)
        return None
    path = inst.entry
    if not Path(path).exists():
        log.error(f'cannot find installed plugin at expected path {path}')
        return None
    log.debug(f'activating {plugin_name}')
    try:
        lightning_cli('plugin', 'start', path)
    except CLIError as err:
        if 'already registered' in err.message:
            log.debug(f'{inst.name} is already running')
            return None
        else:
            log.error(f'reckless: {inst.name} failed to start!')
            log.error(err)
            return None
    except RPCError:
        log.debug(('lightningd rpc unavailable. '
                   'Skipping dynamic activation.'))
    RECKLESS_CONFIG.enable_plugin(path)
    log.info(f'{inst.name} enabled')
    return 'enabled'


def disable(plugin_name: str):
    """reckless disable deactivates an installed plugin"""
    assert isinstance(plugin_name, str)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.warning(f'failed to disable: {err}')
        return None
    path = inst.entry
    if not Path(path).exists():
        sys.stderr.write(f'Could not find plugin at {path}\n')
        return None
    log.debug(f'deactivating {plugin_name}')
    try:
        lightning_cli('plugin', 'stop', path)
    except CLIError as err:
        if err.code == -32602:
            log.debug('plugin not currently running')
        else:
            log.error('lightning-cli plugin stop failed')
            log.error(err)
            return None
    except RPCError:
        log.debug(('lightningd rpc unavailable. '
                   'Skipping dynamic deactivation.'))
    RECKLESS_CONFIG.disable_plugin(path)
    log.info(f'{inst.name} disabled')
    return 'disabled'


def load_config(reckless_dir: Union[str, None] = None,
                network: str = 'bitcoin') -> Config:
    """Initial directory discovery and config file creation."""
    net_conf = None
    # Does the lightning-cli already reference an explicit config?
    try:
        active_config = lightning_cli('listconfigs', timeout=10)['configs']
        if 'conf' in active_config:
            net_conf = LightningBitcoinConfig(
                path=active_config['conf']['value_str'])
    except RPCError:
        pass

    if reckless_dir is None:
        reckless_dir = Path(LIGHTNING_DIR) / 'reckless'
    else:
        if not os.path.isabs(reckless_dir):
            reckless_dir = Path.cwd() / reckless_dir
    if LIGHTNING_CONFIG:
        network_path = LIGHTNING_CONFIG
    else:
        network_path = Path(LIGHTNING_DIR) / network / 'config'
    reck_conf_path = Path(reckless_dir) / f'{network}-reckless.conf'
    if net_conf:
        if str(network_path) != net_conf.conf_fp:
            reckless_abort('reckless configuration does not match '
                           'lightningd:\n'
                           f'reckless network config path: {network_path}\n'
                           f'lightningd active config: {net_conf.conf_fp}')
    else:
        # The network-specific config file (bitcoin by default)
        net_conf = LightningBitcoinConfig(path=network_path)
    # Reckless manages plugins here.
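    # The resulting include chain looks roughly like this (default paths):
    #   ~/.lightning/bitcoin/config
    #     `- include ~/.lightning/reckless/bitcoin-reckless.conf
    #          `- plugin=<reckless_dir>/<plugin>/<entrypoint>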
    try:
        reckless_conf = RecklessConfig(path=reck_conf_path)
    except FileNotFoundError:
        reckless_abort('reckless config file could not be written: '
                       + str(reck_conf_path))
    if not net_conf:
        reckless_abort('Error: could not load or create the network '
                       'specific lightningd config (default '
                       '.lightning/bitcoin)')
    net_conf.editConfigFile(f'include {reckless_conf.conf_fp}', None)
    return reckless_conf


def get_sources_file() -> str:
    return str(RECKLESS_DIR / '.sources')


def sources_from_file() -> list:
    sources_file = get_sources_file()
    read_sources = []
    with open(sources_file, 'r') as f:
        for src in f.readlines():
            if len(src.strip()) > 0:
                read_sources.append(src.strip())
    return read_sources


def load_sources() -> list:
    """Look for the repo sources file."""
    sources_file = get_sources_file()
    # This would have been created if possible
    if not Path(sources_file).exists():
        log.debug('Warning: Reckless requires write access')
        Config(path=str(sources_file),
               default_text='https://github.com/lightningd/plugins')
        return ['https://github.com/lightningd/plugins']
    return sources_from_file()


def add_source(src: str):
    """Additional git repositories, directories, etc. are passed here."""
    assert isinstance(src, str)
    # Is it a file?
    maybe_path = os.path.realpath(src)
    sources = Config(path=str(get_sources_file()),
                     default_text='https://github.com/lightningd/plugins')
    if Path(maybe_path).exists():
        if os.path.isdir(maybe_path):
            sources.editConfigFile(src, None)
    elif 'github.com' in src or 'http://' in src or 'https://' in src:
        sources.editConfigFile(src, None)
    else:
        log.warning(f'failed to add source {src}')
        return None
    return sources_from_file()


def remove_source(src: str):
    """Remove a source from the sources file."""
    assert isinstance(src, str)
    if src in sources_from_file():
        my_file = Config(path=get_sources_file(),
                         default_text='https://github.com/lightningd/plugins')
        my_file.editConfigFile(None, src)
        log.info('plugin source removed')
    else:
        log.warning(f'source not found: {src}')
    return sources_from_file()


def list_source():
    """Provide the user with all stored source repositories."""
    for src in sources_from_file():
        log.info(src)
    return sources_from_file()


def report_version():
    """return reckless version"""
    log.info(__VERSION__)
    log.add_result(__VERSION__)


def unpack_json_arg(json_target: str) -> list:
    """validate json for any command line targets passed as a json array"""
    try:
        targets = json.loads(json_target)
    except json.decoder.JSONDecodeError:
        return None
    if isinstance(targets, list):
        return targets
    log.warning(f'input {json_target} is not a json array')
    return None


class StoreIdempotent(argparse.Action):
    """Make the option idempotent. This adds a secondary argument that
    doesn't get reinitialized.
    The downside is it leaves an extra '<dest>_idempotent' attribute that
    must be swapped back after parsing (see process_idempotent_args)."""
    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if option_string:
            setattr(namespace, self.dest, values)
            setattr(namespace, f'{self.dest}_idempotent', values)


class StoreTrueIdempotent(argparse._StoreConstAction):
    """Make the option idempotent"""
    def __init__(self, option_strings, dest, default=False, required=False,
                 nargs=None, const=None, help=None):
        super().__init__(option_strings=option_strings, dest=dest,
                         const=const, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        if option_string:
            setattr(namespace, self.dest, True)
            setattr(namespace, f'{self.dest}_idempotent', True)


def process_idempotent_args(args):
    """Swap idempotently set arguments back in for the default arg names."""
    original_args = dict(vars(args))
    for arg, value in original_args.items():
        if f"{arg}_idempotent" in vars(args):
            setattr(args, f"{arg}", vars(args)[f"{arg}_idempotent"])
            delattr(args, f"{arg}_idempotent")
    return args


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    cmd1 = parser.add_subparsers(dest='cmd1', help='command',
                                 required=False)
    install_cmd = cmd1.add_parser('install', help='search for and install a '
                                  'plugin, then test and activate')
    install_cmd.add_argument('targets', type=str, nargs='*')
    install_cmd.set_defaults(func=install)
    uninstall_cmd = cmd1.add_parser('uninstall', help='deactivate a plugin '
                                    'and remove it from the directory')
    uninstall_cmd.add_argument('targets', type=str, nargs='*')
    uninstall_cmd.set_defaults(func=uninstall)
    search_cmd = cmd1.add_parser('search', help='search for a plugin from '
                                 'the available source repositories')
    search_cmd.add_argument('targets', type=str, nargs='*')
    search_cmd.set_defaults(func=search)
    enable_cmd = cmd1.add_parser('enable', help='dynamically enable a plugin '
                                 'and update config')
    enable_cmd.add_argument('targets', type=str, nargs='*')
    enable_cmd.set_defaults(func=enable)
    disable_cmd = cmd1.add_parser('disable', help='disable a plugin')
    disable_cmd.add_argument('targets', type=str, nargs='*')
    disable_cmd.set_defaults(func=disable)
    source_parser = cmd1.add_parser('source', help='manage plugin search '
                                    'sources')
    source_subs = source_parser.add_subparsers(dest='source_subs',
                                               required=True)
    list_parse = source_subs.add_parser('list', help='list available plugin '
                                        'sources (repositories)')
    list_parse.set_defaults(func=list_source)
    source_add = source_subs.add_parser('add', help='add a source repository')
    source_add.add_argument('targets', type=str, nargs='*')
    source_add.set_defaults(func=add_source)
    source_rem = source_subs.add_parser('remove', aliases=['rem', 'rm'],
                                        help='remove a plugin source '
                                        'repository')
    source_rem.add_argument('targets', type=str, nargs='*')
    source_rem.set_defaults(func=remove_source)
    help_cmd = cmd1.add_parser('help', help='for contextual help, use '
                               '"reckless <cmd> -h"')
    help_cmd.add_argument('targets', type=str, nargs='*')
    help_cmd.set_defaults(func=help_alias)
    parser.add_argument('-V', '--version', action=StoreTrueIdempotent,
                        const=None, help='print version and exit')
    all_parsers = [parser, install_cmd, uninstall_cmd, search_cmd,
                   enable_cmd, disable_cmd, list_parse, source_add,
                   source_rem, help_cmd]
    for p in all_parsers:
        # This default depends on the .lightning directory
        p.add_argument('-d', '--reckless-dir', action=StoreIdempotent,
                       help='specify a data directory for reckless to use',
                       type=str, default=None)
        p.add_argument('-l', '--lightning',
                       type=str, action=StoreIdempotent,
                       help='lightning data directory '
                       '(default: ~/.lightning)',
                       default=Path.home().joinpath('.lightning'))
        p.add_argument('-c', '--conf', action=StoreIdempotent,
                       help='config file used by lightningd', type=str,
                       default=None)
        p.add_argument('-r', '--regtest', action=StoreTrueIdempotent)
        p.add_argument('--network', action=StoreIdempotent,
                       help='specify a network to use (default: bitcoin)',
                       type=str)
        p.add_argument('-v', '--verbose', action=StoreTrueIdempotent,
                       const=None)
        p.add_argument('-j', '--json', action=StoreTrueIdempotent,
                       help='output in json format')
    args = parser.parse_args()
    args = process_idempotent_args(args)

    if args.json:
        log.capture = True
        stdout_redirect = PIPE
        stderr_redirect = PIPE
    else:
        stdout_redirect = None
        stderr_redirect = None
    if args.verbose:
        logging.root.setLevel(logging.DEBUG)
    else:
        logging.root.setLevel(logging.INFO)

    NETWORK = 'regtest' if args.regtest else 'bitcoin'
    SUPPORTED_NETWORKS = ['bitcoin', 'regtest', 'liquid', 'liquid-regtest',
                          'litecoin', 'signet', 'testnet']
    if args.version:
        report_version()
    elif args.cmd1 is None:
        parser.print_help(sys.stdout)
        sys.exit(1)

    if args.network:
        if args.network in SUPPORTED_NETWORKS:
            NETWORK = args.network
        else:
            log.error(f"{args.network} network not supported")
    LIGHTNING_DIR = Path(args.lightning)
    # This env variable is set under CI testing
    LIGHTNING_CLI_CALL = [os.environ.get('LIGHTNING_CLI')]
    if LIGHTNING_CLI_CALL == [None]:
        LIGHTNING_CLI_CALL = ['lightning-cli']
    if NETWORK != 'bitcoin':
        LIGHTNING_CLI_CALL.append(f'--network={NETWORK}')
    if LIGHTNING_DIR != Path.home().joinpath('.lightning'):
        LIGHTNING_CLI_CALL.append(f'--lightning-dir={LIGHTNING_DIR}')
    if args.reckless_dir:
        RECKLESS_DIR = Path(args.reckless_dir)
    else:
        RECKLESS_DIR = Path(LIGHTNING_DIR) / 'reckless'
    LIGHTNING_CONFIG = args.conf
    RECKLESS_CONFIG = load_config(reckless_dir=str(RECKLESS_DIR),
                                  network=NETWORK)
    RECKLESS_SOURCES = load_sources()
    API_GITHUB_COM = 'https://api.github.com'
    GITHUB_COM = 'https://github.com'
    # Used for blackbox testing to avoid hitting github servers
    if 'REDIR_GITHUB_API' in os.environ:
        API_GITHUB_COM = os.environ['REDIR_GITHUB_API']
    if 'REDIR_GITHUB' in os.environ:
        GITHUB_COM = os.environ['REDIR_GITHUB']
    GITHUB_API_FALLBACK = False
    if 'GITHUB_API_FALLBACK' in os.environ:
        GITHUB_API_FALLBACK = os.environ['GITHUB_API_FALLBACK']

    if 'targets' in args:
        # FIXME: Catch missing argument
        if args.func.__name__ == 'help_alias':
            args.func(args.targets)
            sys.exit(0)
        for target in args.targets:
            # Accept single item arguments, or a json array
            target_list = unpack_json_arg(target)
            if target_list:
                for tar in target_list:
                    log.add_result(args.func(tar))
            else:
                log.add_result(args.func(target))
    elif 'func' in args:
        log.add_result(args.func())

    if log.capture:
        log.reply_json()