diff --git a/lbry/extras/__init__.py b/lbry/extras/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/lbry/extras/cli.py b/lbry/extras/cli.py deleted file mode 100644 index 4e2a8b527..000000000 --- a/lbry/extras/cli.py +++ /dev/null @@ -1,341 +0,0 @@ -import os -import sys -import shutil -import signal -import pathlib -import json -import asyncio -import argparse -import logging -import logging.handlers - -import aiohttp -from aiohttp.web import GracefulExit -from docopt import docopt - -from lbry import __version__ as lbrynet_version -from lbry.extras.daemon.loggly_handler import get_loggly_handler -from lbry.extras.daemon.daemon import Daemon -from lbry.conf import Config, CLIConfig - -log = logging.getLogger('lbry') - - -def display(data): - print(json.dumps(data, indent=2)) - - -async def execute_command(conf, method, params, callback=display): - async with aiohttp.ClientSession() as session: - try: - message = {'method': method, 'params': params} - async with session.get(conf.api_connection_url, json=message) as resp: - try: - data = await resp.json() - if 'result' in data: - return callback(data['result']) - elif 'error' in data: - return callback(data['error']) - except Exception as e: - log.exception('Could not process response from server:', exc_info=e) - except aiohttp.ClientConnectionError: - print("Could not connect to daemon. Are you sure it's running?") - - -def normalize_value(x, key=None): - if not isinstance(x, str): - return x - if key in ('uri', 'channel_name', 'name', 'file_name', 'claim_name', 'download_directory'): - return x - if x.lower() == 'true': - return True - if x.lower() == 'false': - return False - if x.isdigit(): - return int(x) - return x - - -def remove_brackets(key): - if key.startswith("<") and key.endswith(">"): - return str(key[1:-1]) - return key - - -def set_kwargs(parsed_args): - kwargs = {} - for key, arg in parsed_args.items(): - if arg is None: - continue - k = None - if key.startswith("--") and remove_brackets(key[2:]) not in kwargs: - k = remove_brackets(key[2:]) - elif remove_brackets(key) not in kwargs: - k = remove_brackets(key) - kwargs[k] = normalize_value(arg, k) - return kwargs - - -def split_subparser_argument(parent, original, name, condition): - new_sub_parser = argparse._SubParsersAction( - original.option_strings, - original._prog_prefix, - original._parser_class, - metavar=original.metavar - ) - new_sub_parser._name_parser_map = original._name_parser_map - new_sub_parser._choices_actions = [ - a for a in original._choices_actions if condition(original._name_parser_map[a.dest]) - ] - group = argparse._ArgumentGroup(parent, name) - group._group_actions = [new_sub_parser] - return group - - -class ArgumentParser(argparse.ArgumentParser): - def __init__(self, *args, group_name=None, **kwargs): - super().__init__(*args, formatter_class=HelpFormatter, add_help=False, **kwargs) - self.add_argument( - '--help', dest='help', action='store_true', default=False, - help='Show this help message and exit.' - ) - self._optionals.title = 'Options' - if group_name is None: - self.epilog = ( - f"Run 'lbrynet COMMAND --help' for more information on a command or group." - ) - else: - self.epilog = ( - f"Run 'lbrynet {group_name} COMMAND --help' for more information on a command." 
- ) - self.set_defaults(group=group_name, group_parser=self) - - def format_help(self): - formatter = self._get_formatter() - formatter.add_usage( - self.usage, self._actions, self._mutually_exclusive_groups - ) - formatter.add_text(self.description) - - # positionals, optionals and user-defined groups - for action_group in self._granular_action_groups: - formatter.start_section(action_group.title) - formatter.add_text(action_group.description) - formatter.add_arguments(action_group._group_actions) - formatter.end_section() - - formatter.add_text(self.epilog) - return formatter.format_help() - - @property - def _granular_action_groups(self): - if self.prog != 'lbrynet': - yield from self._action_groups - return - yield self._optionals - action: argparse._SubParsersAction = self._positionals._group_actions[0] - yield split_subparser_argument( - self, action, "Grouped Commands", lambda parser: 'group' in parser._defaults - ) - yield split_subparser_argument( - self, action, "Commands", lambda parser: 'group' not in parser._defaults - ) - - def error(self, message): - self.print_help(argparse._sys.stderr) - self.exit(2, f"\n{message}\n") - - -class HelpFormatter(argparse.HelpFormatter): - - def add_usage(self, usage, actions, groups, prefix='Usage: '): - super().add_usage( - usage, [a for a in actions if a.option_strings != ['--help']], groups, prefix - ) - - -def add_command_parser(parent, command): - subcommand = parent.add_parser( - command['name'], - help=command['doc'].strip().splitlines()[0] - ) - subcommand.set_defaults( - api_method_name=command['api_method_name'], - command=command['name'], - doc=command['doc'], - replaced_by=command.get('replaced_by', None) - ) - - -def get_argument_parser(): - root = ArgumentParser( - 'lbrynet', description='An interface to the LBRY Network.', allow_abbrev=False, - ) - root.add_argument( - '-v', '--version', dest='cli_version', action="store_true", - help='Show lbrynet CLI version and exit.' - ) - root.set_defaults(group=None, command=None) - CLIConfig.contribute_to_argparse(root) - sub = root.add_subparsers(metavar='COMMAND') - start = sub.add_parser( - 'start', - usage='lbrynet start [--config FILE] [--data-dir DIR] [--wallet-dir DIR] [--download-dir DIR] ...', - help='Start LBRY Network interface.' - ) - start.add_argument( - '--quiet', dest='quiet', action="store_true", - help='Disable all console output.' - ) - start.add_argument( - '--no-logging', dest='no_logging', action="store_true", - help='Disable all logging of any kind.' - ) - start.add_argument( - '--verbose', nargs="*", - help=('Enable debug output for lbry logger and event loop. Optionally specify loggers for which debug output ' - 'should selectively be applied.') - ) - start.add_argument( - '--initial-headers', dest='initial_headers', - help='Specify path to initial blockchain headers, faster than downloading them on first run.' 
- ) - Config.contribute_to_argparse(start) - start.set_defaults(command='start', start_parser=start, doc=start.format_help()) - - api = Daemon.get_api_definitions() - groups = {} - for group_name in sorted(api['groups']): - group_parser = sub.add_parser(group_name, group_name=group_name, help=api['groups'][group_name]) - groups[group_name] = group_parser.add_subparsers(metavar='COMMAND') - - nicer_order = ['stop', 'get', 'publish', 'resolve'] - for command_name in sorted(api['commands']): - if command_name not in nicer_order: - nicer_order.append(command_name) - - for command_name in nicer_order: - command = api['commands'][command_name] - if command['group'] is None: - add_command_parser(sub, command) - else: - add_command_parser(groups[command['group']], command) - - return root - - -def ensure_directory_exists(path: str): - if not os.path.isdir(path): - pathlib.Path(path).mkdir(parents=True, exist_ok=True) - - -LOG_MODULES = 'lbry', 'aioupnp' - - -def setup_logging(logger: logging.Logger, args: argparse.Namespace, conf: Config): - default_formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(name)s:%(lineno)d: %(message)s") - file_handler = logging.handlers.RotatingFileHandler(conf.log_file_path, maxBytes=2097152, backupCount=5) - file_handler.setFormatter(default_formatter) - for module_name in LOG_MODULES: - logger.getChild(module_name).addHandler(file_handler) - if not args.quiet: - handler = logging.StreamHandler() - handler.setFormatter(default_formatter) - for module_name in LOG_MODULES: - logger.getChild(module_name).addHandler(handler) - - logger.getChild('lbry').setLevel(logging.INFO) - logger.getChild('aioupnp').setLevel(logging.WARNING) - logger.getChild('aiohttp').setLevel(logging.CRITICAL) - - if args.verbose is not None: - if len(args.verbose) > 0: - for module in args.verbose: - logger.getChild(module).setLevel(logging.DEBUG) - else: - logger.getChild('lbry').setLevel(logging.DEBUG) - - loggly_handler = get_loggly_handler(conf) - loggly_handler.setLevel(logging.ERROR) - logger.getChild('lbry').addHandler(loggly_handler) - - -def run_daemon(args: argparse.Namespace, conf: Config): - loop = asyncio.get_event_loop() - if args.verbose is not None: - loop.set_debug(True) - if not args.no_logging: - setup_logging(logging.getLogger(), args, conf) - daemon = Daemon(conf) - - def __exit(): - raise GracefulExit() - - try: - loop.add_signal_handler(signal.SIGINT, __exit) - loop.add_signal_handler(signal.SIGTERM, __exit) - except NotImplementedError: - pass # Not implemented on Windows - - try: - loop.run_until_complete(daemon.start()) - loop.run_forever() - except (GracefulExit, KeyboardInterrupt, asyncio.CancelledError): - pass - finally: - loop.run_until_complete(daemon.stop()) - logging.shutdown() - - if hasattr(loop, 'shutdown_asyncgens'): - loop.run_until_complete(loop.shutdown_asyncgens()) - - -def main(argv=None): - argv = argv or sys.argv[1:] - parser = get_argument_parser() - args, command_args = parser.parse_known_args(argv) - - conf = Config.create_from_arguments(args) - for directory in (conf.data_dir, conf.download_dir, conf.wallet_dir): - ensure_directory_exists(directory) - - if args.cli_version: - print(f"lbrynet {lbrynet_version}") - elif args.command == 'start': - if args.help: - args.start_parser.print_help() - else: - if args.initial_headers: - ledger_path = os.path.join(conf.wallet_dir, 'lbc_mainnet') - ensure_directory_exists(ledger_path) - current_size = 0 - headers_path = os.path.join(ledger_path, 'headers') - if os.path.exists(headers_path): - 
current_size = os.stat(headers_path).st_size - if os.stat(args.initial_headers).st_size > current_size: - log.info('Copying header from %s to %s', args.initial_headers, headers_path) - shutil.copy(args.initial_headers, headers_path) - run_daemon(args, conf) - elif args.command is not None: - doc = args.doc - api_method_name = args.api_method_name - if args.replaced_by: - print(f"{args.api_method_name} is deprecated, using {args.replaced_by['api_method_name']}.") - doc = args.replaced_by['doc'] - api_method_name = args.replaced_by['api_method_name'] - if args.help: - print(doc) - else: - parsed = docopt(doc, command_args) - params = set_kwargs(parsed) - asyncio.get_event_loop().run_until_complete(execute_command(conf, api_method_name, params)) - elif args.group is not None: - args.group_parser.print_help() - else: - parser.print_help() - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/lbry/extras/daemon/__init__.py b/lbry/extras/daemon/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/lbry/extras/daemon/analytics.py b/lbry/extras/daemon/analytics.py deleted file mode 100644 index 828112396..000000000 --- a/lbry/extras/daemon/analytics.py +++ /dev/null @@ -1,233 +0,0 @@ -import asyncio -import collections -import logging -import typing -import aiohttp -from lbry import utils -from lbry.conf import Config -from lbry.extras import system_info - -ANALYTICS_ENDPOINT = 'https://api.segment.io/v1' -ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=' - -# Things We Track -SERVER_STARTUP = 'Server Startup' -SERVER_STARTUP_SUCCESS = 'Server Startup Success' -SERVER_STARTUP_ERROR = 'Server Startup Error' -DOWNLOAD_STARTED = 'Download Started' -DOWNLOAD_ERRORED = 'Download Errored' -DOWNLOAD_FINISHED = 'Download Finished' -HEARTBEAT = 'Heartbeat' -CLAIM_ACTION = 'Claim Action' # publish/create/update/abandon -NEW_CHANNEL = 'New Channel' -CREDITS_SENT = 'Credits Sent' -UPNP_SETUP = "UPnP Setup" - -BLOB_BYTES_UPLOADED = 'Blob Bytes Uploaded' - - -TIME_TO_FIRST_BYTES = "Time To First Bytes" - - -log = logging.getLogger(__name__) - - -def _event_properties(installation_id: str, session_id: str, - event_properties: typing.Optional[typing.Dict]) -> typing.Dict: - properties = { - 'lbry_id': installation_id, - 'session_id': session_id, - } - properties.update(event_properties or {}) - return properties - - -def _download_properties(conf: Config, external_ip: str, resolve_duration: float, - total_duration: typing.Optional[float], download_id: str, name: str, - outpoint: str, active_peer_count: typing.Optional[int], - tried_peers_count: typing.Optional[int], connection_failures_count: typing.Optional[int], - added_fixed_peers: bool, fixed_peer_delay: float, sd_hash: str, - sd_download_duration: typing.Optional[float] = None, - head_blob_hash: typing.Optional[str] = None, - head_blob_length: typing.Optional[int] = None, - head_blob_download_duration: typing.Optional[float] = None, - error: typing.Optional[str] = None, error_msg: typing.Optional[str] = None, - wallet_server: typing.Optional[str] = None) -> typing.Dict: - return { - "external_ip": external_ip, - "download_id": download_id, - "total_duration": round(total_duration, 4), - "resolve_duration": None if not resolve_duration else round(resolve_duration, 4), - "error": error, - "error_message": error_msg, - 'name': name, - "outpoint": outpoint, - - "node_rpc_timeout": conf.node_rpc_timeout, - "peer_connect_timeout": conf.peer_connect_timeout, - "blob_download_timeout": 
conf.blob_download_timeout, - "use_fixed_peers": len(conf.reflector_servers) > 0, - "fixed_peer_delay": fixed_peer_delay, - "added_fixed_peers": added_fixed_peers, - "active_peer_count": active_peer_count, - "tried_peers_count": tried_peers_count, - - "sd_blob_hash": sd_hash, - "sd_blob_duration": None if not sd_download_duration else round(sd_download_duration, 4), - - "head_blob_hash": head_blob_hash, - "head_blob_length": head_blob_length, - "head_blob_duration": None if not head_blob_download_duration else round(head_blob_download_duration, 4), - - "connection_failures_count": connection_failures_count, - "wallet_server": wallet_server - } - - -def _make_context(platform): - # see https://segment.com/docs/spec/common/#context - # they say they'll ignore fields outside the spec, but evidently they don't - context = { - 'app': { - 'version': platform['lbrynet_version'], - 'build': platform['build'], - }, - # TODO: expand os info to give linux/osx specific info - 'os': { - 'name': platform['os_system'], - 'version': platform['os_release'] - }, - } - if 'desktop' in platform and 'distro' in platform: - context['os']['desktop'] = platform['desktop'] - context['os']['distro'] = platform['distro'] - return context - - -class AnalyticsManager: - def __init__(self, conf: Config, installation_id: str, session_id: str): - self.conf = conf - self.cookies = {} - self.url = ANALYTICS_ENDPOINT - self._write_key = utils.deobfuscate(ANALYTICS_TOKEN) - self._tracked_data = collections.defaultdict(list) - self.context = _make_context(system_info.get_platform()) - self.installation_id = installation_id - self.session_id = session_id - self.task: typing.Optional[asyncio.Task] = None - self.external_ip: typing.Optional[str] = None - - @property - def enabled(self): - return self.conf.share_usage_data - - @property - def is_started(self): - return self.task is not None - - async def start(self): - if self.task is None: - self.task = asyncio.create_task(self.run()) - - async def run(self): - while True: - if self.enabled: - self.external_ip = await utils.get_external_ip() - await self._send_heartbeat() - await asyncio.sleep(1800) - - def stop(self): - if self.task is not None and not self.task.done(): - self.task.cancel() - - async def _post(self, data: typing.Dict): - request_kwargs = { - 'method': 'POST', - 'url': self.url + '/track', - 'headers': {'Connection': 'Close'}, - 'auth': aiohttp.BasicAuth(self._write_key, ''), - 'json': data, - 'cookies': self.cookies - } - try: - async with utils.aiohttp_request(**request_kwargs) as response: - self.cookies.update(response.cookies) - except Exception as e: - log.debug('Encountered an exception while POSTing to %s: ', self.url + '/track', exc_info=e) - - async def track(self, event: typing.Dict): - """Send a single tracking event""" - if self.enabled: - log.debug('Sending track event: %s', event) - await self._post(event) - - async def send_upnp_setup_success_fail(self, success, status): - await self.track( - self._event(UPNP_SETUP, { - 'success': success, - 'status': status, - }) - ) - - async def send_server_startup(self): - await self.track(self._event(SERVER_STARTUP)) - - async def send_server_startup_success(self): - await self.track(self._event(SERVER_STARTUP_SUCCESS)) - - async def send_server_startup_error(self, message): - await self.track(self._event(SERVER_STARTUP_ERROR, {'message': message})) - - async def send_time_to_first_bytes(self, resolve_duration: typing.Optional[float], - total_duration: typing.Optional[float], download_id: str, - name: str, 
outpoint: typing.Optional[str], - found_peers_count: typing.Optional[int], - tried_peers_count: typing.Optional[int], - connection_failures_count: typing.Optional[int], - added_fixed_peers: bool, - fixed_peers_delay: float, sd_hash: str, - sd_download_duration: typing.Optional[float] = None, - head_blob_hash: typing.Optional[str] = None, - head_blob_length: typing.Optional[int] = None, - head_blob_duration: typing.Optional[int] = None, - error: typing.Optional[str] = None, - error_msg: typing.Optional[str] = None, - wallet_server: typing.Optional[str] = None): - await self.track(self._event(TIME_TO_FIRST_BYTES, _download_properties( - self.conf, self.external_ip, resolve_duration, total_duration, download_id, name, outpoint, - found_peers_count, tried_peers_count, connection_failures_count, added_fixed_peers, fixed_peers_delay, - sd_hash, sd_download_duration, head_blob_hash, head_blob_length, head_blob_duration, error, error_msg, - wallet_server - ))) - - async def send_download_finished(self, download_id, name, sd_hash): - await self.track( - self._event( - DOWNLOAD_FINISHED, { - 'download_id': download_id, - 'name': name, - 'stream_info': sd_hash - } - ) - ) - - async def send_claim_action(self, action): - await self.track(self._event(CLAIM_ACTION, {'action': action})) - - async def send_new_channel(self): - await self.track(self._event(NEW_CHANNEL)) - - async def send_credits_sent(self): - await self.track(self._event(CREDITS_SENT)) - - async def _send_heartbeat(self): - await self.track(self._event(HEARTBEAT)) - - def _event(self, event, properties: typing.Optional[typing.Dict] = None): - return { - 'userId': 'lbry', - 'event': event, - 'properties': _event_properties(self.installation_id, self.session_id, properties), - 'context': self.context, - 'timestamp': utils.isonow() - } diff --git a/lbry/extras/daemon/client.py b/lbry/extras/daemon/client.py deleted file mode 100644 index 7f0997320..000000000 --- a/lbry/extras/daemon/client.py +++ /dev/null @@ -1,6 +0,0 @@ -from lbry.conf import Config -from lbry.extras.cli import execute_command - - -def daemon_rpc(conf: Config, method: str, **kwargs): - return execute_command(conf, method, kwargs, callback=lambda data: data) diff --git a/lbry/extras/daemon/comment_client.py b/lbry/extras/daemon/comment_client.py deleted file mode 100644 index 840c4a8aa..000000000 --- a/lbry/extras/daemon/comment_client.py +++ /dev/null @@ -1,66 +0,0 @@ -import logging -import time -import hashlib -import binascii - -import ecdsa -from lbry import utils -from lbry.crypto.hash import sha256 -from lbry.wallet.transaction import Output - -log = logging.getLogger(__name__) - - -def get_encoded_signature(signature): - signature = signature.encode() if isinstance(signature, str) else signature - r = int(signature[:int(len(signature) / 2)], 16) - s = int(signature[int(len(signature) / 2):], 16) - return ecdsa.util.sigencode_der(r, s, len(signature) * 4) - - -def cid2hash(claim_id: str) -> bytes: - return binascii.unhexlify(claim_id.encode())[::-1] - - -def is_comment_signed_by_channel(comment: dict, channel: Output, abandon=False): - if isinstance(channel, Output): - try: - signing_field = comment['comment_id'] if abandon else comment['comment'] - pieces = [ - comment['signing_ts'].encode(), - cid2hash(comment['channel_id']), - signing_field.encode() - ] - return Output.is_signature_valid( - get_encoded_signature(comment['signature']), - sha256(b''.join(pieces)), - channel.claim.channel.public_key_bytes - ) - except KeyError: - pass - return False - - -def 
sign_comment(comment: dict, channel: Output, abandon=False): - timestamp = str(int(time.time())) - signing_field = comment['comment_id'] if abandon else comment['comment'] - pieces = [timestamp.encode(), channel.claim_hash, signing_field.encode()] - digest = sha256(b''.join(pieces)) - signature = channel.private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256) - comment.update({ - 'signature': binascii.hexlify(signature).decode(), - 'signing_ts': timestamp - }) - - -async def jsonrpc_post(url: str, method: str, params: dict = None, **kwargs) -> any: - params = params or {} - params.update(kwargs) - json_body = {'jsonrpc': '2.0', 'id': None, 'method': method, 'params': params} - async with utils.aiohttp_request('POST', url, json=json_body) as response: - try: - result = await response.json() - return result['result'] if 'result' in result else result - except Exception as cte: - log.exception('Unable to decode response from server: %s', cte) - return await response.text() diff --git a/lbry/extras/daemon/component.py b/lbry/extras/daemon/component.py deleted file mode 100644 index cce380f85..000000000 --- a/lbry/extras/daemon/component.py +++ /dev/null @@ -1,75 +0,0 @@ -import asyncio -import logging -from lbry.conf import Config -from lbry.extras.daemon.componentmanager import ComponentManager - -log = logging.getLogger(__name__) - - -class ComponentType(type): - def __new__(mcs, name, bases, newattrs): - klass = type.__new__(mcs, name, bases, newattrs) - if name != "Component" and newattrs['__module__'] != 'lbry.testcase': - ComponentManager.default_component_classes[klass.component_name] = klass - return klass - - -class Component(metaclass=ComponentType): - """ - lbry-daemon component helper - - Inheriting classes will be automatically registered with the ComponentManager and must implement setup and stop - methods - """ - - depends_on = [] - component_name = None - - def __init__(self, component_manager): - self.conf: Config = component_manager.conf - self.component_manager = component_manager - self._running = False - - def __lt__(self, other): - return self.component_name < other.component_name - - @property - def running(self): - return self._running - - async def get_status(self): - return - - async def start(self): - raise NotImplementedError() - - async def stop(self): - raise NotImplementedError() - - @property - def component(self): - raise NotImplementedError() - - async def _setup(self): - try: - result = await self.start() - self._running = True - return result - except asyncio.CancelledError: - log.info("Cancelled setup of %s component", self.__class__.__name__) - raise - except Exception as err: - log.exception("Error setting up %s", self.component_name or self.__class__.__name__) - raise err - - async def _stop(self): - try: - result = await self.stop() - self._running = False - return result - except asyncio.CancelledError: - log.info("Cancelled stop of %s component", self.__class__.__name__) - raise - except Exception as err: - log.exception("Error stopping %s", self.__class__.__name__) - raise err diff --git a/lbry/extras/daemon/componentmanager.py b/lbry/extras/daemon/componentmanager.py deleted file mode 100644 index f9f7903ae..000000000 --- a/lbry/extras/daemon/componentmanager.py +++ /dev/null @@ -1,171 +0,0 @@ -import logging -import asyncio -from lbry.conf import Config -from lbry.error import ComponentStartConditionNotMetError -from lbry.dht.peer import PeerManager - -log = logging.getLogger(__name__) - - -class RegisteredConditions: - conditions = 
{} - - -class RequiredConditionType(type): - def __new__(mcs, name, bases, newattrs): - klass = type.__new__(mcs, name, bases, newattrs) - if name != "RequiredCondition": - if klass.name in RegisteredConditions.conditions: - raise SyntaxError("already have a component registered for \"%s\"" % klass.name) - RegisteredConditions.conditions[klass.name] = klass - return klass - - -class RequiredCondition(metaclass=RequiredConditionType): - name = "" - component = "" - message = "" - - @staticmethod - def evaluate(component): - raise NotImplementedError() - - -class ComponentManager: - default_component_classes = {} - - def __init__(self, conf: Config, analytics_manager=None, skip_components=None, - peer_manager=None, **override_components): - self.conf = conf - self.skip_components = skip_components or [] - self.loop = asyncio.get_event_loop() - self.analytics_manager = analytics_manager - self.component_classes = {} - self.components = set() - self.started = asyncio.Event(loop=self.loop) - self.peer_manager = peer_manager or PeerManager(asyncio.get_event_loop_policy().get_event_loop()) - - for component_name, component_class in self.default_component_classes.items(): - if component_name in override_components: - component_class = override_components.pop(component_name) - if component_name not in self.skip_components: - self.component_classes[component_name] = component_class - - if override_components: - raise SyntaxError("unexpected components: %s" % override_components) - - for component_class in self.component_classes.values(): - self.components.add(component_class(self)) - - def evaluate_condition(self, condition_name): - if condition_name not in RegisteredConditions.conditions: - raise NameError(condition_name) - condition = RegisteredConditions.conditions[condition_name] - try: - component = self.get_component(condition.component) - result = condition.evaluate(component) - except Exception: - log.exception('failed to evaluate condition:') - result = False - return result, "" if result else condition.message - - def sort_components(self, reverse=False): - """ - Sort components by requirements - """ - steps = [] - staged = set() - components = set(self.components) - - # components with no requirements - step = [] - for component in set(components): - if not component.depends_on: - step.append(component) - staged.add(component.component_name) - components.remove(component) - - if step: - step.sort() - steps.append(step) - - while components: - step = [] - to_stage = set() - for component in set(components): - reqs_met = 0 - for needed in component.depends_on: - if needed in staged: - reqs_met += 1 - if reqs_met == len(component.depends_on): - step.append(component) - to_stage.add(component.component_name) - components.remove(component) - if step: - step.sort() - staged.update(to_stage) - steps.append(step) - elif components: - raise ComponentStartConditionNotMetError(components) - if reverse: - steps.reverse() - return steps - - async def start(self): - """ Start Components in sequence sorted by requirements """ - for stage in self.sort_components(): - needing_start = [ - component._setup() for component in stage if not component.running - ] - if needing_start: - await asyncio.wait(needing_start) - self.started.set() - - async def stop(self): - """ - Stop Components in reversed startup order - """ - stages = self.sort_components(reverse=True) - for stage in stages: - needing_stop = [ - component._stop() for component in stage if component.running - ] - if needing_stop: - await 
asyncio.wait(needing_stop) - - def all_components_running(self, *component_names): - """ - Check if components are running - - :return: (bool) True if all specified components are running - """ - components = {component.component_name: component for component in self.components} - for component in component_names: - if component not in components: - raise NameError("%s is not a known Component" % component) - if not components[component].running: - return False - return True - - def get_components_status(self): - """ - List status of all the components, whether they are running or not - - :return: (dict) {(str) component_name: (bool) True is running else False} - """ - return { - component.component_name: component.running - for component in self.components - } - - def get_actual_component(self, component_name): - for component in self.components: - if component.component_name == component_name: - return component - raise NameError(component_name) - - def get_component(self, component_name): - return self.get_actual_component(component_name).component - - def has_component(self, component_name): - return any(component for component in self.components if component_name == component.component_name) diff --git a/lbry/extras/daemon/components.py b/lbry/extras/daemon/components.py deleted file mode 100644 index 5271c1558..000000000 --- a/lbry/extras/daemon/components.py +++ /dev/null @@ -1,553 +0,0 @@ -import math -import os -import asyncio -import logging -import binascii -import typing -import base58 - -from aioupnp import __version__ as aioupnp_version -from aioupnp.upnp import UPnP -from aioupnp.fault import UPnPError - -from lbry import utils -from lbry.dht.node import Node -from lbry.dht.peer import is_valid_public_ipv4 -from lbry.dht.blob_announcer import BlobAnnouncer -from lbry.blob.blob_manager import BlobManager -from lbry.blob_exchange.server import BlobServer -from lbry.stream.stream_manager import StreamManager -from lbry.extras.daemon.component import Component -from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager -from lbry.extras.daemon.storage import SQLiteStorage -from lbry.wallet import WalletManager -from lbry.wallet.usage_payment import WalletServerPayer - -log = logging.getLogger(__name__) - -# settings must be initialized before this file is imported - -DATABASE_COMPONENT = "database" -BLOB_COMPONENT = "blob_manager" -WALLET_COMPONENT = "wallet" -WALLET_SERVER_PAYMENTS_COMPONENT = "wallet_server_payments" -DHT_COMPONENT = "dht" -HASH_ANNOUNCER_COMPONENT = "hash_announcer" -STREAM_MANAGER_COMPONENT = "stream_manager" -PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server" -UPNP_COMPONENT = "upnp" -EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager" - - -class DatabaseComponent(Component): - component_name = DATABASE_COMPONENT - - def __init__(self, component_manager): - super().__init__(component_manager) - self.storage = None - - @property - def component(self): - return self.storage - - @staticmethod - def get_current_db_revision(): - return 14 - - @property - def revision_filename(self): - return os.path.join(self.conf.data_dir, 'db_revision') - - def _write_db_revision_file(self, version_num): - with open(self.revision_filename, mode='w') as db_revision: - db_revision.write(str(version_num)) - - async def start(self): - # check directories exist, create them if they don't - log.info("Loading databases") - - if not os.path.exists(self.revision_filename): - log.info("db_revision file not found. 
Creating it") - self._write_db_revision_file(self.get_current_db_revision()) - - # check the db migration and run any needed migrations - with open(self.revision_filename, "r") as revision_read_handle: - old_revision = int(revision_read_handle.read().strip()) - - if old_revision > self.get_current_db_revision(): - raise Exception('This version of lbrynet is not compatible with the database\n' - 'Your database is revision %i, expected %i' % - (old_revision, self.get_current_db_revision())) - if old_revision < self.get_current_db_revision(): - from lbry.extras.daemon.migrator import dbmigrator # pylint: disable=import-outside-toplevel - log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision()) - await asyncio.get_event_loop().run_in_executor( - None, dbmigrator.migrate_db, self.conf, old_revision, self.get_current_db_revision() - ) - self._write_db_revision_file(self.get_current_db_revision()) - log.info("Finished upgrading the databases.") - - self.storage = SQLiteStorage( - self.conf, os.path.join(self.conf.data_dir, "lbrynet.sqlite") - ) - await self.storage.open() - - async def stop(self): - await self.storage.close() - self.storage = None - - -class WalletComponent(Component): - component_name = WALLET_COMPONENT - depends_on = [DATABASE_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.wallet_manager = None - - @property - def component(self): - return self.wallet_manager - - async def get_status(self): - if self.wallet_manager is None: - return - session_pool = self.wallet_manager.ledger.network.session_pool - sessions = session_pool.sessions - connected = None - if self.wallet_manager.ledger.network.client: - addr_and_port = self.wallet_manager.ledger.network.client.server_address_and_port - if addr_and_port: - connected = f"{addr_and_port[0]}:{addr_and_port[1]}" - result = { - 'connected': connected, - 'connected_features': self.wallet_manager.ledger.network.server_features, - 'servers': [ - { - 'host': session.server[0], - 'port': session.server[1], - 'latency': session.connection_latency, - 'availability': session.available, - } for session in sessions - ], - 'known_servers': len(sessions), - 'available_servers': len(list(session_pool.available_sessions)) - } - - if self.wallet_manager.ledger.network.remote_height: - local_height = self.wallet_manager.ledger.local_height_including_downloaded_height - disk_height = len(self.wallet_manager.ledger.headers) - remote_height = self.wallet_manager.ledger.network.remote_height - download_height, target_height = local_height - disk_height, remote_height - disk_height - if target_height > 0: - progress = min(max(math.ceil(float(download_height) / float(target_height) * 100), 0), 100) - else: - progress = 100 - best_hash = await self.wallet_manager.get_best_blockhash() - result.update({ - 'headers_synchronization_progress': progress, - 'blocks': max(local_height, 0), - 'blocks_behind': max(remote_height - local_height, 0), - 'best_blockhash': best_hash, - }) - - return result - - async def start(self): - log.info("Starting wallet") - self.wallet_manager = await WalletManager.from_lbrynet_config(self.conf) - await self.wallet_manager.start() - - async def stop(self): - await self.wallet_manager.stop() - self.wallet_manager = None - - -class WalletServerPaymentsComponent(Component): - component_name = WALLET_SERVER_PAYMENTS_COMPONENT - depends_on = [WALLET_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - 
self.usage_payment_service = WalletServerPayer( - max_fee=self.conf.max_wallet_server_fee, analytics_manager=self.component_manager.analytics_manager, - ) - - @property - def component(self) -> typing.Optional[WalletServerPayer]: - return self.usage_payment_service - - async def start(self): - wallet_manager = self.component_manager.get_component(WALLET_COMPONENT) - await self.usage_payment_service.start(wallet_manager.ledger, wallet_manager.default_wallet) - - async def stop(self): - await self.usage_payment_service.stop() - - async def get_status(self): - return { - 'max_fee': self.usage_payment_service.max_fee, - 'running': self.usage_payment_service.running - } - - -class BlobComponent(Component): - component_name = BLOB_COMPONENT - depends_on = [DATABASE_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.blob_manager: typing.Optional[BlobManager] = None - - @property - def component(self) -> typing.Optional[BlobManager]: - return self.blob_manager - - async def start(self): - storage = self.component_manager.get_component(DATABASE_COMPONENT) - data_store = None - if DHT_COMPONENT not in self.component_manager.skip_components: - dht_node: Node = self.component_manager.get_component(DHT_COMPONENT) - if dht_node: - data_store = dht_node.protocol.data_store - blob_dir = os.path.join(self.conf.data_dir, 'blobfiles') - if not os.path.isdir(blob_dir): - os.mkdir(blob_dir) - self.blob_manager = BlobManager(self.component_manager.loop, blob_dir, storage, self.conf, data_store) - return await self.blob_manager.setup() - - async def stop(self): - self.blob_manager.stop() - - async def get_status(self): - count = 0 - if self.blob_manager: - count = len(self.blob_manager.completed_blob_hashes) - return { - 'finished_blobs': count, - 'connections': {} if not self.blob_manager else self.blob_manager.connection_manager.status - } - - -class DHTComponent(Component): - component_name = DHT_COMPONENT - depends_on = [UPNP_COMPONENT, DATABASE_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.dht_node: typing.Optional[Node] = None - self.external_udp_port = None - self.external_peer_port = None - - @property - def component(self) -> typing.Optional[Node]: - return self.dht_node - - async def get_status(self): - return { - 'node_id': None if not self.dht_node else binascii.hexlify(self.dht_node.protocol.node_id), - 'peers_in_routing_table': 0 if not self.dht_node else len(self.dht_node.protocol.routing_table.get_peers()) - } - - def get_node_id(self): - node_id_filename = os.path.join(self.conf.data_dir, "node_id") - if os.path.isfile(node_id_filename): - with open(node_id_filename, "r") as node_id_file: - return base58.b58decode(str(node_id_file.read()).strip()) - node_id = utils.generate_id() - with open(node_id_filename, "w") as node_id_file: - node_id_file.write(base58.b58encode(node_id).decode()) - return node_id - - async def start(self): - log.info("start the dht") - upnp_component = self.component_manager.get_component(UPNP_COMPONENT) - self.external_peer_port = upnp_component.upnp_redirects.get("TCP", self.conf.tcp_port) - self.external_udp_port = upnp_component.upnp_redirects.get("UDP", self.conf.udp_port) - external_ip = upnp_component.external_ip - storage = self.component_manager.get_component(DATABASE_COMPONENT) - if not external_ip: - external_ip = await utils.get_external_ip() - if not external_ip: - log.warning("failed to get external ip") - - self.dht_node = Node( - self.component_manager.loop, 
- self.component_manager.peer_manager, - node_id=self.get_node_id(), - internal_udp_port=self.conf.udp_port, - udp_port=self.external_udp_port, - external_ip=external_ip, - peer_port=self.external_peer_port, - rpc_timeout=self.conf.node_rpc_timeout, - split_buckets_under_index=self.conf.split_buckets_under_index, - storage=storage - ) - self.dht_node.start(self.conf.network_interface, self.conf.known_dht_nodes) - log.info("Started the dht") - - async def stop(self): - self.dht_node.stop() - - -class HashAnnouncerComponent(Component): - component_name = HASH_ANNOUNCER_COMPONENT - depends_on = [DHT_COMPONENT, DATABASE_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.hash_announcer: typing.Optional[BlobAnnouncer] = None - - @property - def component(self) -> typing.Optional[BlobAnnouncer]: - return self.hash_announcer - - async def start(self): - storage = self.component_manager.get_component(DATABASE_COMPONENT) - dht_node = self.component_manager.get_component(DHT_COMPONENT) - self.hash_announcer = BlobAnnouncer(self.component_manager.loop, dht_node, storage) - self.hash_announcer.start(self.conf.concurrent_blob_announcers) - log.info("Started blob announcer") - - async def stop(self): - self.hash_announcer.stop() - log.info("Stopped blob announcer") - - async def get_status(self): - return { - 'announce_queue_size': 0 if not self.hash_announcer else len(self.hash_announcer.announce_queue) - } - - -class StreamManagerComponent(Component): - component_name = STREAM_MANAGER_COMPONENT - depends_on = [BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.stream_manager: typing.Optional[StreamManager] = None - - @property - def component(self) -> typing.Optional[StreamManager]: - return self.stream_manager - - async def get_status(self): - if not self.stream_manager: - return - return { - 'managed_files': len(self.stream_manager.streams), - } - - async def start(self): - blob_manager = self.component_manager.get_component(BLOB_COMPONENT) - storage = self.component_manager.get_component(DATABASE_COMPONENT) - wallet = self.component_manager.get_component(WALLET_COMPONENT) - node = self.component_manager.get_component(DHT_COMPONENT) \ - if self.component_manager.has_component(DHT_COMPONENT) else None - log.info('Starting the file manager') - loop = asyncio.get_event_loop() - self.stream_manager = StreamManager( - loop, self.conf, blob_manager, wallet, storage, node, self.component_manager.analytics_manager - ) - await self.stream_manager.start() - log.info('Done setting up file manager') - - async def stop(self): - self.stream_manager.stop() - - -class PeerProtocolServerComponent(Component): - component_name = PEER_PROTOCOL_SERVER_COMPONENT - depends_on = [UPNP_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT] - - def __init__(self, component_manager): - super().__init__(component_manager) - self.blob_server: typing.Optional[BlobServer] = None - - @property - def component(self) -> typing.Optional[BlobServer]: - return self.blob_server - - async def start(self): - log.info("start blob server") - blob_manager: BlobManager = self.component_manager.get_component(BLOB_COMPONENT) - wallet: WalletManager = self.component_manager.get_component(WALLET_COMPONENT) - peer_port = self.conf.tcp_port - address = await wallet.get_unused_address() - self.blob_server = BlobServer(asyncio.get_event_loop(), blob_manager, address) - self.blob_server.start_server(peer_port, 
interface=self.conf.network_interface) - await self.blob_server.started_listening.wait() - - async def stop(self): - if self.blob_server: - self.blob_server.stop_server() - - -class UPnPComponent(Component): - component_name = UPNP_COMPONENT - - def __init__(self, component_manager): - super().__init__(component_manager) - self._int_peer_port = self.conf.tcp_port - self._int_dht_node_port = self.conf.udp_port - self.use_upnp = self.conf.use_upnp - self.upnp: typing.Optional[UPnP] = None - self.upnp_redirects = {} - self.external_ip: typing.Optional[str] = None - self._maintain_redirects_task = None - - @property - def component(self) -> 'UPnPComponent': - return self - - async def _repeatedly_maintain_redirects(self, now=True): - while True: - if now: - await self._maintain_redirects() - await asyncio.sleep(360, loop=self.component_manager.loop) - - async def _maintain_redirects(self): - # setup the gateway if necessary - if not self.upnp: - try: - self.upnp = await UPnP.discover(loop=self.component_manager.loop) - log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string) - except Exception as err: - if isinstance(err, asyncio.CancelledError): # TODO: remove when updated to 3.8 - raise - log.warning("upnp discovery failed: %s", err) - self.upnp = None - - # update the external ip - external_ip = None - if self.upnp: - try: - external_ip = await self.upnp.get_external_ip() - if external_ip != "0.0.0.0" and not self.external_ip: - log.info("got external ip from UPnP: %s", external_ip) - except (asyncio.TimeoutError, UPnPError, NotImplementedError): - pass - if external_ip and not is_valid_public_ipv4(external_ip): - log.warning("UPnP returned a private/reserved ip - %s, checking lbry.com fallback", external_ip) - external_ip = await utils.get_external_ip() - if self.external_ip and self.external_ip != external_ip: - log.info("external ip changed from %s to %s", self.external_ip, external_ip) - if external_ip: - self.external_ip = external_ip - # assert self.external_ip is not None # TODO: handle going/starting offline - - if not self.upnp_redirects and self.upnp: # setup missing redirects - log.info("add UPnP port mappings") - upnp_redirects = {} - if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components: - try: - upnp_redirects["TCP"] = await self.upnp.get_next_mapping( - self._int_peer_port, "TCP", "LBRY peer port", self._int_peer_port - ) - except (UPnPError, asyncio.TimeoutError, NotImplementedError): - pass - if DHT_COMPONENT not in self.component_manager.skip_components: - try: - upnp_redirects["UDP"] = await self.upnp.get_next_mapping( - self._int_dht_node_port, "UDP", "LBRY DHT port", self._int_dht_node_port - ) - except (UPnPError, asyncio.TimeoutError, NotImplementedError): - pass - if upnp_redirects: - log.info("set up redirects: %s", upnp_redirects) - self.upnp_redirects.update(upnp_redirects) - elif self.upnp: # check existing redirects are still active - found = set() - mappings = await self.upnp.get_redirects() - for mapping in mappings: - proto = mapping.protocol - if proto in self.upnp_redirects and mapping.external_port == self.upnp_redirects[proto]: - if mapping.lan_address == self.upnp.lan_address: - found.add(proto) - if 'UDP' not in found and DHT_COMPONENT not in self.component_manager.skip_components: - try: - udp_port = await self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port") - self.upnp_redirects['UDP'] = udp_port - log.info("refreshed upnp redirect for dht port: %i", udp_port) - except 
(asyncio.TimeoutError, UPnPError, NotImplementedError): - del self.upnp_redirects['UDP'] - if 'TCP' not in found and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components: - try: - tcp_port = await self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port") - self.upnp_redirects['TCP'] = tcp_port - log.info("refreshed upnp redirect for peer port: %i", tcp_port) - except (asyncio.TimeoutError, UPnPError, NotImplementedError): - del self.upnp_redirects['TCP'] - if ('TCP' in self.upnp_redirects and - PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components) and \ - ('UDP' in self.upnp_redirects and DHT_COMPONENT not in self.component_manager.skip_components): - if self.upnp_redirects: - log.debug("upnp redirects are still active") - - async def start(self): - log.info("detecting external ip") - if not self.use_upnp: - self.external_ip = await utils.get_external_ip() - return - success = False - await self._maintain_redirects() - if self.upnp: - if not self.upnp_redirects and not all([x in self.component_manager.skip_components for x in - (DHT_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT)]): - log.error("failed to setup upnp") - else: - success = True - if self.upnp_redirects: - log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string) - else: - log.error("failed to setup upnp") - if not self.external_ip: - self.external_ip = await utils.get_external_ip() - if self.external_ip: - log.info("detected external ip using lbry.com fallback") - if self.component_manager.analytics_manager: - self.component_manager.loop.create_task( - self.component_manager.analytics_manager.send_upnp_setup_success_fail( - success, await self.get_status() - ) - ) - self._maintain_redirects_task = self.component_manager.loop.create_task( - self._repeatedly_maintain_redirects(now=False) - ) - - async def stop(self): - if self.upnp_redirects: - log.info("Removing upnp redirects: %s", self.upnp_redirects) - await asyncio.wait([ - self.upnp.delete_port_mapping(port, protocol) for protocol, port in self.upnp_redirects.items() - ], loop=self.component_manager.loop) - if self._maintain_redirects_task and not self._maintain_redirects_task.done(): - self._maintain_redirects_task.cancel() - - async def get_status(self): - return { - 'aioupnp_version': aioupnp_version, - 'redirects': self.upnp_redirects, - 'gateway': 'No gateway found' if not self.upnp else self.upnp.gateway.manufacturer_string, - 'dht_redirect_set': 'UDP' in self.upnp_redirects, - 'peer_redirect_set': 'TCP' in self.upnp_redirects, - 'external_ip': self.external_ip - } - - -class ExchangeRateManagerComponent(Component): - component_name = EXCHANGE_RATE_MANAGER_COMPONENT - - def __init__(self, component_manager): - super().__init__(component_manager) - self.exchange_rate_manager = ExchangeRateManager() - - @property - def component(self) -> ExchangeRateManager: - return self.exchange_rate_manager - - async def start(self): - self.exchange_rate_manager.start() - - async def stop(self): - self.exchange_rate_manager.stop() diff --git a/lbry/extras/daemon/daemon.py b/lbry/extras/daemon/daemon.py deleted file mode 100644 index 45b0003af..000000000 --- a/lbry/extras/daemon/daemon.py +++ /dev/null @@ -1,5340 +0,0 @@ -import linecache -import os -import re -import asyncio -import logging -import json -import time -import inspect -import typing -import random -import hashlib -import tracemalloc -from urllib.parse import urlencode, quote -from typing import Callable, Optional, List 
-from binascii import hexlify, unhexlify -from traceback import format_exc -from functools import wraps, partial - -import ecdsa -import base58 -from aiohttp import web -from prometheus_client import generate_latest as prom_generate_latest -from google.protobuf.message import DecodeError -from lbry.wallet import ( - Wallet, ENCRYPT_ON_DISK, SingleKey, HierarchicalDeterministic, - Transaction, Output, Input, Account, database -) -from lbry.wallet.dewies import dewies_to_lbc, lbc_to_dewies, dict_values_to_lbc -from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPE_NAMES - -from lbry import utils -from lbry.conf import Config, Setting, NOT_SET -from lbry.blob.blob_file import is_valid_blobhash, BlobBuffer -from lbry.blob_exchange.downloader import download_blob -from lbry.dht.peer import make_kademlia_peer -from lbry.error import ( - DownloadSDTimeoutError, ComponentsNotStartedError, ComponentStartConditionNotMetError, - CommandDoesNotExistError -) -from lbry.extras import system_info -from lbry.extras.daemon import analytics -from lbry.extras.daemon.components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT -from lbry.extras.daemon.components import STREAM_MANAGER_COMPONENT -from lbry.extras.daemon.components import EXCHANGE_RATE_MANAGER_COMPONENT, UPNP_COMPONENT -from lbry.extras.daemon.componentmanager import RequiredCondition -from lbry.extras.daemon.componentmanager import ComponentManager -from lbry.extras.daemon.json_response_encoder import JSONResponseEncoder -from lbry.extras.daemon import comment_client -from lbry.extras.daemon.undecorated import undecorated -from lbry.file_analysis import VideoFileAnalyzer -from lbry.schema.claim import Claim -from lbry.schema.url import URL - -if typing.TYPE_CHECKING: - from lbry.blob.blob_manager import BlobManager - from lbry.dht.node import Node - from lbry.extras.daemon.components import UPnPComponent - from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager - from lbry.extras.daemon.storage import SQLiteStorage - from lbry.stream.stream_manager import StreamManager - from lbry.wallet import WalletManager, Ledger - -log = logging.getLogger(__name__) - - -def is_transactional_function(name): - for action in ('create', 'update', 'abandon', 'send', 'fund'): - if action in name: - return True - - -def requires(*components, **conditions): - if conditions and ["conditions"] != list(conditions.keys()): - raise SyntaxError("invalid conditions argument") - condition_names = conditions.get("conditions", []) - - def _wrap(method): - @wraps(method) - def _inner(*args, **kwargs): - component_manager = args[0].component_manager - for condition_name in condition_names: - condition_result, err_msg = component_manager.evaluate_condition(condition_name) - if not condition_result: - raise ComponentStartConditionNotMetError(err_msg) - if not component_manager.all_components_running(*components): - raise ComponentsNotStartedError( - f"the following required components have not yet started: {json.dumps(components)}" - ) - return method(*args, **kwargs) - - return _inner - - return _wrap - - -def deprecated(new_command=None): - def _deprecated_wrapper(f): - f.new_command = new_command - f._deprecated = True - return f - - return _deprecated_wrapper - - -INITIALIZING_CODE = 'initializing' - -# TODO: make this consistent with the stages in Downloader.py -DOWNLOAD_METADATA_CODE = 'downloading_metadata' -DOWNLOAD_TIMEOUT_CODE = 'timeout' -DOWNLOAD_RUNNING_CODE = 'running' -DOWNLOAD_STOPPED_CODE = 'stopped' -STREAM_STAGES = [ 
- (INITIALIZING_CODE, 'Initializing'), - (DOWNLOAD_METADATA_CODE, 'Downloading metadata'), - (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'), - (DOWNLOAD_STOPPED_CODE, 'Paused stream'), - (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out') -] - -CONNECTION_STATUS_CONNECTED = 'connected' -CONNECTION_STATUS_NETWORK = 'network_connection' -CONNECTION_MESSAGES = { - CONNECTION_STATUS_CONNECTED: 'No connection problems detected', - CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted", -} - -SHORT_ID_LEN = 20 -MAX_UPDATE_FEE_ESTIMATE = 0.3 -DEFAULT_PAGE_SIZE = 20 - -VALID_FULL_CLAIM_ID = re.compile('[0-9a-fA-F]{40}') - - -def encode_pagination_doc(items): - return { - "page": "Page number of the current items.", - "page_size": "Number of items to show on a page.", - "total_pages": "Total number of pages.", - "total_items": "Total number of items.", - "items": [items], - } - - -async def paginate_rows(get_records: Callable, get_record_count: Optional[Callable], - page: Optional[int], page_size: Optional[int], **constraints): - page = max(1, page or 1) - page_size = max(1, page_size or DEFAULT_PAGE_SIZE) - constraints.update({ - "offset": page_size * (page - 1), - "limit": page_size - }) - items = await get_records(**constraints) - result = {"items": items, "page": page, "page_size": page_size} - if get_record_count is not None: - total_items = await get_record_count(**constraints) - result["total_pages"] = int((total_items + (page_size - 1)) / page_size) - result["total_items"] = total_items - return result - - -def paginate_list(items: List, page: Optional[int], page_size: Optional[int]): - page = max(1, page or 1) - page_size = max(1, page_size or DEFAULT_PAGE_SIZE) - total_items = len(items) - offset = page_size * (page - 1) - subitems = [] - if offset <= total_items: - subitems = items[offset:offset+page_size] - return { - "items": subitems, - "total_pages": int((total_items + (page_size - 1)) / page_size), - "total_items": total_items, - "page": page, "page_size": page_size - } - - -DHT_HAS_CONTACTS = "dht_has_contacts" - - -class DHTHasContacts(RequiredCondition): - name = DHT_HAS_CONTACTS - component = DHT_COMPONENT - message = "your node is not connected to the dht" - - @staticmethod - def evaluate(component): - return len(component.contacts) > 0 - - -class JSONRPCError: - # http://www.jsonrpc.org/specification#error_object - CODE_PARSE_ERROR = -32700 # Invalid JSON. Error while parsing the JSON text. - CODE_INVALID_REQUEST = -32600 # The JSON sent is not a valid Request object. - CODE_METHOD_NOT_FOUND = -32601 # The method does not exist / is not available. - CODE_INVALID_PARAMS = -32602 # Invalid method parameter(s). - CODE_INTERNAL_ERROR = -32603 # Internal JSON-RPC error (I think this is like a 500?) - CODE_APPLICATION_ERROR = -32500 # Generic error with our app?? - CODE_AUTHENTICATION_ERROR = -32501 # Authentication failed - - MESSAGES = { - CODE_PARSE_ERROR: "Parse Error. 
Data is not valid JSON.", - CODE_INVALID_REQUEST: "JSON data is not a valid Request", - CODE_METHOD_NOT_FOUND: "Method Not Found", - CODE_INVALID_PARAMS: "Invalid Params", - CODE_INTERNAL_ERROR: "Internal Error", - CODE_AUTHENTICATION_ERROR: "Authentication Failed", - } - - HTTP_CODES = { - CODE_INVALID_REQUEST: 400, - CODE_PARSE_ERROR: 400, - CODE_INVALID_PARAMS: 400, - CODE_METHOD_NOT_FOUND: 404, - CODE_INTERNAL_ERROR: 500, - CODE_APPLICATION_ERROR: 500, - CODE_AUTHENTICATION_ERROR: 401, - } - - def __init__(self, code: int, message: str, data: dict = None): - assert code and isinstance(code, int), "'code' must be an int" - assert message and isinstance(message, str), "'message' must be a string" - assert data is None or isinstance(data, dict), "'data' must be None or a dict" - self.code = code - self.message = message - self.data = data or {} - - def to_dict(self): - return { - 'code': self.code, - 'message': self.message, - 'data': self.data, - } - - @staticmethod - def filter_traceback(traceback): - result = [] - if traceback is not None: - result = trace_lines = traceback.split("\n") - for i, t in enumerate(trace_lines): - if "--- ---" in t: - if len(trace_lines) > i + 1: - result = [j for j in trace_lines[i + 1:] if j] - break - return result - - @classmethod - def create_command_exception(cls, command, args, kwargs, exception, traceback): - if 'password' in kwargs and isinstance(kwargs['password'], str): - kwargs['password'] = '*'*len(kwargs['password']) - return cls( - cls.CODE_APPLICATION_ERROR, str(exception), { - 'name': exception.__class__.__name__, - 'traceback': cls.filter_traceback(traceback), - 'command': command, - 'args': args, - 'kwargs': kwargs, - } - ) - - -class UnknownAPIMethodError(Exception): - pass - - -def jsonrpc_dumps_pretty(obj, **kwargs): - if isinstance(obj, JSONRPCError): - data = {"jsonrpc": "2.0", "error": obj.to_dict()} - else: - data = {"jsonrpc": "2.0", "result": obj} - return json.dumps(data, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n" - - -def trap(err, *to_trap): - err.trap(*to_trap) - - -class JSONRPCServerType(type): - def __new__(mcs, name, bases, newattrs): - klass = type.__new__(mcs, name, bases, newattrs) - klass.callable_methods = {} - klass.deprecated_methods = {} - - for methodname in dir(klass): - if methodname.startswith("jsonrpc_"): - method = getattr(klass, methodname) - if not hasattr(method, '_deprecated'): - klass.callable_methods.update({methodname.split("jsonrpc_")[1]: method}) - else: - klass.deprecated_methods.update({methodname.split("jsonrpc_")[1]: method}) - return klass - - -class Daemon(metaclass=JSONRPCServerType): - """ - LBRYnet daemon, a jsonrpc interface to lbry functions - """ - callable_methods: dict - deprecated_methods: dict - - def __init__(self, conf: Config, component_manager: typing.Optional[ComponentManager] = None): - self.conf = conf - self.platform_info = system_info.get_platform() - self._video_file_analyzer = VideoFileAnalyzer(conf) - self._node_id = None - self._installation_id = None - self.session_id = base58.b58encode(utils.generate_id()).decode() - self.analytics_manager = analytics.AnalyticsManager(conf, self.installation_id, self.session_id) - self.component_manager = component_manager or ComponentManager( - conf, analytics_manager=self.analytics_manager, - skip_components=conf.components_to_skip or [] - ) - self.component_startup_task = None - self._connection_status: typing.Tuple[float, bool] = (self.component_manager.loop.time(), False) - - 
logging.getLogger('aiohttp.access').setLevel(logging.WARN) - rpc_app = web.Application() - rpc_app.router.add_get('/lbryapi', self.handle_old_jsonrpc) - rpc_app.router.add_post('/lbryapi', self.handle_old_jsonrpc) - rpc_app.router.add_post('/', self.handle_old_jsonrpc) - self.rpc_runner = web.AppRunner(rpc_app) - - streaming_app = web.Application() - streaming_app.router.add_get('/get/{claim_name}', self.handle_stream_get_request) - streaming_app.router.add_get('/get/{claim_name}/{claim_id}', self.handle_stream_get_request) - streaming_app.router.add_get('/stream/{sd_hash}', self.handle_stream_range_request) - self.streaming_runner = web.AppRunner(streaming_app) - - prom_app = web.Application() - prom_app.router.add_get('/metrics', self.handle_metrics_get_request) - self.metrics_runner = web.AppRunner(prom_app) - - self.need_connection_status_refresh = asyncio.Event() - self._connection_status_task: Optional[asyncio.Task] = None - - @property - def dht_node(self) -> typing.Optional['Node']: - return self.component_manager.get_component(DHT_COMPONENT) - - @property - def wallet_manager(self) -> typing.Optional['WalletManager']: - return self.component_manager.get_component(WALLET_COMPONENT) - - @property - def storage(self) -> typing.Optional['SQLiteStorage']: - return self.component_manager.get_component(DATABASE_COMPONENT) - - @property - def stream_manager(self) -> typing.Optional['StreamManager']: - return self.component_manager.get_component(STREAM_MANAGER_COMPONENT) - - @property - def exchange_rate_manager(self) -> typing.Optional['ExchangeRateManager']: - return self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT) - - @property - def blob_manager(self) -> typing.Optional['BlobManager']: - return self.component_manager.get_component(BLOB_COMPONENT) - - @property - def upnp(self) -> typing.Optional['UPnPComponent']: - return self.component_manager.get_component(UPNP_COMPONENT) - - @classmethod - def get_api_definitions(cls): - prefix = 'jsonrpc_' - not_grouped = ['routing_table_get', 'ffmpeg_find'] - api = { - 'groups': { - group_name[:-len('_DOC')].lower(): getattr(cls, group_name).strip() - for group_name in dir(cls) if group_name.endswith('_DOC') - }, - 'commands': {} - } - for jsonrpc_method in dir(cls): - if jsonrpc_method.startswith(prefix): - full_name = jsonrpc_method[len(prefix):] - method = getattr(cls, jsonrpc_method) - if full_name in not_grouped: - name_parts = [full_name] - else: - name_parts = full_name.split('_', 1) - if len(name_parts) == 1: - group = None - name, = name_parts - elif len(name_parts) == 2: - group, name = name_parts - assert group in api['groups'], \ - f"Group {group} does not have doc string for command {full_name}." 
- else: - raise NameError(f'Could not parse method name: {jsonrpc_method}') - api['commands'][full_name] = { - 'api_method_name': full_name, - 'name': name, - 'group': group, - 'doc': method.__doc__, - 'method': method, - } - if hasattr(method, '_deprecated'): - api['commands'][full_name]['replaced_by'] = method.new_command - - for command in api['commands'].values(): - if 'replaced_by' in command: - command['replaced_by'] = api['commands'][command['replaced_by']] - - return api - - @property - def db_revision_file_path(self): - return os.path.join(self.conf.data_dir, 'db_revision') - - @property - def installation_id(self): - install_id_filename = os.path.join(self.conf.data_dir, "install_id") - if not self._installation_id: - if os.path.isfile(install_id_filename): - with open(install_id_filename, "r") as install_id_file: - self._installation_id = str(install_id_file.read()).strip() - if not self._installation_id: - self._installation_id = base58.b58encode(utils.generate_id()).decode() - with open(install_id_filename, "w") as install_id_file: - install_id_file.write(self._installation_id) - return self._installation_id - - def ensure_data_dir(self): - if not os.path.isdir(self.conf.data_dir): - os.makedirs(self.conf.data_dir) - if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")): - os.makedirs(os.path.join(self.conf.data_dir, "blobfiles")) - return self.conf.data_dir - - def ensure_wallet_dir(self): - if not os.path.isdir(self.conf.wallet_dir): - os.makedirs(self.conf.wallet_dir) - - def ensure_download_dir(self): - if not os.path.isdir(self.conf.download_dir): - os.makedirs(self.conf.download_dir) - - async def update_connection_status(self): - connected = await utils.async_check_connection() - if connected and not self._connection_status[1]: - log.info("detected internet connection is working") - elif not connected and self._connection_status[1]: - log.warning("detected internet connection was lost") - self._connection_status = (self.component_manager.loop.time(), connected) - - async def keep_connection_status_up_to_date(self): - while True: - try: - await asyncio.wait_for(self.need_connection_status_refresh.wait(), 300) - except asyncio.TimeoutError: - pass - await self.update_connection_status() - self.need_connection_status_refresh.clear() - - async def start(self): - log.info("Starting LBRYNet Daemon") - log.debug("Settings: %s", json.dumps(self.conf.settings_dict, indent=2)) - log.info("Platform: %s", json.dumps(self.platform_info, indent=2)) - - self.need_connection_status_refresh.set() - self._connection_status_task = self.component_manager.loop.create_task( - self.keep_connection_status_up_to_date() - ) - - await self.analytics_manager.send_server_startup() - await self.rpc_runner.setup() - await self.streaming_runner.setup() - await self.metrics_runner.setup() - - try: - rpc_site = web.TCPSite(self.rpc_runner, self.conf.api_host, self.conf.api_port, shutdown_timeout=.5) - await rpc_site.start() - log.info('RPC server listening on TCP %s:%i', *rpc_site._server.sockets[0].getsockname()[:2]) - except OSError as e: - log.error('RPC server failed to bind TCP %s:%i', self.conf.api_host, self.conf.api_port) - await self.analytics_manager.send_server_startup_error(str(e)) - raise SystemExit() - - try: - streaming_site = web.TCPSite(self.streaming_runner, self.conf.streaming_host, self.conf.streaming_port, - shutdown_timeout=.5) - await streaming_site.start() - log.info('media server listening on TCP %s:%i', *streaming_site._server.sockets[0].getsockname()[:2]) - - 
except OSError as e: - log.error('media server failed to bind TCP %s:%i', self.conf.streaming_host, self.conf.streaming_port) - await self.analytics_manager.send_server_startup_error(str(e)) - raise SystemExit() - - if self.conf.prometheus_port: - try: - prom_site = web.TCPSite(self.metrics_runner, "0.0.0.0", self.conf.prometheus_port, shutdown_timeout=.5) - await prom_site.start() - log.info('metrics server listening on TCP %s:%i', *prom_site._server.sockets[0].getsockname()[:2]) - except OSError as e: - log.error('metrics server failed to bind TCP :%i', self.conf.prometheus_port) - await self.analytics_manager.send_server_startup_error(str(e)) - raise SystemExit() - - try: - await self.initialize() - except asyncio.CancelledError: - log.info("shutting down before finished starting") - await self.analytics_manager.send_server_startup_error("shutting down before finished starting") - raise - except Exception as e: - await self.analytics_manager.send_server_startup_error(str(e)) - log.exception('Failed to start lbrynet') - raise SystemExit() - - await self.analytics_manager.send_server_startup_success() - - async def initialize(self): - self.ensure_data_dir() - self.ensure_wallet_dir() - self.ensure_download_dir() - if not self.analytics_manager.is_started: - await self.analytics_manager.start() - self.component_startup_task = asyncio.create_task(self.component_manager.start()) - await self.component_startup_task - - async def stop(self): - if self._connection_status_task: - if not self._connection_status_task.done(): - self._connection_status_task.cancel() - self._connection_status_task = None - if self.component_startup_task is not None: - if self.component_startup_task.done(): - await self.component_manager.stop() - else: - self.component_startup_task.cancel() - log.info("stopped api components") - await self.rpc_runner.cleanup() - await self.streaming_runner.cleanup() - await self.metrics_runner.cleanup() - log.info("stopped api server") - if self.analytics_manager.is_started: - self.analytics_manager.stop() - log.info("finished shutting down") - - async def handle_old_jsonrpc(self, request): - data = await request.json() - params = data.get('params', {}) - include_protobuf = params.pop('include_protobuf', False) if isinstance(params, dict) else False - result = await self._process_rpc_call(data) - ledger = None - if 'wallet' in self.component_manager.get_components_status(): - # self.ledger only available if wallet component is not skipped - ledger = self.ledger - try: - encoded_result = jsonrpc_dumps_pretty( - result, ledger=ledger, include_protobuf=include_protobuf) - except Exception: - log.exception('Failed to encode JSON RPC result:') - encoded_result = jsonrpc_dumps_pretty(JSONRPCError( - JSONRPCError.CODE_APPLICATION_ERROR, - 'After successfully executing the command, failed to encode result for JSON RPC response.', - {'traceback': format_exc()} - ), ledger=ledger) - return web.Response( - text=encoded_result, - content_type='application/json' - ) - - async def handle_metrics_get_request(self, request: web.Request): - try: - return web.Response( - text=prom_generate_latest().decode(), - content_type='text/plain; version=0.0.4' - ) - except Exception: - log.exception('could not generate prometheus data') - raise - - async def handle_stream_get_request(self, request: web.Request): - if not self.conf.streaming_get: - log.warning("streaming_get is disabled, rejecting request") - raise web.HTTPForbidden() - name_and_claim_id = request.path.split("/get/")[1] - if "/" not in 
name_and_claim_id: - uri = f"lbry://{name_and_claim_id}" - else: - name, claim_id = name_and_claim_id.split("/") - uri = f"lbry://{name}#{claim_id}" - if not self.stream_manager.started.is_set(): - await self.stream_manager.started.wait() - stream = await self.jsonrpc_get(uri) - if isinstance(stream, dict): - raise web.HTTPServerError(text=stream['error']) - raise web.HTTPFound(f"/stream/{stream.sd_hash}") - - async def handle_stream_range_request(self, request: web.Request): - try: - return await self._handle_stream_range_request(request) - except web.HTTPException as err: - log.warning("http code during /stream range request: %s", err) - raise err - except asyncio.CancelledError: - # if not excepted here, it would bubble up the error to the console. every time you closed - # a running tab, you'd get this error in the console - log.debug("/stream range request cancelled") - except Exception: - log.exception("error handling /stream range request") - raise - finally: - log.debug("finished handling /stream range request") - - async def _handle_stream_range_request(self, request: web.Request): - sd_hash = request.path.split("/stream/")[1] - if not self.stream_manager.started.is_set(): - await self.stream_manager.started.wait() - if sd_hash not in self.stream_manager.streams: - return web.HTTPNotFound() - return await self.stream_manager.stream_partial_content(request, sd_hash) - - async def _process_rpc_call(self, data): - args = data.get('params', {}) - - try: - function_name = data['method'] - except KeyError: - return JSONRPCError( - JSONRPCError.CODE_METHOD_NOT_FOUND, - "Missing 'method' value in request." - ) - - try: - method = self._get_jsonrpc_method(function_name) - except UnknownAPIMethodError: - return JSONRPCError( - JSONRPCError.CODE_METHOD_NOT_FOUND, - str(CommandDoesNotExistError(function_name)) - ) - - if args in ([{}], []): - _args, _kwargs = (), {} - elif isinstance(args, dict): - _args, _kwargs = (), args - elif isinstance(args, list) and len(args) == 1 and isinstance(args[0], dict): - # TODO: this is for backwards compatibility. 
Remove this once API and UI are updated - # TODO: also delete EMPTY_PARAMS then - _args, _kwargs = (), args[0] - elif isinstance(args, list) and len(args) == 2 and \ - isinstance(args[0], list) and isinstance(args[1], dict): - _args, _kwargs = args - else: - return JSONRPCError( - JSONRPCError.CODE_INVALID_PARAMS, - f"Invalid parameters format: {args}" - ) - - if is_transactional_function(function_name): - log.info("%s %s %s", function_name, _args, _kwargs) - - params_error, erroneous_params = self._check_params(method, _args, _kwargs) - if params_error is not None: - params_error_message = '{} for {} command: {}'.format( - params_error, function_name, ', '.join(erroneous_params) - ) - log.warning(params_error_message) - return JSONRPCError( - JSONRPCError.CODE_INVALID_PARAMS, - params_error_message, - ) - - try: - result = method(self, *_args, **_kwargs) - if asyncio.iscoroutine(result): - result = await result - return result - except asyncio.CancelledError: - log.info("cancelled API call for: %s", function_name) - raise - except Exception as e: # pylint: disable=broad-except - log.exception("error handling api request") - return JSONRPCError.create_command_exception( - command=function_name, args=_args, kwargs=_kwargs, exception=e, traceback=format_exc() - ) - - def _verify_method_is_callable(self, function_path): - if function_path not in self.callable_methods: - raise UnknownAPIMethodError(function_path) - - def _get_jsonrpc_method(self, function_path): - if function_path in self.deprecated_methods: - new_command = self.deprecated_methods[function_path].new_command - log.warning('API function \"%s\" is deprecated, please update to use \"%s\"', - function_path, new_command) - function_path = new_command - self._verify_method_is_callable(function_path) - return self.callable_methods.get(function_path) - - @staticmethod - def _check_params(function, args_tup, args_dict): - argspec = inspect.getfullargspec(undecorated(function)) - num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults) - - duplicate_params = [ - duplicate_param - for duplicate_param in argspec.args[1:len(args_tup) + 1] - if duplicate_param in args_dict - ] - - if duplicate_params: - return 'Duplicate parameters', duplicate_params - - missing_required_params = [ - required_param - for required_param in argspec.args[len(args_tup) + 1:-num_optional_params] - if required_param not in args_dict - ] - if len(missing_required_params) > 0: - return 'Missing required parameters', missing_required_params - - extraneous_params = [] if argspec.varkw is not None else [ - extra_param - for extra_param in args_dict - if extra_param not in argspec.args[1:] - ] - if len(extraneous_params) > 0: - return 'Extraneous parameters', extraneous_params - - return None, None - - @property - def ledger(self) -> Optional['Ledger']: - try: - return self.wallet_manager.default_account.ledger - except AttributeError: - return None - - async def get_est_cost_from_uri(self, uri: str) -> typing.Optional[float]: - """ - Resolve a name and return the estimated stream cost - """ - - resolved = await self.resolve([], uri) - if resolved: - claim_response = resolved[uri] - else: - claim_response = None - - if claim_response and 'claim' in claim_response: - if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None: - claim_value = Claim.from_bytes(claim_response['claim']['value']) - if not claim_value.stream.has_fee: - return 0.0 - return round( - self.exchange_rate_manager.convert_currency( - 
claim_value.stream.fee.currency, "LBC", claim_value.stream.fee.amount - ), 5 - ) - else: - log.warning("Failed to estimate cost for %s", uri) - - ############################################################################ - # # - # JSON-RPC API methods start here # - # # - ############################################################################ - - def jsonrpc_stop(self): # pylint: disable=no-self-use - """ - Stop lbrynet API server. - - Usage: - stop - - Options: - None - - Returns: - (string) Shutdown message - """ - - def shutdown(): - raise web.GracefulExit() - - log.info("Shutting down lbrynet daemon") - asyncio.get_event_loop().call_later(0, shutdown) - return "Shutting down" - - async def jsonrpc_ffmpeg_find(self): - """ - Get ffmpeg installation information - - Usage: - ffmpeg_find - - Options: - None - - Returns: - (dict) Dictionary of ffmpeg information - { - 'available': (bool) found ffmpeg, - 'which': (str) path to ffmpeg, - 'analyze_audio_volume': (bool) should ffmpeg analyze audio - } - """ - return await self._video_file_analyzer.status(reset=True, recheck=True) - - async def jsonrpc_status(self): - """ - Get daemon status - - Usage: - status - - Options: - None - - Returns: - (dict) lbrynet-daemon status - { - 'installation_id': (str) installation id - base58, - 'is_running': (bool), - 'skipped_components': (list) [names of skipped components (str)], - 'startup_status': { Does not include components which have been skipped - 'blob_manager': (bool), - 'blockchain_headers': (bool), - 'database': (bool), - 'dht': (bool), - 'exchange_rate_manager': (bool), - 'hash_announcer': (bool), - 'peer_protocol_server': (bool), - 'stream_manager': (bool), - 'upnp': (bool), - 'wallet': (bool), - }, - 'connection_status': { - 'code': (str) connection status code, - 'message': (str) connection status message - }, - 'blockchain_headers': { - 'downloading_headers': (bool), - 'download_progress': (float) 0-100.0 - }, - 'wallet': { - 'connected': (str) host and port of the connected spv server, - 'blocks': (int) local blockchain height, - 'blocks_behind': (int) remote_height - local_height, - 'best_blockhash': (str) block hash of most recent block, - 'is_encrypted': (bool), - 'is_locked': (bool), - 'connected_servers': (list) [ - { - 'host': (str) server hostname, - 'port': (int) server port, - 'latency': (int) milliseconds - } - ], - }, - 'dht': { - 'node_id': (str) lbry dht node id - hex encoded, - 'peers_in_routing_table': (int) the number of peers in the routing table, - }, - 'blob_manager': { - 'finished_blobs': (int) number of finished blobs in the blob manager, - 'connections': { - 'incoming_bps': { - : (int) bytes per second received, - }, - 'outgoing_bps': { - : (int) bytes per second sent, - }, - 'total_outgoing_mps': (float) megabytes per second sent, - 'total_incoming_mps': (float) megabytes per second received, - 'time': (float) timestamp - } - }, - 'hash_announcer': { - 'announce_queue_size': (int) number of blobs currently queued to be announced - }, - 'stream_manager': { - 'managed_files': (int) count of files in the stream manager, - }, - 'upnp': { - 'aioupnp_version': (str), - 'redirects': { - : (int) external_port, - }, - 'gateway': (str) manufacturer and model, - 'dht_redirect_set': (bool), - 'peer_redirect_set': (bool), - 'external_ip': (str) external ip address, - } - } - """ - - if not self._connection_status[1]: - self.need_connection_status_refresh.set() - connection_code = CONNECTION_STATUS_CONNECTED if self._connection_status[1] else CONNECTION_STATUS_NETWORK - 
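Every jsonrpc_* method on this class, status included, is reached through handle_old_jsonrpc above: the client POSTs a JSON body carrying "method" and optional "params" (a keyword dict, or an [args, kwargs] pair, as _process_rpc_call accepts) and receives back either a {"jsonrpc": "2.0", "result": ...} or {"jsonrpc": "2.0", "error": ...} document. A hedged client-side sketch, assuming the daemon is reachable on localhost port 5279; the real host and port come from the api_host/api_port settings and may differ:

import json
import urllib.request

def daemon_call(method, params=None, url="http://localhost:5279/"):
    # Build the request body _process_rpc_call() expects: a method name plus
    # keyword parameters (an [args, kwargs] pair is also accepted).
    body = json.dumps({"method": method, "params": params or {}}).encode()
    request = urllib.request.Request(
        url, data=body, headers={"Content-Type": "application/json"}
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())

# e.g. daemon_call("status"), daemon_call("version"),
# or daemon_call("resolve", {"urls": ["lbry://example"]})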
ffmpeg_status = await self._video_file_analyzer.status() - running_components = self.component_manager.get_components_status() - response = { - 'installation_id': self.installation_id, - 'is_running': all(running_components.values()), - 'skipped_components': self.component_manager.skip_components, - 'startup_status': running_components, - 'connection_status': { - 'code': connection_code, - 'message': CONNECTION_MESSAGES[connection_code], - }, - 'ffmpeg_status': ffmpeg_status - } - for component in self.component_manager.components: - status = await component.get_status() - if status: - response[component.component_name] = status - return response - - def jsonrpc_version(self): # pylint: disable=no-self-use - """ - Get lbrynet API server version information - - Usage: - version - - Options: - None - - Returns: - (dict) Dictionary of lbry version information - { - 'processor': (str) processor type, - 'python_version': (str) python version, - 'platform': (str) platform string, - 'os_release': (str) os release string, - 'os_system': (str) os name, - 'version': (str) lbrynet version, - 'build': (str) "dev" | "qa" | "rc" | "release", - } - """ - return self.platform_info - - @requires(WALLET_COMPONENT) - async def jsonrpc_resolve(self, urls: typing.Union[str, list], wallet_id=None, **kwargs): - """ - Get the claim that a URL refers to. - - Usage: - resolve ... [--wallet_id=] - [--include_purchase_receipt] - [--include_is_my_output] - [--include_sent_supports] - [--include_sent_tips] - [--include_received_tips] - - Options: - --urls= : (str, list) one or more urls to resolve - --wallet_id= : (str) wallet to check for claim purchase reciepts - --include_purchase_receipt : (bool) lookup and include a receipt if this wallet - has purchased the claim being resolved - --include_is_my_output : (bool) lookup and include a boolean indicating - if claim being resolved is yours - --include_sent_supports : (bool) lookup and sum the total amount - of supports you've made to this claim - --include_sent_tips : (bool) lookup and sum the total amount - of tips you've made to this claim - (only makes sense when claim is not yours) - --include_received_tips : (bool) lookup and sum the total amount - of tips you've received to this claim - (only makes sense when claim is yours) - - Returns: - Dictionary of results, keyed by url - '': { - If a resolution error occurs: - 'error': Error message - - If the url resolves to a channel or a claim in a channel: - 'certificate': { - 'address': (str) claim address, - 'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number (or -1 if unknown), - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'confirmations': (int) claim depth, - 'timestamp': (int) timestamp of the block that included this claim tx, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the certificate claim, - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}], - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - - If the url resolves to a channel: - 'claims_in_channel': (int) number of claims in the channel, - - If the url resolves to a claim: - 'claim': { - 'address': (str) claim address, - 
'amount': (float) claim amount, - 'effective_amount': (float) claim amount including supports, - 'claim_id': (str) claim id, - 'claim_sequence': (int) claim sequence number (or -1 if unknown), - 'decoded_claim': (bool) whether or not the claim value was decoded, - 'height': (int) claim height, - 'depth': (int) claim depth, - 'has_signature': (bool) included if decoded_claim - 'name': (str) claim name, - 'permanent_url': (str) permanent url of the claim, - 'channel_name': (str) channel name if claim is in a channel - 'supports: (list) list of supports [{'txid': (str) txid, - 'nout': (int) nout, - 'amount': (float) amount}] - 'txid': (str) claim txid, - 'nout': (str) claim nout, - 'signature_is_valid': (bool), included if has_signature, - 'value': ClaimDict if decoded, otherwise hex string - } - } - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - - if isinstance(urls, str): - urls = [urls] - - results = {} - - valid_urls = set() - for url in urls: - try: - URL.parse(url) - valid_urls.add(url) - except ValueError: - results[url] = {"error": f"{url} is not a valid url"} - - resolved = await self.resolve(wallet.accounts, list(valid_urls), **kwargs) - - for resolved_uri in resolved: - results[resolved_uri] = resolved[resolved_uri] if resolved[resolved_uri] is not None else \ - {"error": f"{resolved_uri} did not resolve to a claim"} - - return results - - @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, - STREAM_MANAGER_COMPONENT) - async def jsonrpc_get( - self, uri, file_name=None, download_directory=None, timeout=None, save_file=None, wallet_id=None): - """ - Download stream from a LBRY name. - - Usage: - get [ | --file_name=] - [ | --download_directory=] [ | --timeout=] - [--save_file=] [--wallet_id=] - - - Options: - --uri= : (str) uri of the content to download - --file_name= : (str) specified name for the downloaded file, overrides the stream file name - --download_directory= : (str) full path to the directory to download into - --timeout= : (int) download timeout in number of seconds - --save_file= : (bool) save the file to the downloads directory - --wallet_id= : (str) wallet to check for claim purchase reciepts - - Returns: {File} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if download_directory and not os.path.isdir(download_directory): - return {"error": f"specified download directory \"{download_directory}\" does not exist"} - try: - stream = await self.stream_manager.download_stream_from_uri( - uri, self.exchange_rate_manager, timeout, file_name, download_directory, - save_file=save_file, wallet=wallet - ) - if not stream: - raise DownloadSDTimeoutError(uri) - except Exception as e: - log.warning("Error downloading %s: %s", uri, str(e)) - return {"error": str(e)} - return stream - - SETTINGS_DOC = """ - Settings management. 
- """ - - def jsonrpc_settings_get(self): - """ - Get daemon settings - - Usage: - settings_get - - Options: - None - - Returns: - (dict) Dictionary of daemon settings - See ADJUSTABLE_SETTINGS in lbry/conf.py for full list of settings - """ - return self.conf.settings_dict - - def jsonrpc_settings_set(self, key, value): - """ - Set daemon settings - - Usage: - settings_set () () - - Options: - None - - Returns: - (dict) Updated dictionary of daemon settings - """ - with self.conf.update_config() as c: - if value and isinstance(value, str) and value[0] in ('[', '{'): - value = json.loads(value) - attr: Setting = getattr(type(c), key) - cleaned = attr.deserialize(value) - setattr(c, key, cleaned) - return {key: cleaned} - - def jsonrpc_settings_clear(self, key): - """ - Clear daemon settings - - Usage: - settings_clear () - - Options: - None - - Returns: - (dict) Updated dictionary of daemon settings - """ - with self.conf.update_config() as c: - setattr(c, key, NOT_SET) - return {key: self.conf.settings_dict[key]} - - PREFERENCE_DOC = """ - Preferences management. - """ - - def jsonrpc_preference_get(self, key=None, wallet_id=None): - """ - Get preference value for key or all values if not key is passed in. - - Usage: - preference_get [] [--wallet_id=] - - Options: - --key= : (str) key associated with value - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (dict) Dictionary of preference(s) - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if key: - if key in wallet.preferences: - return {key: wallet.preferences[key]} - return - return wallet.preferences.to_dict_without_ts() - - def jsonrpc_preference_set(self, key, value, wallet_id=None): - """ - Set preferences - - Usage: - preference_set () () [--wallet_id=] - - Options: - --key= : (str) key associated with value - --value= : (str) key associated with value - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (dict) Dictionary with key/value of new preference - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if value and isinstance(value, str) and value[0] in ('[', '{'): - value = json.loads(value) - wallet.preferences[key] = value - wallet.save() - return {key: value} - - WALLET_DOC = """ - Create, modify and inspect wallets. - """ - - @requires("wallet") - def jsonrpc_wallet_list(self, wallet_id=None, page=None, page_size=None): - """ - List wallets. - - Usage: - wallet_list [--wallet_id=] [--page=] [--page_size=] - - Options: - --wallet_id= : (str) show specific wallet only - --page= : (int) page to return during paginating - --page_size= : (int) number of items on page during pagination - - Returns: {Paginated[Wallet]} - """ - if wallet_id: - return paginate_list([self.wallet_manager.get_wallet_or_error(wallet_id)], 1, 1) - return paginate_list(self.wallet_manager.wallets, page, page_size) - - def jsonrpc_wallet_reconnect(self): - """ - Reconnects ledger network client, applying new configurations. - - Usage: - wallet_reconnect - - Options: - - Returns: None - """ - return self.wallet_manager.reset() - - @requires("wallet") - async def jsonrpc_wallet_create( - self, wallet_id, skip_on_startup=False, create_account=False, single_key=False): - """ - Create a new wallet. 
- - Usage: - wallet_create ( | --wallet_id=) [--skip_on_startup] - [--create_account] [--single_key] - - Options: - --wallet_id= : (str) wallet file name - --skip_on_startup : (bool) don't add wallet to daemon_settings.yml - --create_account : (bool) generates the default account - --single_key : (bool) used with --create_account, creates single-key account - - Returns: {Wallet} - """ - wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id) - for wallet in self.wallet_manager.wallets: - if wallet.id == wallet_id: - raise Exception(f"Wallet at path '{wallet_path}' already exists and is loaded.") - if os.path.exists(wallet_path): - raise Exception(f"Wallet at path '{wallet_path}' already exists, use 'wallet_add' to load wallet.") - - wallet = self.wallet_manager.import_wallet(wallet_path) - if not wallet.accounts and create_account: - account = Account.generate( - self.ledger, wallet, address_generator={ - 'name': SingleKey.name if single_key else HierarchicalDeterministic.name - } - ) - if self.ledger.network.is_connected: - await self.ledger.subscribe_account(account) - wallet.save() - if not skip_on_startup: - with self.conf.update_config() as c: - c.wallets += [wallet_id] - return wallet - - @requires("wallet") - async def jsonrpc_wallet_add(self, wallet_id): - """ - Add existing wallet. - - Usage: - wallet_add ( | --wallet_id=) - - Options: - --wallet_id= : (str) wallet file name - - Returns: {Wallet} - """ - wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id) - for wallet in self.wallet_manager.wallets: - if wallet.id == wallet_id: - raise Exception(f"Wallet at path '{wallet_path}' is already loaded.") - if not os.path.exists(wallet_path): - raise Exception(f"Wallet at path '{wallet_path}' was not found.") - wallet = self.wallet_manager.import_wallet(wallet_path) - if self.ledger.network.is_connected: - for account in wallet.accounts: - await self.ledger.subscribe_account(account) - return wallet - - @requires("wallet") - async def jsonrpc_wallet_remove(self, wallet_id): - """ - Remove an existing wallet. - - Usage: - wallet_remove ( | --wallet_id=) - - Options: - --wallet_id= : (str) name of wallet to remove - - Returns: {Wallet} - """ - wallet = self.wallet_manager.get_wallet_or_error(wallet_id) - self.wallet_manager.wallets.remove(wallet) - for account in wallet.accounts: - await self.ledger.unsubscribe_account(account) - return wallet - - @requires("wallet") - async def jsonrpc_wallet_balance(self, wallet_id=None, confirmations=0): - """ - Return the balance of a wallet - - Usage: - wallet_balance [--wallet_id=] [--confirmations=] - - Options: - --wallet_id= : (str) balance for specific wallet - --confirmations= : (int) Only include transactions with this many - confirmed blocks. - - Returns: - (decimal) amount of lbry credits in wallet - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - balance = await self.ledger.get_detailed_balance( - accounts=wallet.accounts, confirmations=confirmations - ) - return dict_values_to_lbc(balance) - - def jsonrpc_wallet_status(self, wallet_id=None): - """ - Status of wallet including encryption/lock state. - - Usage: - wallet_status [ | --wallet_id=] - - Options: - --wallet_id= : (str) status of specific wallet - - Returns: - Dictionary of wallet status information. 
- """ - if self.wallet_manager is None: - return {'is_encrypted': None, 'is_syncing': None, 'is_locked': None} - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - return { - 'is_encrypted': wallet.is_encrypted, - 'is_syncing': len(self.ledger._update_tasks) > 0, - 'is_locked': wallet.is_locked - } - - @requires(WALLET_COMPONENT) - def jsonrpc_wallet_unlock(self, password, wallet_id=None): - """ - Unlock an encrypted wallet - - Usage: - wallet_unlock ( | --password=) [--wallet_id=] - - Options: - --password= : (str) password to use for unlocking - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (bool) true if wallet is unlocked, otherwise false - """ - return self.wallet_manager.get_wallet_or_default(wallet_id).unlock(password) - - @requires(WALLET_COMPONENT) - def jsonrpc_wallet_lock(self, wallet_id=None): - """ - Lock an unlocked wallet - - Usage: - wallet_lock [--wallet_id=] - - Options: - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (bool) true if wallet is locked, otherwise false - """ - return self.wallet_manager.get_wallet_or_default(wallet_id).lock() - - @requires(WALLET_COMPONENT) - def jsonrpc_wallet_decrypt(self, wallet_id=None): - """ - Decrypt an encrypted wallet, this will remove the wallet password. The wallet must be unlocked to decrypt it - - Usage: - wallet_decrypt [--wallet_id=] - - Options: - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (bool) true if wallet is decrypted, otherwise false - """ - return self.wallet_manager.get_wallet_or_default(wallet_id).decrypt() - - @requires(WALLET_COMPONENT) - def jsonrpc_wallet_encrypt(self, new_password, wallet_id=None): - """ - Encrypt an unencrypted wallet with a password - - Usage: - wallet_encrypt ( | --new_password=) - [--wallet_id=] - - Options: - --new_password= : (str) password to encrypt account - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (bool) true if wallet is decrypted, otherwise false - """ - return self.wallet_manager.get_wallet_or_default(wallet_id).encrypt(new_password) - - @requires(WALLET_COMPONENT) - async def jsonrpc_wallet_send( - self, amount, addresses, wallet_id=None, - change_account_id=None, funding_account_ids=None, preview=False): - """ - Send the same number of credits to multiple addresses using all accounts in wallet to - fund the transaction and the default account to receive any change. - - Usage: - wallet_send ... [--wallet_id=] [--preview] - [--change_account_id=None] [--funding_account_ids=...] - - Options: - --wallet_id= : (str) restrict operation to specific wallet - --change_account_id= : (str) account where change will go - --funding_account_ids= : (str) accounts to fund the transaction - --preview : (bool) do not broadcast the transaction - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." 
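Note that wallet_send, whose body continues below, gives every listed address the full amount rather than dividing it between them: the amount is first converted to dewies by get_dewies_or_error and one pay-to-pubkey-hash output of that size is built per address. A standalone sketch of the fan-out, with placeholder dictionaries standing in for real Output objects:

def fan_out_outputs_sketch(amount_dewies, addresses):
    # one output of the same size per address, so the total spent is
    # amount_dewies * len(addresses) plus the transaction fee
    return [{"amount": amount_dewies, "address": address} for address in addresses]

outputs = fan_out_outputs_sketch(100_000_000, ["bExampleAddr1", "bExampleAddr2"])
assert len(outputs) == 2 and all(out["amount"] == 100_000_000 for out in outputs)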
- account = wallet.get_account_or_default(change_account_id) - accounts = wallet.get_accounts_or_all(funding_account_ids) - - amount = self.get_dewies_or_error("amount", amount) - - if addresses and not isinstance(addresses, list): - addresses = [addresses] - - outputs = [] - for address in addresses: - self.valid_address_or_error(address) - outputs.append( - Output.pay_pubkey_hash( - amount, self.ledger.address_to_hash160(address) - ) - ) - - tx = await Transaction.create( - [], outputs, accounts, account - ) - - if not preview: - await self.ledger.broadcast(tx) - self.component_manager.loop.create_task(self.analytics_manager.send_credits_sent()) - else: - await self.ledger.release_tx(tx) - - return tx - - ACCOUNT_DOC = """ - Create, modify and inspect wallet accounts. - """ - - @requires("wallet") - async def jsonrpc_account_list( - self, account_id=None, wallet_id=None, confirmations=0, - include_claims=False, show_seed=False, page=None, page_size=None): - """ - List details of all of the accounts or a specific account. - - Usage: - account_list [] [--wallet_id=] - [--confirmations=] - [--include_claims] [--show_seed] - [--page=] [--page_size=] - - Options: - --account_id= : (str) If provided only the balance for this - account will be given - --wallet_id= : (str) accounts in specific wallet - --confirmations= : (int) required confirmations (default: 0) - --include_claims : (bool) include claims, requires than a - LBC account is specified (default: false) - --show_seed : (bool) show the seed for the account - --page= : (int) page to return during paginating - --page_size= : (int) number of items on page during pagination - - Returns: {Paginated[Account]} - """ - kwargs = { - 'confirmations': confirmations, - 'show_seed': show_seed - } - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if account_id: - return paginate_list([await wallet.get_account_or_error(account_id).get_details(**kwargs)], 1, 1) - else: - return paginate_list(await wallet.get_detailed_accounts(**kwargs), page, page_size) - - @requires("wallet") - async def jsonrpc_account_balance(self, account_id=None, wallet_id=None, confirmations=0): - """ - Return the balance of an account - - Usage: - account_balance [] [
<address> | --address=<address>
] [--wallet_id=] - [ | --confirmations=] - - Options: - --account_id= : (str) If provided only the balance for this - account will be given. Otherwise default account. - --wallet_id= : (str) balance for specific wallet - --confirmations= : (int) Only include transactions with this many - confirmed blocks. - - Returns: - (decimal) amount of lbry credits in wallet - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = wallet.get_account_or_default(account_id) - balance = await account.get_detailed_balance( - confirmations=confirmations, reserved_subtotals=True, - ) - return dict_values_to_lbc(balance) - - @requires("wallet") - async def jsonrpc_account_add( - self, account_name, wallet_id=None, single_key=False, - seed=None, private_key=None, public_key=None): - """ - Add a previously created account from a seed, private key or public key (read-only). - Specify --single_key for single address or vanity address accounts. - - Usage: - account_add ( | --account_name=) - (--seed= | --private_key= | --public_key=) - [--single_key] [--wallet_id=] - - Options: - --account_name= : (str) name of the account to add - --seed= : (str) seed to generate new account from - --private_key= : (str) private key for new account - --public_key= : (str) public key for new account - --single_key : (bool) create single key account, default is multi-key - --wallet_id= : (str) restrict operation to specific wallet - - Returns: {Account} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = Account.from_dict( - self.ledger, wallet, { - 'name': account_name, - 'seed': seed, - 'private_key': private_key, - 'public_key': public_key, - 'address_generator': { - 'name': SingleKey.name if single_key else HierarchicalDeterministic.name - } - } - ) - wallet.save() - if self.ledger.network.is_connected: - await self.ledger.subscribe_account(account) - return account - - @requires("wallet") - async def jsonrpc_account_create(self, account_name, single_key=False, wallet_id=None): - """ - Create a new account. Specify --single_key if you want to use - the same address for all transactions (not recommended). - - Usage: - account_create ( | --account_name=) - [--single_key] [--wallet_id=] - - Options: - --account_name= : (str) name of the account to create - --single_key : (bool) create single key account, default is multi-key - --wallet_id= : (str) restrict operation to specific wallet - - Returns: {Account} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = Account.generate( - self.ledger, wallet, account_name, { - 'name': SingleKey.name if single_key else HierarchicalDeterministic.name - } - ) - wallet.save() - if self.ledger.network.is_connected: - await self.ledger.subscribe_account(account) - return account - - @requires("wallet") - def jsonrpc_account_remove(self, account_id, wallet_id=None): - """ - Remove an existing account. 
- - Usage: - account_remove ( | --account_id=) [--wallet_id=] - - Options: - --account_id= : (str) id of the account to remove - --wallet_id= : (str) restrict operation to specific wallet - - Returns: {Account} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = wallet.get_account_or_error(account_id) - wallet.accounts.remove(account) - wallet.save() - return account - - @requires("wallet") - def jsonrpc_account_set( - self, account_id, wallet_id=None, default=False, new_name=None, - change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None): - """ - Change various settings on an account. - - Usage: - account_set ( | --account_id=) [--wallet_id=] - [--default] [--new_name=] - [--change_gap=] [--change_max_uses=] - [--receiving_gap=] [--receiving_max_uses=] - - Options: - --account_id= : (str) id of the account to change - --wallet_id= : (str) restrict operation to specific wallet - --default : (bool) make this account the default - --new_name= : (str) new name for the account - --receiving_gap= : (int) set the gap for receiving addresses - --receiving_max_uses= : (int) set the maximum number of times to - use a receiving address - --change_gap= : (int) set the gap for change addresses - --change_max_uses= : (int) set the maximum number of times to - use a change address - - Returns: {Account} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = wallet.get_account_or_error(account_id) - change_made = False - - if account.receiving.name == HierarchicalDeterministic.name: - address_changes = { - 'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses}, - 'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses}, - } - for chain_name in address_changes: - chain = getattr(account, chain_name) - for attr, value in address_changes[chain_name].items(): - if value is not None: - setattr(chain, attr, value) - change_made = True - - if new_name is not None: - account.name = new_name - change_made = True - - if default and wallet.default_account != account: - wallet.accounts.remove(account) - wallet.accounts.insert(0, account) - change_made = True - - if change_made: - account.modified_on = time.time() - wallet.save() - - return account - - @requires("wallet") - def jsonrpc_account_max_address_gap(self, account_id, wallet_id=None): - """ - Finds ranges of consecutive addresses that are unused and returns the length - of the longest such range: for change and receiving address chains. This is - useful to figure out ideal values to set for 'receiving_gap' and 'change_gap' - account settings. - - Usage: - account_max_address_gap ( | --account_id=) - [--wallet_id=] - - Options: - --account_id= : (str) account for which to get max gaps - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (map) maximum gap for change and receiving addresses - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - return wallet.get_account_or_error(account_id).get_max_gap() - - @requires("wallet") - def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0', - everything=False, outputs=1, broadcast=False, wallet_id=None): - """ - Transfer some amount (or --everything) to an account from another - account (can be the same account). Amounts are interpreted as LBC. - You can also spread the transfer across a number of --outputs (cannot - be used together with --everything). 
- - Usage: - account_fund [ | --to_account=] - [ | --from_account=] - ( | --amount= | --everything) - [ | --outputs=] [--wallet_id=] - [--broadcast] - - Options: - --to_account= : (str) send to this account - --from_account= : (str) spend from this account - --amount= : (str) the amount to transfer lbc - --everything : (bool) transfer everything (excluding claims), default: false. - --outputs= : (int) split payment across many outputs, default: 1. - --wallet_id= : (str) limit operation to specific wallet. - --broadcast : (bool) actually broadcast the transaction, default: false. - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - to_account = wallet.get_account_or_default(to_account) - from_account = wallet.get_account_or_default(from_account) - amount = self.get_dewies_or_error('amount', amount) if amount else None - if not isinstance(outputs, int): - raise ValueError("--outputs must be an integer.") - if everything and outputs > 1: - raise ValueError("Using --everything along with --outputs is not supported.") - return from_account.fund( - to_account=to_account, amount=amount, everything=everything, - outputs=outputs, broadcast=broadcast - ) - - @requires(WALLET_COMPONENT) - def jsonrpc_account_send(self, amount, addresses, account_id=None, wallet_id=None, preview=False): - """ - Send the same number of credits to multiple addresses from a specific account (or default account). - - Usage: - account_send ... [--account_id=] [--wallet_id=] [--preview] - - Options: - --account_id= : (str) account to fund the transaction - --wallet_id= : (str) restrict operation to specific wallet - --preview : (bool) do not broadcast the transaction - - Returns: {Transaction} - """ - return self.jsonrpc_wallet_send( - amount=amount, addresses=addresses, wallet_id=wallet_id, - change_account_id=account_id, funding_account_ids=[account_id] if account_id else [], - preview=preview - ) - - SYNC_DOC = """ - Wallet synchronization. - """ - - @requires("wallet") - def jsonrpc_sync_hash(self, wallet_id=None): - """ - Deterministic hash of the wallet. - - Usage: - sync_hash [ | --wallet_id=] - - Options: - --wallet_id= : (str) wallet for which to generate hash - - Returns: - (str) sha256 hash of wallet - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - return hexlify(wallet.hash).decode() - - @requires("wallet") - async def jsonrpc_sync_apply(self, password, data=None, wallet_id=None, blocking=False): - """ - Apply incoming synchronization data, if provided, and return a sync hash and update wallet data. - - Wallet must be unlocked to perform this operation. - - If "encrypt-on-disk" preference is True and supplied password is different from local password, - or there is no local password (because local wallet was not encrypted), then the supplied password - will be used for local encryption (overwriting previous local encryption password). 
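The password rule described in the paragraph above reduces to a small branch; the real check in the method body below compares against the wallet's ENCRYPT_ON_DISK preference and wallet.encryption_password, while this pure-function sketch uses illustrative names only:

def local_encryption_password_sketch(encrypt_on_disk, current_password, supplied_password):
    # adopt the supplied password locally when the wallet wants on-disk
    # encryption and the supplied password differs from the current one
    # (including the case where no local password exists yet)
    if encrypt_on_disk and supplied_password != current_password:
        return supplied_password
    return current_password

assert local_encryption_password_sketch(True, None, "new-pass") == "new-pass"
assert local_encryption_password_sketch(False, "old-pass", "new-pass") == "old-pass"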
- - Usage: - sync_apply [--data=] [--wallet_id=] [--blocking] - - Options: - --password= : (str) password to decrypt incoming and encrypt outgoing data - --data= : (str) incoming sync data, if any - --wallet_id= : (str) wallet being sync'ed - --blocking : (bool) wait until any new accounts have sync'ed - - Returns: - (map) sync hash and data - - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - wallet_changed = False - if data is not None: - added_accounts = wallet.merge(self.wallet_manager, password, data) - if added_accounts and self.ledger.network.is_connected: - if blocking: - await asyncio.wait([ - a.ledger.subscribe_account(a) for a in added_accounts - ]) - else: - for new_account in added_accounts: - asyncio.create_task(self.ledger.subscribe_account(new_account)) - wallet_changed = True - if wallet.preferences.get(ENCRYPT_ON_DISK, False) and password != wallet.encryption_password: - wallet.encryption_password = password - wallet_changed = True - if wallet_changed: - wallet.save() - encrypted = wallet.pack(password) - return { - 'hash': self.jsonrpc_sync_hash(wallet_id), - 'data': encrypted.decode() - } - - ADDRESS_DOC = """ - List, generate and verify addresses. - """ - - @requires(WALLET_COMPONENT) - async def jsonrpc_address_is_mine(self, address, account_id=None, wallet_id=None): - """ - Checks if an address is associated with the current wallet. - - Usage: - address_is_mine (
<address> | --address=<address>
) - [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - - Options: - --address=<address>
: (str) address to check - --account_id= : (str) id of the account to use - --wallet_id= : (str) restrict operation to specific wallet - - Returns: - (bool) true, if address is associated with current wallet - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = wallet.get_account_or_default(account_id) - match = await self.ledger.db.get_address(address=address, accounts=[account]) - if match is not None: - return True - return False - - @requires(WALLET_COMPONENT) - def jsonrpc_address_list(self, address=None, account_id=None, wallet_id=None, page=None, page_size=None): - """ - List account addresses or details of single address. - - Usage: - address_list [--address=
<address>] [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--page=<page>] [--page_size=<page_size>] - - Options: - --address=<address>
: (str) just show details for single address - --account_id= : (str) id of the account to use - --wallet_id= : (str) restrict operation to specific wallet - --page= : (int) page to return during paginating - --page_size= : (int) number of items on page during pagination - - Returns: {Paginated[Address]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - constraints = {} - if address: - constraints['address'] = address - if account_id: - constraints['accounts'] = [wallet.get_account_or_error(account_id)] - else: - constraints['accounts'] = wallet.accounts - return paginate_rows( - self.ledger.get_addresses, - self.ledger.get_address_count, - page, page_size, **constraints - ) - - @requires(WALLET_COMPONENT) - def jsonrpc_address_unused(self, account_id=None, wallet_id=None): - """ - Return an address containing no balance, will create - a new address if there is none. - - Usage: - address_unused [--account_id=] [--wallet_id=] - - Options: - --account_id= : (str) id of the account to use - --wallet_id= : (str) restrict operation to specific wallet - - Returns: {Address} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - return wallet.get_account_or_default(account_id).receiving.get_or_create_usable_address() - - FILE_DOC = """ - File management. - """ - - @requires(STREAM_MANAGER_COMPONENT) - async def jsonrpc_file_list( - self, sort=None, reverse=False, comparison=None, - wallet_id=None, page=None, page_size=None, **kwargs): - """ - List files limited by optional filters - - Usage: - file_list [--sd_hash=] [--file_name=] [--stream_hash=] - [--rowid=] [--added_on=] [--claim_id=] - [--outpoint=] [--txid=] [--nout=] - [--channel_claim_id=] [--channel_name=] - [--claim_name=] [--blobs_in_stream=] - [--blobs_remaining=] [--sort=] - [--comparison=] [--full_status=] [--reverse] - [--page=] [--page_size=] [--wallet_id=] - - Options: - --sd_hash= : (str) get file with matching sd hash - --file_name= : (str) get file with matching file name in the - downloads folder - --stream_hash= : (str) get file with matching stream hash - --rowid= : (int) get file with matching row id - --added_on= : (int) get file with matching time of insertion - --claim_id= : (str) get file with matching claim id - --outpoint= : (str) get file with matching claim outpoint - --txid= : (str) get file with matching claim txid - --nout= : (int) get file with matching claim nout - --channel_claim_id= : (str) get file with matching channel claim id - --channel_name= : (str) get file with matching channel name - --claim_name= : (str) get file with matching claim name - --blobs_in_stream : (int) get file with matching blobs in stream - --blobs_remaining= : (int) amount of remaining blobs to download - --sort= : (str) field to sort by (one of the above filter fields) - --comparison= : (str) logical comparison, (eq | ne | g | ge | l | le) - --page= : (int) page to return during paginating - --page_size= : (int) number of items on page during pagination - --wallet_id= : (str) add purchase receipts from this wallet - - Returns: {Paginated[File]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - sort = sort or 'rowid' - comparison = comparison or 'eq' - paginated = paginate_list( - self.stream_manager.get_filtered_streams(sort, reverse, comparison, **kwargs), page, page_size - ) - if paginated['items']: - receipts = { - txo.purchased_claim_id: txo for txo in - await self.ledger.db.get_purchases( - accounts=wallet.accounts, - 
purchased_claim_hash__in=[unhexlify(s.claim_id)[::-1] for s in paginated['items']] - ) - } - for stream in paginated['items']: - stream.purchase_receipt = receipts.get(stream.claim_id) - return paginated - - @requires(STREAM_MANAGER_COMPONENT) - async def jsonrpc_file_set_status(self, status, **kwargs): - """ - Start or stop downloading a file - - Usage: - file_set_status ( | --status=) [--sd_hash=] - [--file_name=] [--stream_hash=] [--rowid=] - - Options: - --status= : (str) one of "start" or "stop" - --sd_hash= : (str) set status of file with matching sd hash - --file_name= : (str) set status of file with matching file name in the - downloads folder - --stream_hash= : (str) set status of file with matching stream hash - --rowid= : (int) set status of file with matching row id - - Returns: - (str) Confirmation message - """ - - if status not in ['start', 'stop']: - raise Exception('Status must be "start" or "stop".') - - streams = self.stream_manager.get_filtered_streams(**kwargs) - if not streams: - raise Exception(f'Unable to find a file for {kwargs}') - stream = streams[0] - if status == 'start' and not stream.running: - await stream.save_file(node=self.stream_manager.node) - msg = "Resumed download" - elif status == 'stop' and stream.running: - await stream.stop() - msg = "Stopped download" - else: - msg = ( - "File was already being downloaded" if status == 'start' - else "File was already stopped" - ) - return msg - - @requires(STREAM_MANAGER_COMPONENT) - async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs): - """ - Delete a LBRY file - - Usage: - file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=] [--file_name=] - [--stream_hash=] [--rowid=] [--claim_id=] [--txid=] - [--nout=] [--claim_name=] [--channel_claim_id=] - [--channel_name=] - - Options: - --delete_from_download_dir : (bool) delete file from download directory, - instead of just deleting blobs - --delete_all : (bool) if there are multiple matching files, - allow the deletion of multiple files. - Otherwise do not delete anything. - --sd_hash= : (str) delete by file sd hash - --file_name= : (str) delete by file name in downloads folder - --stream_hash= : (str) delete by file stream hash - --rowid= : (int) delete by file row id - --claim_id= : (str) delete by file claim id - --txid= : (str) delete by file claim txid - --nout= : (int) delete by file claim nout - --claim_name= : (str) delete by file claim name - --channel_claim_id= : (str) delete by file channel claim id - --channel_name= : (str) delete by file channel claim name - - Returns: - (bool) true if deletion was successful - """ - - streams = self.stream_manager.get_filtered_streams(**kwargs) - - if len(streams) > 1: - if not delete_all: - log.warning("There are %i files to delete, use narrower filters to select one", - len(streams)) - return False - else: - log.warning("Deleting %i files", - len(streams)) - - if not streams: - log.warning("There is no file to delete") - return False - else: - for stream in streams: - message = f"Deleted file {stream.file_name}" - await self.stream_manager.delete_stream(stream, delete_file=delete_from_download_dir) - log.info(message) - result = True - return result - - @requires(STREAM_MANAGER_COMPONENT) - async def jsonrpc_file_save(self, file_name=None, download_directory=None, **kwargs): - """ - Start saving a file to disk. 
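file_save uses the same kind of filter keywords (sd_hash, file_name, stream_hash, rowid, claim_id, txid, nout, claim_name, channel_claim_id, channel_name) as file_list, file_set_status and file_delete above: filters are applied until they identify the intended file, and file_delete and file_save warn and stop when more than one stream matches (file_delete can be forced with --delete_all). A standalone sketch of that narrow-until-one-match filtering over hypothetical in-memory records, not the real StreamManager:

streams = [
    {"sd_hash": "aa11", "claim_name": "video-one", "rowid": 1},
    {"sd_hash": "bb22", "claim_name": "video-two", "rowid": 2},
]

def filtered_streams_sketch(**filters):
    # keep only the records whose fields equal every supplied filter value
    return [s for s in streams if all(s.get(k) == v for k, v in filters.items())]

matches = filtered_streams_sketch(claim_name="video-two")
assert len(matches) == 1  # narrow enough to identify exactly one file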
- - Usage: - file_save [--file_name=] [--download_directory=] [--sd_hash=] - [--stream_hash=] [--rowid=] [--claim_id=] [--txid=] - [--nout=] [--claim_name=] [--channel_claim_id=] - [--channel_name=] - - Options: - --file_name= : (str) file name to save to - --download_directory= : (str) directory to save into - --sd_hash= : (str) save file with matching sd hash - --stream_hash= : (str) save file with matching stream hash - --rowid= : (int) save file with matching row id - --claim_id= : (str) save file with matching claim id - --txid= : (str) save file with matching claim txid - --nout= : (int) save file with matching claim nout - --claim_name= : (str) save file with matching claim name - --channel_claim_id= : (str) save file with matching channel claim id - --channel_name= : (str) save file with matching channel claim name - - Returns: {File} - """ - - streams = self.stream_manager.get_filtered_streams(**kwargs) - - if len(streams) > 1: - log.warning("There are %i matching files, use narrower filters to select one", len(streams)) - return False - if not streams: - log.warning("There is no file to save") - return False - stream = streams[0] - await stream.save_file(file_name, download_directory) - return stream - - PURCHASE_DOC = """ - List and make purchases of claims. - """ - - @requires(WALLET_COMPONENT) - def jsonrpc_purchase_list( - self, claim_id=None, resolve=False, account_id=None, wallet_id=None, page=None, page_size=None): - """ - List my claim purchases. - - Usage: - purchase_list [ | --claim_id=] [--resolve] - [--account_id=] [--wallet_id=] - [--page=] [--page_size=] - - Options: - --claim_id= : (str) purchases for specific claim - --resolve : (str) include resolved claim information - --account_id= : (str) id of the account to query - --wallet_id= : (str) restrict results to specific wallet - --page= : (int) page to return during paginating - --page_size= : (int) number of items on page during pagination - - Returns: {Paginated[Output]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - constraints = { - "wallet": wallet, - "accounts": [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts, - "resolve": resolve, - } - if claim_id: - constraints["purchased_claim_id"] = claim_id - return paginate_rows( - self.ledger.get_purchases, - self.ledger.get_purchase_count, - page, page_size, **constraints - ) - - @requires(WALLET_COMPONENT) - async def jsonrpc_purchase_create( - self, claim_id=None, url=None, wallet_id=None, funding_account_ids=None, - allow_duplicate_purchase=False, override_max_key_fee=False, preview=False, blocking=False): - """ - Purchase a claim. - - Usage: - purchase_create (--claim_id= | --url=) [--wallet_id=] - [--funding_account_ids=...] - [--allow_duplicate_purchase] [--override_max_key_fee] [--preview] [--blocking] - - Options: - --claim_id= : (str) claim id of claim to purchase - --url= : (str) lookup claim to purchase by url - --wallet_id= : (str) restrict operation to specific wallet - --funding_account_ids=: (list) ids of accounts to fund this transaction - --allow_duplicate_purchase : (bool) allow purchasing claim_id you already own - --override_max_key_fee : (bool) ignore max key fee for this purchase - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." 
accounts = wallet.get_accounts_or_all(funding_account_ids) - txo = None - if claim_id: - txo = await self.ledger.get_claim_by_claim_id(accounts, claim_id, include_purchase_receipt=True) - if not isinstance(txo, Output) or not txo.is_claim: - raise Exception(f"Could not find claim with claim_id '{claim_id}'. ") - elif url: - txo = (await self.ledger.resolve(accounts, [url], include_purchase_receipt=True))[url] - if not isinstance(txo, Output) or not txo.is_claim: - raise Exception(f"Could not find claim with url '{url}'. ") - else: - raise Exception(f"Missing argument claim_id or url. ") - if not allow_duplicate_purchase and txo.purchase_receipt: - raise Exception( - f"You already have a purchase for claim_id '{claim_id}'. " - f"Use --allow-duplicate-purchase flag to override." - ) - claim = txo.claim - if not claim.is_stream or not claim.stream.has_fee: - raise Exception(f"Claim '{claim_id}' does not have a purchase price.") - tx = await self.wallet_manager.create_purchase_transaction( - accounts, txo, self.exchange_rate_manager, override_max_key_fee - ) - if not preview: - await self.broadcast_or_release(tx, blocking) - else: - await self.ledger.release_tx(tx) - return tx - - CLAIM_DOC = """ - List and search all types of claims. - """ - - @requires(WALLET_COMPONENT) - def jsonrpc_claim_list(self, claim_type=None, **kwargs): - """ - List my stream and channel claims. - - Usage: - claim_list [--claim_type=<claim_type>...] [--claim_id=<claim_id>...] [--name=<name>...] [--is_spent] - [--channel_id=<channel_id>...] [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--page=<page>] [--page_size=<page_size>] - [--resolve] [--order_by=<order_by>] [--no_totals] [--include_received_tips] - - Options: - --claim_type=<claim_type> : (str or list) claim type: channel, stream, repost, collection - --claim_id=<claim_id> : (str or list) claim id - --channel_id=<channel_id> : (str or list) streams in this channel - --name=<name> : (str or list) claim name - --is_spent : (bool) shows previous claim updates and abandons - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --resolve : (bool) resolves each claim to provide additional metadata - --order_by=<order_by> : (str) field to order by: 'name', 'height', 'amount' - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - --include_received_tips : (bool) calculate the amount of tips recieved for claim outputs - - Returns: {Paginated[Output]} - """ - kwargs['type'] = claim_type or CLAIM_TYPE_NAMES - if 'is_spent' not in kwargs: - kwargs['is_not_spent'] = True - return self.jsonrpc_txo_list(**kwargs) - - @requires(WALLET_COMPONENT) - async def jsonrpc_claim_search(self, **kwargs): - """ - Search for stream and channel claims on the blockchain. - - Arguments marked with "supports equality constraints" allow prepending the - value with an equality constraint such as '>', '>=', '<' and '<=' - eg. --height=">400000" would limit results to only claims above 400k block height. - - Usage: - claim_search [<name> | --name=<name>] [--text=<text>] [--txid=<txid>] [--nout=<nout>] - [--claim_id=<claim_id> | --claim_ids=<claim_ids>...] - [--channel=<channel> | - [[--channel_ids=<channel_ids>...]
[--not_channel_ids=<not_channel_ids>...]]] - [--has_channel_signature] [--valid_channel_signature | --invalid_channel_signature] - [--is_controlling] [--release_time=<release_time>] [--public_key_id=<public_key_id>] - [--timestamp=<timestamp>] [--creation_timestamp=<creation_timestamp>] - [--height=<height>] [--creation_height=<creation_height>] - [--activation_height=<activation_height>] [--expiration_height=<expiration_height>] - [--amount=<amount>] [--effective_amount=<effective_amount>] - [--support_amount=<support_amount>] [--trending_group=<trending_group>] - [--trending_mixed=<trending_mixed>] [--trending_local=<trending_local>] - [--trending_global=<trending_global>] [--reposted=<reposted>] - [--claim_type=<claim_type>] [--stream_types=<stream_types>...] [--media_types=<media_types>...] - [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] - [--duration=<duration>] - [--any_tags=<any_tags>...] [--all_tags=<all_tags>...] [--not_tags=<not_tags>...] - [--any_languages=<any_languages>...] [--all_languages=<all_languages>...] - [--not_languages=<not_languages>...] - [--any_locations=<any_locations>...] [--all_locations=<all_locations>...] - [--not_locations=<not_locations>...] - [--order_by=<order_by>...] [--page=<page>] [--page_size=<page_size>] - [--wallet_id=<wallet_id>] [--include_purchase_receipt] [--include_is_my_output] - - Options: - --name=<name> : (str) claim name (normalized) - --text=<text> : (str) full text search - --claim_id=<claim_id> : (str) full or partial claim id - --claim_ids=<claim_ids> : (list) list of full claim ids - --txid=<txid> : (str) transaction id - --nout=<nout> : (str) position in the transaction - --channel=<channel> : (str) claims signed by this channel (argument is - a URL which automatically gets resolved), - see --channel_ids if you need to filter by - multiple channels at the same time, - includes claims with invalid signatures, - use in conjunction with --valid_channel_signature - --channel_ids=<channel_ids> : (list) claims signed by any of these channels - (arguments must be claim ids of the channels), - includes claims with invalid signatures, - implies --has_channel_signature, - use in conjunction with --valid_channel_signature - --not_channel_ids=<not_channel_ids>: (list) exclude claims signed by any of these channels - (arguments must be claim ids of the channels) - --has_channel_signature : (bool) claims with a channel signature (valid or invalid) - --valid_channel_signature : (bool) claims with a valid channel signature or no signature, - use in conjunction with --has_channel_signature to - only get claims with valid signatures - --invalid_channel_signature : (bool) claims with invalid channel signature or no signature, - use in conjunction with --has_channel_signature to - only get claims with invalid signatures - --is_controlling : (bool) winning claims of their respective name - --public_key_id=<public_key_id> : (str) only return channels having this public key id, this is - the same key as used in the wallet file to map - channel certificate private keys: {'public_key_id': 'private key'} - --height=<height> : (int) last updated block height (supports equality constraints) - --timestamp=<timestamp> : (int) last updated timestamp (supports equality constraints) - --creation_height=<creation_height> : (int) created at block height (supports equality constraints) - --creation_timestamp=<creation_timestamp>: (int) created at timestamp (supports equality constraints) - --activation_height=<activation_height> : (int) height at which claim starts competing for name - (supports equality constraints) - --expiration_height=<expiration_height> : (int) height at which claim will expire - (supports equality constraints) - --release_time=<release_time> : (int) limit to claims self-described as having been - released to the public on or after this UTC - timestamp, when claim does not provide - a release time the publish time is used instead - (supports equality constraints) - --amount=<amount> : (int) limit by claim value (supports equality constraints) - --support_amount=<support_amount>: (int) limit by supports and tips received (supports - equality constraints) - --effective_amount=<effective_amount>: (int) limit by total value (initial claim value plus - all tips
and supports received), this amount is - blank until claim has reached activation height - (supports equality constraints) - --trending_group=<trending_group>: (int) group numbers 1 through 4 representing the - trending groups of the content: 4 means - content is trending globally and independently, - 3 means content is not trending globally but is - trending independently (locally), 2 means it is - trending globally but not independently and 1 - means it's not trending globally or locally - (supports equality constraints) - --trending_mixed=<trending_mixed>: (int) trending amount taken from the global or local - value depending on the trending group: - 4 - global value, 3 - local value, 2 - global - value, 1 - local value (supports equality - constraints) - --trending_local=<trending_local>: (int) trending value calculated relative only to - the individual contents past history (supports - equality constraints) - --trending_global=<trending_global>: (int) trending value calculated relative to all - trending content globally (supports - equality constraints) - --reposted_claim_id=<reposted_claim_id>: (str) all reposts of the specified original claim id - --reposted=<reposted> : (int) claims reposted this many times (supports - equality constraints) - --claim_type=<claim_type> : (str) filter by 'channel', 'stream' or 'unknown' - --stream_types=<stream_types> : (list) filter by 'video', 'image', 'document', etc - --media_types=<media_types> : (list) filter by 'video/mp4', 'image/png', etc - --fee_currency=<fee_currency> : (string) specify fee currency: LBC, BTC, USD - --fee_amount=<fee_amount> : (decimal) content download fee (supports equality constraints) - --duration=<duration> : (int) duration of video or audio in seconds - (supports equality constraints) - --any_tags=<any_tags> : (list) find claims containing any of the tags - --all_tags=<all_tags> : (list) find claims containing every tag - --not_tags=<not_tags> : (list) find claims not containing any of these tags - --any_languages=<any_languages> : (list) find claims containing any of the languages - --all_languages=<all_languages> : (list) find claims containing every language - --not_languages=<not_languages> : (list) find claims not containing any of these languages - --any_locations=<any_locations> : (list) find claims containing any of the locations - --all_locations=<all_locations> : (list) find claims containing every location - --not_locations=<not_locations> : (list) find claims not containing any of these locations - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --order_by=<order_by> : (list) field to order by, default is descending order, to do an - ascending order prepend ^ to the field name, eg.
'^amount' - available fields: 'name', 'height', 'release_time', - 'publish_time', 'amount', 'effective_amount', - 'support_amount', 'trending_group', 'trending_mixed', - 'trending_local', 'trending_global', 'activation_height' - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - --wallet_id=<wallet_id> : (str) wallet to check for claim purchase reciepts - --include_purchase_receipt : (bool) lookup and include a receipt if this wallet - has purchased the claim - --include_is_my_output : (bool) lookup and include a boolean indicating - if claim being resolved is yours - - Returns: {Paginated[Output]} - """ - wallet = self.wallet_manager.get_wallet_or_default(kwargs.pop('wallet_id', None)) - if {'claim_id', 'claim_ids'}.issubset(kwargs): - raise ValueError("Only 'claim_id' or 'claim_ids' is allowed, not both.") - if kwargs.pop('valid_channel_signature', False): - kwargs['signature_valid'] = 1 - if kwargs.pop('invalid_channel_signature', False): - kwargs['signature_valid'] = 0 - page_num, page_size = abs(kwargs.pop('page', 1)), min(abs(kwargs.pop('page_size', DEFAULT_PAGE_SIZE)), 50) - kwargs.update({'offset': page_size * (page_num - 1), 'limit': page_size}) - txos, blocked, _, total = await self.ledger.claim_search(wallet.accounts, **kwargs) - result = { - "items": txos, - "blocked": blocked, - "page": page_num, - "page_size": page_size - } - if not kwargs.pop('no_totals', False): - result['total_pages'] = int((total + (page_size - 1)) / page_size) - result['total_items'] = total - return result - - CHANNEL_DOC = """ - Create, update, abandon and list your channel claims. - """ - - @deprecated('channel_create') - def jsonrpc_channel_new(self): - """ deprecated """ - - @requires(WALLET_COMPONENT) - async def jsonrpc_channel_create( - self, name, bid, allow_duplicate_name=False, account_id=None, wallet_id=None, - claim_address=None, funding_account_ids=None, preview=False, blocking=False, **kwargs): - """ - Create a new channel by generating a channel private key and establishing an '@' prefixed claim. - - Usage: - channel_create (<name> | --name=<name>) (<bid> | --bid=<bid>) - [--allow_duplicate_name=<allow_duplicate_name>] - [--title=<title>] [--description=<description>] [--email=<email>] - [--website_url=<website_url>] [--featured=<featured>...] - [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...] - [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] - - Options: - --name=<name> : (str) name of the channel prefixed with '@' - --bid=<bid> : (decimal) amount to back the claim - --allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with - given name. default: false.
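Aside: claim_search accepts the equality-constraint syntax described above and, per the implementation, clamps page_size to 50 and skips the totals when no_totals is set. A rough sketch of such a query over the daemon's JSON-RPC endpoint; the localhost URL, the response envelope, the channel id, and the requests dependency are assumptions or placeholders.

import requests

query = {
    "method": "claim_search",
    "params": {
        "claim_type": "stream",
        "channel_ids": ["0000000000000000000000000000000000000000"],  # placeholder channel claim id
        "height": ">400000",        # equality-constraint syntax from the docstring above
        "order_by": ["^height"],    # '^' prefix switches to ascending order
        "page": 1,
        "page_size": 50,            # anything larger is clamped to 50 by the daemon
        "no_totals": True,          # skip total_pages / total_items for speed
    },
}
result = requests.post("http://localhost:5279", json=query).json()["result"]
for claim in result["items"]:
    print(claim)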
- --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --email=<email> : (str) email of channel owner - --website_url=<website_url> : (str) website url - --featured=<featured> : (list) claim_ids of featured content in channel - --tags=<tags> : (list) content tags - --languages=<languages> : (list) languages used by the channel, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --locations=<locations> : (list) locations of the channel, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... --locations="{'country': 'US', 'state': 'NH'}" - - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --cover_url=<cover_url> : (str) url of cover image - --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the channel is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - account = wallet.get_account_or_default(account_id) - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - self.valid_channel_name_or_error(name) - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - claim_address = await self.get_receiving_address(claim_address, account) - - existing_channels = await self.ledger.get_channels(accounts=wallet.accounts, claim_name=name) - if len(existing_channels) > 0: - if not allow_duplicate_name: - raise Exception( - f"You already have a channel under the name '{name}'. " - f"Use --allow-duplicate-name flag to override." 
- ) - - claim = Claim() - claim.channel.update(**kwargs) - tx = await Transaction.claim_create( - name, claim, amount, claim_address, funding_accounts, funding_accounts[0] - ) - txo = tx.outputs[0] - txo.generate_channel_private_key() - - await tx.sign(funding_accounts) - - if not preview: - account.add_channel_private_key(txo.private_key) - wallet.save() - await self.broadcast_or_release(tx, blocking) - await self.storage.save_claims([self._old_get_temp_claim_info( - tx, txo, claim_address, claim, name, dewies_to_lbc(amount) - )]) - self.component_manager.loop.create_task(self.analytics_manager.send_new_channel()) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - async def jsonrpc_channel_update( - self, claim_id, bid=None, account_id=None, wallet_id=None, claim_address=None, - funding_account_ids=None, new_signing_key=False, preview=False, - blocking=False, replace=False, **kwargs): - """ - Update an existing channel claim. - - Usage: - channel_update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>] - [--title=<title>] [--description=<description>] [--email=<email>] - [--website_url=<website_url>] - [--featured=<featured>...] [--clear_featured] - [--tags=<tags>...] [--clear_tags] - [--languages=<languages>...] [--clear_languages] - [--locations=<locations>...] [--clear_locations] - [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--new_signing_key] - [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] [--replace] - - Options: - --claim_id=<claim_id> : (str) claim_id of the channel to update - --bid=<bid> : (decimal) amount to back the claim - --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --email=<email> : (str) email of channel owner - --website_url=<website_url> : (str) website url - --featured=<featured> : (list) claim_ids of featured content in channel - --clear_featured : (bool) clear existing featured content (prior to adding new ones) - --tags=<tags> : (list) add content tags - --clear_tags : (bool) clear existing tags (prior to adding new ones) - --languages=<languages> : (list) languages used by the channel, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --clear_languages : (bool) clear existing languages (prior to adding new ones) - --locations=<locations> : (list) locations of the channel, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... 
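Aside: the --locations value documented for channel_create (and repeated for the stream and collection commands below) may be given as a colon-delimited string, a bare "LATITUDE:LONGITUDE" pair, or a JSON object. A small illustrative parser for the colon-delimited form; this is an interpretation of the documented format, not code from the deleted module.

def parse_location(value: str) -> dict:
    # "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE", blank segments allowed.
    keys = ("country", "state", "city", "code", "latitude", "longitude")
    parts = value.split(":")
    if len(parts) == 2:  # bare "LATITUDE:LONGITUDE" shorthand
        return {"latitude": parts[0], "longitude": parts[1]}
    return {key: part for key, part in zip(keys, parts) if part}

assert parse_location("::Manchester") == {"city": "Manchester"}
assert parse_location("US:NH:Manchester:03101:42.990605:-71.460989") == {
    "country": "US", "state": "NH", "city": "Manchester",
    "code": "03101", "latitude": "42.990605", "longitude": "-71.460989",
}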
--locations="{'country': 'US', 'state': 'NH'}" - - --clear_locations : (bool) clear existing locations (prior to adding new ones) - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --cover_url=<cover_url> : (str) url of cover image - --account_id=<account_id> : (str) account in which to look for channel (default: all) - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the channel is sent - --new_signing_key : (bool) generate a new signing key, will invalidate all previous publishes - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - --replace : (bool) instead of modifying specific values on - the channel, this will clear all existing values - and only save passed in values, useful for form - submissions where all values are always set - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - existing_channels = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, claim_hash=unhexlify(claim_id)[::-1] - ) - if len(existing_channels) != 1: - account_ids = ', '.join(f"'{account.id}'" for account in accounts) - raise Exception( - f"Can't find the channel '{claim_id}' in account(s) {account_ids}." - ) - old_txo = existing_channels[0] - if not old_txo.claim.is_channel: - raise Exception( - f"A claim with id '{claim_id}' was found but it is not a channel." - ) - - if bid is not None: - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - else: - amount = old_txo.amount - - if claim_address is not None: - self.valid_address_or_error(claim_address) - else: - claim_address = old_txo.get_address(account.ledger) - - if replace: - claim = Claim() - claim.channel.public_key_bytes = old_txo.claim.channel.public_key_bytes - else: - claim = Claim.from_bytes(old_txo.claim.to_bytes()) - claim.channel.update(**kwargs) - tx = await Transaction.claim_update( - old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0] - ) - new_txo = tx.outputs[0] - - if new_signing_key: - new_txo.generate_channel_private_key() - else: - new_txo.private_key = old_txo.private_key - - new_txo.script.generate() - - await tx.sign(funding_accounts) - - if not preview: - account.add_channel_private_key(new_txo.private_key) - wallet.save() - await self.broadcast_or_release(tx, blocking) - await self.storage.save_claims([self._old_get_temp_claim_info( - tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount) - )]) - self.component_manager.loop.create_task(self.analytics_manager.send_new_channel()) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - async def jsonrpc_channel_abandon( - self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None, - preview=False, blocking=True): - """ - Abandon one of my channel claims. 
- - Usage: - channel_abandon [<claim_id> | --claim_id=<claim_id>] - [<txid> | --txid=<txid>] [<nout> | --nout=<nout>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--preview] [--blocking] - - Options: - --claim_id=<claim_id> : (str) claim_id of the claim to abandon - --txid=<txid> : (str) txid of the claim to abandon - --nout=<nout> : (int) nout of the claim to abandon - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until abandon is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - if txid is not None and nout is not None: - claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout - ) - elif claim_id is not None: - claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, claim_id=claim_id - ) - else: - raise Exception('Must specify claim_id, or txid and nout') - - if not claims: - raise Exception('No claim found for the specified claim_id or txid:nout') - - tx = await Transaction.create( - [Input.spend(txo) for txo in claims], [], [account], account - ) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - def jsonrpc_channel_list(self, *args, **kwargs): - """ - List my channel claims. - - Usage: - channel_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - [--name=<name>...] [--claim_id=<claim_id>...] [--is_spent] - [--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals] - - Options: - --name=<name> : (str or list) channel name - --claim_id=<claim_id> : (str or list) channel id - --is_spent : (bool) shows previous channel updates and abandons - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --resolve : (bool) resolves each channel to provide additional metadata - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - - Returns: {Paginated[Output]} - """ - kwargs['type'] = 'channel' - if 'is_spent' not in kwargs: - kwargs['is_not_spent'] = True - return self.jsonrpc_txo_list(*args, **kwargs) - - @requires(WALLET_COMPONENT) - async def jsonrpc_channel_export(self, channel_id=None, channel_name=None, account_id=None, wallet_id=None): - """ - Export channel private key. - - Usage: - channel_export (<channel_id> | --channel_id=<channel_id> | --channel_name=<channel_name>) - [--account_id=<account_id>...] [--wallet_id=<wallet_id>] - - Options: - --channel_id=<channel_id> : (str) claim id of channel to export - --channel_name=<channel_name> : (str) name of channel to export - --account_id=<account_id> : (str) one or more account ids for accounts - to look in for channels, defaults to - all accounts. 
- --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - - Returns: - (str) serialized channel private key - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - channel = await self.get_channel_or_error(wallet, account_id, channel_id, channel_name, for_signing=True) - address = channel.get_address(self.ledger) - public_key = await self.ledger.get_public_key_for_address(wallet, address) - if not public_key: - raise Exception("Can't find public key for address holding the channel.") - export = { - 'name': channel.claim_name, - 'channel_id': channel.claim_id, - 'holding_address': address, - 'holding_public_key': public_key.extended_key_string(), - 'signing_private_key': channel.private_key.to_pem().decode() - } - return base58.b58encode(json.dumps(export, separators=(',', ':'))) - - @requires(WALLET_COMPONENT) - async def jsonrpc_channel_import(self, channel_data, wallet_id=None): - """ - Import serialized channel private key (to allow signing new streams to the channel) - - Usage: - channel_import (<channel_data> | --channel_data=<channel_data>) [--wallet_id=<wallet_id>] - - Options: - --channel_data=<channel_data> : (str) serialized channel, as exported by channel export - --wallet_id=<wallet_id> : (str) import into specific wallet - - Returns: - (dict) Result dictionary - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - - decoded = base58.b58decode(channel_data) - data = json.loads(decoded) - channel_private_key = ecdsa.SigningKey.from_pem( - data['signing_private_key'], hashfunc=hashlib.sha256 - ) - public_key_der = channel_private_key.get_verifying_key().to_der() - - # check that the holding_address hasn't changed since the export was made - holding_address = data['holding_address'] - channels, _, _, _ = await self.ledger.claim_search( - wallet.accounts, public_key_id=self.ledger.public_key_to_address(public_key_der) - ) - if channels and channels[0].get_address(self.ledger) != holding_address: - holding_address = channels[0].get_address(self.ledger) - - account = await self.ledger.get_account_for_address(wallet, holding_address) - if account: - # Case 1: channel holding address is in one of the accounts we already have - # simply add the certificate to existing account - pass - else: - # Case 2: channel holding address hasn't changed and thus is in the bundled read-only account - # create a single-address holding account to manage the channel - if holding_address == data['holding_address']: - account = Account.from_dict(self.ledger, wallet, { - 'name': f"Holding Account For Channel {data['name']}", - 'public_key': data['holding_public_key'], - 'address_generator': {'name': 'single-address'} - }) - if self.ledger.network.is_connected: - await self.ledger.subscribe_account(account) - await self.ledger._update_tasks.done.wait() - # Case 3: the holding address has changed and we can't create or find an account for it - else: - raise Exception( - "Channel owning account has changed since the channel was exported and " - "it is not an account to which you have access." - ) - account.add_channel_private_key(channel_private_key) - wallet.save() - return f"Added channel signing key for {data['name']}." - - STREAM_DOC = """ - Create, update, abandon, list and inspect your stream claims. - """ - - @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT) - async def jsonrpc_publish(self, name, **kwargs): - """ - Create or replace a stream claim at a given name (use 'stream create/update' for more control). 
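Aside: channel_export above returns base58-encoded JSON and channel_import reverses it. A minimal sketch for inspecting such a blob offline, using the same base58 package the deleted code imports; the serialized argument is a placeholder for an actual export string.

import json
import base58

def inspect_channel_export(serialized: str) -> dict:
    # Undo the base58(json.dumps(...)) encoding produced by channel_export.
    data = json.loads(base58.b58decode(serialized))
    # Keys written by channel_export: name, channel_id, holding_address,
    # holding_public_key, signing_private_key.
    print("channel:", data["name"], data["channel_id"])
    print("holding address:", data["holding_address"])
    return data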
- - Usage: - publish (<name> | --name=<name>) [--bid=<bid>] [--file_path=<file_path>] - [--validate_file] [--optimize_file] - [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>] - [--title=<title>] [--description=<description>] [--author=<author>] - [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...] - [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>] - [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>] - [--channel_id=<channel_id> | --channel_name=<channel_name>] - [--channel_account_id=<channel_account_id>...] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] - - Options: - --name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash)) - --bid=<bid> : (decimal) amount to back the claim - --file_path=<file_path> : (str) path to file to be associated with name. - --validate_file : (bool) validate that the video container and encodings match - common web browser support or that optimization succeeds if specified. - FFmpeg is required - --optimize_file : (bool) transcode the video & audio if necessary to ensure - common web browser support. FFmpeg is required - --fee_currency=<fee_currency> : (string) specify fee currency - --fee_amount=<fee_amount> : (decimal) content download fee - --fee_address=<fee_address> : (str) address where to send fee payments, will use - value from --claim_address if not provided - --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --author=<author> : (str) author of the publication. The usage for this field is not - the same as for channels. The author field is used to credit an author - who is not the publisher and is not represented by the channel. For - example, a pdf file of 'The Odyssey' has an author of 'Homer' but may - by published to a channel such as '@classics', or to no channel at all - --tags=<tags> : (list) add content tags - --languages=<languages> : (list) languages used by the channel, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... 
--locations="{'country': 'US', 'state': 'NH'}" - - --license=<license> : (str) publication license - --license_url=<license_url> : (str) publication license url - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch - --width=<width> : (int) image/video width, automatically calculated from media file - --height=<height> : (int) image/video height, automatically calculated from media file - --duration=<duration> : (int) audio/video duration in seconds, automatically calculated - --channel_id=<channel_id> : (str) claim id of the publisher channel - --channel_name=<channel_name> : (str) name of publisher channel - --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in - for channel certificates, defaults to all accounts. - --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - self.valid_stream_name_or_error(name) - wallet = self.wallet_manager.get_wallet_or_default(kwargs.get('wallet_id')) - if kwargs.get('account_id'): - accounts = [wallet.get_account_or_error(kwargs.get('account_id'))] - else: - accounts = wallet.accounts - claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, claim_name=name - ) - if len(claims) == 0: - if 'bid' not in kwargs: - raise Exception("'bid' is a required argument for new publishes.") - if 'file_path' not in kwargs: - raise Exception("'file_path' is a required argument for new publishes.") - return await self.jsonrpc_stream_create(name, **kwargs) - elif len(claims) == 1: - assert claims[0].claim.is_stream, f"Claim at name '{name}' is not a stream claim." - return await self.jsonrpc_stream_update(claims[0].claim_id, replace=True, **kwargs) - raise Exception( - f"There are {len(claims)} claims for '{name}', please use 'stream update' command " - f"to update a specific stream claim." - ) - - @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT) - async def jsonrpc_stream_repost(self, name, bid, claim_id, allow_duplicate_name=False, channel_id=None, - channel_name=None, channel_account_id=None, account_id=None, wallet_id=None, - claim_address=None, funding_account_ids=None, preview=False, blocking=False): - """ - Creates a claim that references an existing stream by its claim id. - - Usage: - stream_repost (<name> | --name=<name>) (<bid> | --bid=<bid>) (<claim_id> | --claim_id=<claim_id>) - [--allow_duplicate_name=<allow_duplicate_name>] - [--channel_id=<channel_id> | --channel_name=<channel_name>] - [--channel_account_id=<channel_account_id>...] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] 
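Aside: as implemented above, publish behaves like stream_create when the wallet holds no claim with the given name (bid and file_path are then required) and like stream_update with replace=True when exactly one exists. A sketch of invoking it over JSON-RPC; the endpoint, name, path, bid and channel are placeholders or assumptions.

import requests

tx = requests.post("http://localhost:5279", json={
    "method": "publish",
    "params": {
        "name": "my-example-claim",             # placeholder claim name
        "bid": "0.01",                          # required when the name is new to the wallet
        "file_path": "/tmp/example-video.mp4",  # placeholder local file
        "title": "Example",
        "channel_name": "@example-channel",     # optional, placeholder
    },
}).json()["result"]
print(tx.get("txid", tx))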
- [--preview] [--blocking] - - Options: - --name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash)) - --bid=<bid> : (decimal) amount to back the claim - --claim_id=<claim_id> : (str) id of the claim being reposted - --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with - given name. default: false. - --channel_id=<channel_id> : (str) claim id of the publisher channel - --channel_name=<channel_name> : (str) name of the publisher channel - --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in - for channel certificates, defaults to all accounts. - --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - self.valid_stream_name_or_error(name) - account = wallet.get_account_or_default(account_id) - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True) - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - claim_address = await self.get_receiving_address(claim_address, account) - claims = await account.get_claims(claim_name=name) - if len(claims) > 0: - if not allow_duplicate_name: - raise Exception( - f"You already have a stream claim published under the name '{name}'. " - f"Use --allow-duplicate-name flag to override." - ) - if not VALID_FULL_CLAIM_ID.fullmatch(claim_id): - raise Exception('Invalid claim id. It is expected to be a 40 characters long hexadecimal string.') - - claim = Claim() - claim.repost.reference.claim_id = claim_id - tx = await Transaction.claim_create( - name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel - ) - new_txo = tx.outputs[0] - - if channel: - new_txo.sign(channel) - await tx.sign(funding_accounts) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT) - async def jsonrpc_stream_create( - self, name, bid, file_path, allow_duplicate_name=False, - channel_id=None, channel_name=None, channel_account_id=None, - account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None, - preview=False, blocking=False, validate_file=False, optimize_file=False, **kwargs): - """ - Make a new stream claim and announce the associated file to lbrynet. - - Usage: - stream_create (<name> | --name=<name>) (<bid> | --bid=<bid>) (<file_path> | --file_path=<file_path>) - [--validate_file] [--optimize_file] - [--allow_duplicate_name=<allow_duplicate_name>] - [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>] - [--title=<title>] [--description=<description>] [--author=<author>] - [--tags=<tags>...] [--languages=<languages>...] 
[--locations=<locations>...] - [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>] - [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>] - [--channel_id=<channel_id> | --channel_name=<channel_name>] - [--channel_account_id=<channel_account_id>...] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] - - Options: - --name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash)) - --bid=<bid> : (decimal) amount to back the claim - --file_path=<file_path> : (str) path to file to be associated with name. - --validate_file : (bool) validate that the video container and encodings match - common web browser support or that optimization succeeds if specified. - FFmpeg is required - --optimize_file : (bool) transcode the video & audio if necessary to ensure - common web browser support. FFmpeg is required - --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with - given name. default: false. - --fee_currency=<fee_currency> : (string) specify fee currency - --fee_amount=<fee_amount> : (decimal) content download fee - --fee_address=<fee_address> : (str) address where to send fee payments, will use - value from --claim_address if not provided - --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --author=<author> : (str) author of the publication. The usage for this field is not - the same as for channels. The author field is used to credit an author - who is not the publisher and is not represented by the channel. For - example, a pdf file of 'The Odyssey' has an author of 'Homer' but may - by published to a channel such as '@classics', or to no channel at all - --tags=<tags> : (list) add content tags - --languages=<languages> : (list) languages used by the channel, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... 
--locations="{'country': 'US', 'state': 'NH'}" - - --license=<license> : (str) publication license - --license_url=<license_url> : (str) publication license url - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch - --width=<width> : (int) image/video width, automatically calculated from media file - --height=<height> : (int) image/video height, automatically calculated from media file - --duration=<duration> : (int) audio/video duration in seconds, automatically calculated - --channel_id=<channel_id> : (str) claim id of the publisher channel - --channel_name=<channel_name> : (str) name of the publisher channel - --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in - for channel certificates, defaults to all accounts. - --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - self.valid_stream_name_or_error(name) - account = wallet.get_account_or_default(account_id) - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True) - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - claim_address = await self.get_receiving_address(claim_address, account) - kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address) - - claims = await account.get_claims(claim_name=name) - if len(claims) > 0: - if not allow_duplicate_name: - raise Exception( - f"You already have a stream claim published under the name '{name}'. " - f"Use --allow-duplicate-name flag to override." 
- ) - - file_path, spec = await self._video_file_analyzer.verify_or_repair( - validate_file, optimize_file, file_path, ignore_non_video=True - ) - kwargs.update(spec) - - claim = Claim() - claim.stream.update(file_path=file_path, sd_hash='0' * 96, **kwargs) - tx = await Transaction.claim_create( - name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel - ) - new_txo = tx.outputs[0] - - file_stream = None - if not preview: - file_stream = await self.stream_manager.create_stream(file_path) - claim.stream.source.sd_hash = file_stream.sd_hash - new_txo.script.generate() - - if channel: - new_txo.sign(channel) - await tx.sign(funding_accounts) - - if not preview: - await self.broadcast_or_release(tx, blocking) - await self.storage.save_claims([self._old_get_temp_claim_info( - tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount) - )]) - await self.storage.save_content_claim(file_stream.stream_hash, new_txo.id) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT) - async def jsonrpc_stream_update( - self, claim_id, bid=None, file_path=None, - channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False, - account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None, - preview=False, blocking=False, replace=False, validate_file=False, optimize_file=False, **kwargs): - """ - Update an existing stream claim and if a new file is provided announce it to lbrynet. - - Usage: - stream_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] [--file_path=<file_path>] - [--validate_file] [--optimize_file] - [--file_name=<file_name>] [--file_size=<file_size>] [--file_hash=<file_hash>] - [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] - [--fee_address=<fee_address>] [--clear_fee] - [--title=<title>] [--description=<description>] [--author=<author>] - [--tags=<tags>...] [--clear_tags] - [--languages=<languages>...] [--clear_languages] - [--locations=<locations>...] [--clear_locations] - [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>] - [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>] - [--channel_id=<channel_id> | --channel_name=<channel_name> | --clear_channel] - [--channel_account_id=<channel_account_id>...] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] [--replace] - - Options: - --claim_id=<claim_id> : (str) id of the stream claim to update - --bid=<bid> : (decimal) amount to back the claim - --file_path=<file_path> : (str) path to file to be associated with name. - --validate_file : (bool) validate that the video container and encodings match - common web browser support or that optimization succeeds if specified. - FFmpeg is required and file_path must be specified. - --optimize_file : (bool) transcode the video & audio if necessary to ensure common - web browser support. FFmpeg is required and file_path must be specified. - --file_name=<file_name> : (str) override file name, defaults to name from file_path. - --file_size=<file_size> : (str) override file size, otherwise automatically computed. - --file_hash=<file_hash> : (str) override file hash, otherwise automatically computed. 
- --fee_currency=<fee_currency> : (string) specify fee currency - --fee_amount=<fee_amount> : (decimal) content download fee - --fee_address=<fee_address> : (str) address where to send fee payments, will use - value from --claim_address if not provided - --clear_fee : (bool) clear previously set fee - --title=<title> : (str) title of the publication - --description=<description> : (str) description of the publication - --author=<author> : (str) author of the publication. The usage for this field is not - the same as for channels. The author field is used to credit an author - who is not the publisher and is not represented by the channel. For - example, a pdf file of 'The Odyssey' has an author of 'Homer' but may - by published to a channel such as '@classics', or to no channel at all - --tags=<tags> : (list) add content tags - --clear_tags : (bool) clear existing tags (prior to adding new ones) - --languages=<languages> : (list) languages used by the channel, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --clear_languages : (bool) clear existing languages (prior to adding new ones) - --locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... --locations="{'country': 'US', 'state': 'NH'}" - - --clear_locations : (bool) clear existing locations (prior to adding new ones) - --license=<license> : (str) publication license - --license_url=<license_url> : (str) publication license url - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch - --width=<width> : (int) image/video width, automatically calculated from media file - --height=<height> : (int) image/video height, automatically calculated from media file - --duration=<duration> : (int) audio/video duration in seconds, automatically calculated - --channel_id=<channel_id> : (str) claim id of the publisher channel - --channel_name=<channel_name> : (str) name of the publisher channel - --clear_channel : (bool) remove channel signature - --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in - for channel certificates, defaults to all accounts. 
- --account_id=<account_id> : (str) account in which to look for stream (default: all) - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - --replace : (bool) instead of modifying specific values on - the stream, this will clear all existing values - and only save passed in values, useful for form - submissions where all values are always set - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - existing_claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, claim_id=claim_id - ) - if len(existing_claims) != 1: - account_ids = ', '.join(f"'{account.id}'" for account in accounts) - raise Exception( - f"Can't find the stream '{claim_id}' in account(s) {account_ids}." - ) - old_txo = existing_claims[0] - if not old_txo.claim.is_stream: - raise Exception( - f"A claim with id '{claim_id}' was found but it is not a stream claim." - ) - - if bid is not None: - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - else: - amount = old_txo.amount - - if claim_address is not None: - self.valid_address_or_error(claim_address) - else: - claim_address = old_txo.get_address(account.ledger) - - channel = None - if channel_id or channel_name: - channel = await self.get_channel_or_error( - wallet, channel_account_id, channel_id, channel_name, for_signing=True) - elif old_txo.claim.is_signed and not clear_channel and not replace: - channel = old_txo.channel - - fee_address = self.get_fee_address(kwargs, claim_address) - if fee_address: - kwargs['fee_address'] = fee_address - - file_path, spec = await self._video_file_analyzer.verify_or_repair( - validate_file, optimize_file, file_path, ignore_non_video=True - ) - kwargs.update(spec) - - if replace: - claim = Claim() - claim.stream.message.source.CopyFrom( - old_txo.claim.stream.message.source - ) - stream_type = old_txo.claim.stream.stream_type - if stream_type: - old_stream_type = getattr(old_txo.claim.stream.message, stream_type) - new_stream_type = getattr(claim.stream.message, stream_type) - new_stream_type.CopyFrom(old_stream_type) - claim.stream.update(file_path=file_path, **kwargs) - else: - claim = Claim.from_bytes(old_txo.claim.to_bytes()) - claim.stream.update(file_path=file_path, **kwargs) - tx = await Transaction.claim_update( - old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel - ) - new_txo = tx.outputs[0] - - stream_hash = None - if not preview: - old_stream = self.stream_manager.streams.get(old_txo.claim.stream.source.sd_hash, None) - if file_path is not None: - if old_stream: - await self.stream_manager.delete_stream(old_stream, delete_file=False) - file_stream = await self.stream_manager.create_stream(file_path) - new_txo.claim.stream.source.sd_hash = file_stream.sd_hash - new_txo.script.generate() - stream_hash = file_stream.stream_hash - 
elif old_stream: - stream_hash = old_stream.stream_hash - - if channel: - new_txo.sign(channel) - await tx.sign(funding_accounts) - - if not preview: - await self.broadcast_or_release(tx, blocking) - await self.storage.save_claims([self._old_get_temp_claim_info( - tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount) - )]) - if stream_hash: - await self.storage.save_content_claim(stream_hash, new_txo.id) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - async def jsonrpc_stream_abandon( - self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None, - preview=False, blocking=False): - """ - Abandon one of my stream claims. - - Usage: - stream_abandon [<claim_id> | --claim_id=<claim_id>] - [<txid> | --txid=<txid>] [<nout> | --nout=<nout>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--preview] [--blocking] - - Options: - --claim_id=<claim_id> : (str) claim_id of the claim to abandon - --txid=<txid> : (str) txid of the claim to abandon - --nout=<nout> : (int) nout of the claim to abandon - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until abandon is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - if txid is not None and nout is not None: - claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout - ) - elif claim_id is not None: - claims = await self.ledger.get_claims( - wallet=wallet, accounts=accounts, claim_id=claim_id - ) - else: - raise Exception('Must specify claim_id, or txid and nout') - - if not claims: - raise Exception('No claim found for the specified claim_id or txid:nout') - - tx = await Transaction.create( - [Input.spend(txo) for txo in claims], [], accounts, account - ) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon')) - else: - await self.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - def jsonrpc_stream_list(self, *args, **kwargs): - """ - List my stream claims. - - Usage: - stream_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - [--name=<name>...] [--claim_id=<claim_id>...] 
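Aside: because stream_update only re-announces a file when file_path is given, metadata can be edited without touching the stored stream. A sketch of a metadata-only update over JSON-RPC, using clear_tags/tags as documented above; the claim id and endpoint are placeholders.

import requests

tx = requests.post("http://localhost:5279", json={
    "method": "stream_update",
    "params": {
        "claim_id": "0000000000000000000000000000000000000000",  # placeholder claim id
        "clear_tags": True,            # drop existing tags before adding new ones
        "tags": ["tutorial", "lbry"],
        "title": "Updated title",
        # no file_path: the existing stream and sd_hash are kept as-is
    },
}).json()["result"]
print(tx)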
[--is_spent] - [--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals] - - Options: - --name=<name> : (str or list) stream name - --claim_id=<claim_id> : (str or list) stream id - --is_spent : (bool) shows previous stream updates and abandons - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --resolve : (bool) resolves each stream to provide additional metadata - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - - Returns: {Paginated[Output]} - """ - kwargs['type'] = 'stream' - if 'is_spent' not in kwargs: - kwargs['is_not_spent'] = True - return self.jsonrpc_txo_list(*args, **kwargs) - - @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, - DHT_COMPONENT, DATABASE_COMPONENT) - def jsonrpc_stream_cost_estimate(self, uri): - """ - Get estimated cost for a lbry stream - - Usage: - stream_cost_estimate (<uri> | --uri=<uri>) - - Options: - --uri=<uri> : (str) uri to use - - Returns: - (float) Estimated cost in lbry credits, returns None if uri is not - resolvable - """ - return self.get_est_cost_from_uri(uri) - - COLLECTION_DOC = """ - Create, update, list, resolve, and abandon collections. - """ - - @requires(WALLET_COMPONENT) - async def jsonrpc_collection_create( - self, name, bid, claims, allow_duplicate_name=False, - channel_id=None, channel_name=None, channel_account_id=None, - account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None, - preview=False, blocking=False, **kwargs): - """ - Create a new collection. - - Usage: - collection_create (<name> | --name=<name>) (<bid> | --bid=<bid>) - (<claims>... | --claims=<claims>...) - [--allow_duplicate_name] - [--title=<title>] [--description=<description>] - [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...] - [--thumbnail_url=<thumbnail_url>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...] - [--preview] [--blocking] - - Options: - --name=<name> : (str) name of the collection - --bid=<bid> : (decimal) amount to back the claim - --claims=<claims> : (list) claim ids to be included in the collection - --allow_duplicate_name : (bool) create new collection even if one already exists with - given name. default: false. - --title=<title> : (str) title of the collection - --description=<description> : (str) description of the collection - --clear_languages : (bool) clear existing languages (prior to adding new ones) - --tags=<tags> : (list) content tags - --clear_languages : (bool) clear existing languages (prior to adding new ones) - --languages=<languages> : (list) languages used by the collection, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --locations=<locations> : (list) locations of the collection, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... 
- for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... --locations="{'country': 'US', 'state': 'NH'}" - - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the collection is sent to, if not specified - it will be determined automatically from the account - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - account = wallet.get_account_or_default(account_id) - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - self.valid_collection_name_or_error(name) - channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True) - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - claim_address = await self.get_receiving_address(claim_address, account) - - existing_collections = await self.ledger.get_collections(accounts=wallet.accounts, claim_name=name) - if len(existing_collections) > 0: - if not allow_duplicate_name: - raise Exception( - f"You already have a collection under the name '{name}'. " - f"Use --allow-duplicate-name flag to override." - ) - - claim = Claim() - claim.collection.update(claims=claims, **kwargs) - tx = await Transaction.claim_create( - name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel - ) - new_txo = tx.outputs[0] - - if channel: - new_txo.sign(channel) - await tx.sign(funding_accounts) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - async def jsonrpc_collection_update( - self, claim_id, bid=None, - channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False, - account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None, - preview=False, blocking=False, replace=False, **kwargs): - """ - Update an existing collection claim. - - Usage: - collection_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] - [--claims=<claims>...] [--clear_claims] - [--title=<title>] [--description=<description>] - [--tags=<tags>...] [--clear_tags] - [--languages=<languages>...] [--clear_languages] - [--locations=<locations>...] [--clear_locations] - [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--claim_address=<claim_address>] [--new_signing_key] - [--funding_account_ids=<funding_account_ids>...] 
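For reference, a sketch of the keyword arguments collection_create (implemented above) accepts over JSON-RPC; the key names come from the docstring, the values are placeholders.

    collection_params = {
        "name": "my-favorites",                      # collection claim name
        "bid": "0.01",                               # amount backing the claim, in LBC
        "claims": ["<claim id 1>", "<claim id 2>"],  # ordered claim ids in the collection
        "languages": ["en", "es-MX"],                # RFC 5646 language codes
        # JSON-RPC takes locations as dicts; on the command line the same data is the
        # colon-delimited form shown above, e.g. "US:NH:Manchester:03101:42.990605:-71.460989".
        "locations": [{"country": "US", "state": "NH"}],
        "allow_duplicate_name": False,
    }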
- [--preview] [--blocking] [--replace] - - Options: - --claim_id=<claim_id> : (str) claim_id of the collection to update - --bid=<bid> : (decimal) amount to back the claim - --claims=<claims> : (list) claim ids - --clear_claims : (bool) clear existing claim references (prior to adding new ones) - --title=<title> : (str) title of the collection - --description=<description> : (str) description of the collection - --tags=<tags> : (list) add content tags - --clear_tags : (bool) clear existing tags (prior to adding new ones) - --languages=<languages> : (list) languages used by the collection, - using RFC 5646 format, eg: - for English `--languages=en` - for Spanish (Spain) `--languages=es-ES` - for Spanish (Mexican) `--languages=es-MX` - for Chinese (Simplified) `--languages=zh-Hans` - for Chinese (Traditional) `--languages=zh-Hant` - --clear_languages : (bool) clear existing languages (prior to adding new ones) - --locations=<locations> : (list) locations of the collection, consisting of 2 letter - `country` code and a `state`, `city` and a postal - `code` along with a `latitude` and `longitude`. - for JSON RPC: pass a dictionary with aforementioned - attributes as keys, eg: - ... - "locations": [{'country': 'US', 'state': 'NH'}] - ... - for command line: pass a colon delimited list - with values in the following order: - - "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE" - - making sure to include colon for blank values, for - example to provide only the city: - - ... --locations="::Manchester" - - with all values set: - - ... --locations="US:NH:Manchester:03101:42.990605:-71.460989" - - optionally, you can just pass the "LATITUDE:LONGITUDE": - - ... --locations="42.990605:-71.460989" - - finally, you can also pass JSON string of dictionary - on the command line as you would via JSON RPC - - ... --locations="{'country': 'US', 'state': 'NH'}" - - --clear_locations : (bool) clear existing locations (prior to adding new ones) - --thumbnail_url=<thumbnail_url>: (str) thumbnail url - --account_id=<account_id> : (str) account in which to look for collection (default: all) - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --claim_address=<claim_address>: (str) address where the collection is sent - --new_signing_key : (bool) generate a new signing key, will invalidate all previous publishes - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - --replace : (bool) instead of modifying specific values on - the collection, this will clear all existing values - and only save passed in values, useful for form - submissions where all values are always set - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - existing_collections = await self.ledger.get_collections( - wallet=wallet, accounts=accounts, claim_id=claim_id - ) - if len(existing_collections) != 1: - account_ids = ', '.join(f"'{account.id}'" for account in accounts) - raise Exception( - f"Can't find the collection '{claim_id}' in account(s) {account_ids}." 
- ) - old_txo = existing_collections[0] - if not old_txo.claim.is_collection: - raise Exception( - f"A claim with id '{claim_id}' was found but it is not a collection." - ) - - if bid is not None: - amount = self.get_dewies_or_error('bid', bid, positive_value=True) - else: - amount = old_txo.amount - - if claim_address is not None: - self.valid_address_or_error(claim_address) - else: - claim_address = old_txo.get_address(account.ledger) - - channel = None - if channel_id or channel_name: - channel = await self.get_channel_or_error( - wallet, channel_account_id, channel_id, channel_name, for_signing=True) - elif old_txo.claim.is_signed and not clear_channel and not replace: - channel = old_txo.channel - - if replace: - claim = Claim() - claim.collection.message.source.CopyFrom( - old_txo.claim.collection.message.source - ) - claim.collection.update(**kwargs) - else: - claim = Claim.from_bytes(old_txo.claim.to_bytes()) - claim.collection.update(**kwargs) - tx = await Transaction.claim_update( - old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel - ) - new_txo = tx.outputs[0] - - new_txo.script.generate() - - if channel: - new_txo.sign(channel) - await tx.sign(funding_accounts) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish')) - else: - await account.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - async def jsonrpc_collection_abandon(self, *args, **kwargs): - """ - Abandon one of my collection claims. - - Usage: - collection_abandon [<claim_id> | --claim_id=<claim_id>] - [<txid> | --txid=<txid>] [<nout> | --nout=<nout>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--preview] [--blocking] - - Options: - --claim_id=<claim_id> : (str) claim_id of the claim to abandon - --txid=<txid> : (str) txid of the claim to abandon - --nout=<nout> : (int) nout of the claim to abandon - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until abandon is in mempool - - Returns: {Transaction} - """ - return await self.jsonrpc_stream_abandon(*args, **kwargs) - - @requires(WALLET_COMPONENT) - def jsonrpc_collection_list(self, resolve_claims=0, account_id=None, wallet_id=None, page=None, page_size=None): - """ - List my collection claims. 
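The --replace branch in the update handler above starts from an empty Claim and copies only the old collection's source, so any metadata not passed again is dropped; the default path keeps everything and only overwrites the supplied fields. Illustrative payloads (placeholder ids):

    # Default update: only the supplied fields change, all other metadata is kept.
    update_params = {"claim_id": "<claim id>", "title": "New title"}

    # replace=True: existing metadata is cleared and only the passed-in fields
    # (plus the original source reference) remain on the updated claim.
    replace_params = {"claim_id": "<claim id>", "title": "New title", "replace": True}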
- - Usage: - collection_list [--resolve_claims=<resolve_claims>] [<account_id> | --account_id=<account_id>] - [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>] - - Options: - --resolve_claims=<resolve_claims> : (int) resolve every claim - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: {Paginated[Output]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if account_id: - account = wallet.get_account_or_error(account_id) - collections = account.get_collections - collection_count = account.get_collection_count - else: - collections = partial(self.ledger.get_collections, wallet=wallet, accounts=wallet.accounts) - collection_count = partial(self.ledger.get_collection_count, wallet=wallet, accounts=wallet.accounts) - return paginate_rows(collections, collection_count, page, page_size, resolve_claims=resolve_claims) - - async def jsonrpc_collection_resolve( - self, claim_id=None, url=None, wallet_id=None, page=1, page_size=DEFAULT_PAGE_SIZE): - """ - Resolve claims in the collection. - - Usage: - collection_resolve (--claim_id=<claim_id> | --url=<url>) - [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>] - - Options: - --claim_id=<claim_id> : (str) claim id of the collection - --url=<url> : (str) url of the collection - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: {Paginated[Output]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - - if claim_id: - txo = await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id) - if not isinstance(txo, Output) or not txo.is_claim: - raise Exception(f"Could not find collection with claim_id '{claim_id}'. ") - elif url: - txo = (await self.ledger.resolve(wallet.accounts, [url]))[url] - if not isinstance(txo, Output) or not txo.is_claim: - raise Exception(f"Could not find collection with url '{url}'. ") - else: - raise Exception(f"Missing argument claim_id or url. ") - - page_num, page_size = abs(page), min(abs(page_size), 50) - items = await self.ledger.resolve_collection(txo, page_size * (page_num - 1), page_size) - total_items = len(txo.claim.collection.claims.ids) - - return { - "items": items, - "total_pages": int((total_items + (page_size - 1)) / page_size), - "total_items": total_items, - "page_size": page_size, - "page": page - } - - SUPPORT_DOC = """ - Create, list and abandon all types of supports. - """ - - @requires(WALLET_COMPONENT) - async def jsonrpc_support_create( - self, claim_id, amount, tip=False, account_id=None, wallet_id=None, funding_account_ids=None, - preview=False, blocking=False): - """ - Create a support or a tip for name claim. - - Usage: - support_create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>) - [--tip] [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--preview] [--blocking] [--funding_account_ids=<funding_account_ids>...] - - Options: - --claim_id=<claim_id> : (str) claim_id of the claim to support - --amount=<amount> : (decimal) amount of support - --tip : (bool) send support to claim owner, default: false. 
- --account_id=<account_id> : (str) account to use for holding the transaction - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until transaction is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - funding_accounts = wallet.get_accounts_or_all(funding_account_ids) - amount = self.get_dewies_or_error("amount", amount) - claim = await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id) - claim_address = claim.get_address(self.ledger) - if not tip: - account = wallet.get_account_or_default(account_id) - claim_address = await account.receiving.get_or_create_usable_address() - - tx = await Transaction.support( - claim.claim_name, claim_id, amount, claim_address, funding_accounts, funding_accounts[0] - ) - - if not preview: - await self.broadcast_or_release(tx, blocking) - await self.storage.save_supports({claim_id: [{ - 'txid': tx.id, - 'nout': tx.position, - 'address': claim_address, - 'claim_id': claim_id, - 'amount': dewies_to_lbc(amount) - }]}) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('new_support')) - else: - await self.ledger.release_tx(tx) - - return tx - - @requires(WALLET_COMPONENT) - def jsonrpc_support_list(self, *args, received=False, sent=False, staked=False, **kwargs): - """ - List staked supports and sent/received tips. - - Usage: - support_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - [--name=<name>...] [--claim_id=<claim_id>...] - [--received | --sent | --staked] [--is_spent] - [--page=<page>] [--page_size=<page_size>] [--no_totals] - - Options: - --name=<name> : (str or list) claim name - --claim_id=<claim_id> : (str or list) claim id - --received : (bool) only show received (tips) - --sent : (bool) only show sent (tips) - --staked : (bool) only show my staked supports - --is_spent : (bool) show abandoned supports - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - - Returns: {Paginated[Output]} - """ - kwargs['type'] = 'support' - if 'is_spent' not in kwargs: - kwargs['is_not_spent'] = True - if received: - kwargs['is_not_my_input'] = True - kwargs['is_my_output'] = True - elif sent: - kwargs['is_my_input'] = True - kwargs['is_not_my_output'] = True - # spent for not my outputs is undetermined - kwargs.pop('is_spent', None) - kwargs.pop('is_not_spent', None) - elif staked: - kwargs['is_my_input'] = True - kwargs['is_my_output'] = True - return self.jsonrpc_txo_list(*args, **kwargs) - - @requires(WALLET_COMPONENT) - async def jsonrpc_support_abandon( - self, claim_id=None, txid=None, nout=None, keep=None, - account_id=None, wallet_id=None, preview=False, blocking=False): - """ - Abandon supports, including tips, of a specific claim, optionally - keeping some amount as supports. 
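As implemented above, support_create pays the support either to the claim owner's address (when tip is set) or to a fresh address in one of your own accounts, so plain supports remain under your control; illustrative payloads:

    # Stake 1.0 LBC on a claim; the output stays in your wallet and can be abandoned later.
    support_params = {"claim_id": "<claim id>", "amount": "1.0"}

    # Send the same amount as a tip; the output pays the claim owner's address instead.
    tip_params = {"claim_id": "<claim id>", "amount": "1.0", "tip": True}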
- - Usage: - support_abandon [--claim_id=<claim_id>] [(--txid=<txid> --nout=<nout>)] [--keep=<keep>] - [--account_id=<account_id>] [--wallet_id=<wallet_id>] - [--preview] [--blocking] - - Options: - --claim_id=<claim_id> : (str) claim_id of the support to abandon - --txid=<txid> : (str) txid of the claim to abandon - --nout=<nout> : (int) nout of the claim to abandon - --keep=<keep> : (decimal) amount of lbc to keep as support - --account_id=<account_id> : (str) id of the account to use - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until abandon is in mempool - - Returns: {Transaction} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first." - if account_id: - account = wallet.get_account_or_error(account_id) - accounts = [account] - else: - account = wallet.default_account - accounts = wallet.accounts - - if txid is not None and nout is not None: - supports = await self.ledger.get_supports( - wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout - ) - elif claim_id is not None: - supports = await self.ledger.get_supports( - wallet=wallet, accounts=accounts, claim_id=claim_id - ) - else: - raise Exception('Must specify claim_id, or txid and nout') - - if not supports: - raise Exception('No supports found for the specified claim_id or txid:nout') - - if keep is not None: - keep = self.get_dewies_or_error('keep', keep) - else: - keep = 0 - - outputs = [] - if keep > 0: - outputs = [ - Output.pay_support_pubkey_hash( - keep, supports[0].claim_name, supports[0].claim_id, supports[0].pubkey_hash - ) - ] - - tx = await Transaction.create( - [Input.spend(txo) for txo in supports], outputs, accounts, account - ) - - if not preview: - await self.broadcast_or_release(tx, blocking) - self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon')) - else: - await self.ledger.release_tx(tx) - - return tx - - TRANSACTION_DOC = """ - Transaction management. 
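When support_abandon above is given --keep, it spends all matching supports but recreates a single support output for the kept amount; a sketch of the two ways to target supports (placeholder values):

    # Abandon every support on a claim but leave 0.5 LBC staked on it; preview first.
    abandon_params = {"claim_id": "<claim id>", "keep": "0.5", "preview": True}

    # Or abandon one specific support output, identified by the transaction that created it.
    abandon_by_txo = {"txid": "<txid>", "nout": 1}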
- """ - - @requires(WALLET_COMPONENT) - def jsonrpc_transaction_list(self, account_id=None, wallet_id=None, page=None, page_size=None): - """ - List transactions belonging to wallet - - Usage: - transaction_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - [--page=<page>] [--page_size=<page_size>] - - Options: - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: - (list) List of transactions - - { - "claim_info": (list) claim info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) bid amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "abandon_info": (list) abandon info if in txn [{ - "address": (str) address of abandoned claim, - "balance_delta": (float) returned amount, - "amount": (float) claim amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "confirmations": (int) number of confirmations for the txn, - "date": (str) date and time of txn, - "fee": (float) txn fee, - "support_info": (list) support info if in txn [{ - "address": (str) address of support, - "balance_delta": (float) support amount, - "amount": (float) support amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "is_tip": (bool), - "nout": (int) nout - }], - "timestamp": (int) timestamp, - "txid": (str) txn id, - "update_info": (list) update info if in txn [{ - "address": (str) address of claim, - "balance_delta": (float) credited/debited - "amount": (float) absolute amount, - "claim_id": (str) claim id, - "claim_name": (str) claim name, - "nout": (int) nout - }], - "value": (float) value of txn - } - - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if account_id: - account = wallet.get_account_or_error(account_id) - transactions = account.get_transaction_history - transaction_count = account.get_transaction_history_count - else: - transactions = partial( - self.ledger.get_transaction_history, wallet=wallet, accounts=wallet.accounts) - transaction_count = partial( - self.ledger.get_transaction_history_count, wallet=wallet, accounts=wallet.accounts) - return paginate_rows(transactions, transaction_count, page, page_size) - - @requires(WALLET_COMPONENT) - def jsonrpc_transaction_show(self, txid): - """ - Get a decoded transaction from a txid - - Usage: - transaction_show (<txid> | --txid=<txid>) - - Options: - --txid=<txid> : (str) txid of the transaction - - Returns: {Transaction} - """ - return self.wallet_manager.get_transaction(unhexlify(txid)[::-1]) - - TXO_DOC = """ - List and sum transaction outputs. 
- """ - - @staticmethod - def _constrain_txo_from_kwargs( - constraints, type=None, txid=None, # pylint: disable=redefined-builtin - claim_id=None, channel_id=None, name=None, reposted_claim_id=None, - is_spent=False, is_not_spent=False, - is_my_input_or_output=None, exclude_internal_transfers=False, - is_my_output=None, is_not_my_output=None, - is_my_input=None, is_not_my_input=None): - if is_spent: - constraints['is_spent'] = True - elif is_not_spent: - constraints['is_spent'] = False - constraints['exclude_internal_transfers'] = exclude_internal_transfers - if is_my_input_or_output is True: - constraints['is_my_input_or_output'] = True - else: - if is_my_input is True: - constraints['is_my_input'] = True - elif is_not_my_input is True: - constraints['is_my_input'] = False - if is_my_output is True: - constraints['is_my_output'] = True - elif is_not_my_output is True: - constraints['is_my_output'] = False - to_hash = lambda x: unhexlify(x)[::-1] - database.constrain_single_or_list(constraints, 'txo_type', type, lambda x: TXO_TYPES[x]) - database.constrain_single_or_list(constraints, 'channel_hash', channel_id, to_hash) - database.constrain_single_or_list(constraints, 'claim_hash', claim_id, to_hash) - database.constrain_single_or_list(constraints, 'claim_name', name) - database.constrain_single_or_list(constraints, 'tx_hash', txid, to_hash) - database.constrain_single_or_list(constraints, 'reposted_claim_hash', reposted_claim_id, to_hash) - return constraints - - @requires(WALLET_COMPONENT) - def jsonrpc_txo_list( - self, account_id=None, wallet_id=None, page=None, page_size=None, - resolve=False, order_by=None, no_totals=False, include_received_tips=False, **kwargs): - """ - List my transaction outputs. - - Usage: - txo_list [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] - [--claim_id=<claim_id>...] [--channel_id=<channel_id>...] [--name=<name>...] 
- [--is_spent | --is_not_spent] - [--is_my_input_or_output | - [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]] - ] - [--exclude_internal_transfers] [--include_received_tips] - [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>] - [--resolve] [--order_by=<order_by>][--no_totals] - - Options: - --type=<type> : (str or list) claim type: stream, channel, support, - purchase, collection, repost, other - --txid=<txid> : (str or list) transaction id of outputs - --claim_id=<claim_id> : (str or list) claim id - --channel_id=<channel_id> : (str or list) claims in this channel - --name=<name> : (str or list) claim name - --is_spent : (bool) only show spent txos - --is_not_spent : (bool) only show not spent txos - --is_my_input_or_output : (bool) txos which have your inputs or your outputs, - if using this flag the other related flags - are ignored (--is_my_output, --is_my_input, etc) - --is_my_output : (bool) show outputs controlled by you - --is_not_my_output : (bool) show outputs not controlled by you - --is_my_input : (bool) show outputs created by you - --is_not_my_input : (bool) show outputs not created by you - --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination: - "--is_my_input --is_my_output --type=other" - this allows to exclude "change" payments, this - flag can be used in combination with any of the other flags - --include_received_tips : (bool) calculate the amount of tips recieved for claim outputs - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - --resolve : (bool) resolves each claim to provide additional metadata - --order_by=<order_by> : (str) field to order by: 'name', 'height', 'amount' and 'none' - --no_totals : (bool) do not calculate the total number of pages and items in result set - (significant performance boost) - - Returns: {Paginated[Output]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if account_id: - account = wallet.get_account_or_error(account_id) - claims = account.get_txos - claim_count = account.get_txo_count - else: - claims = partial(self.ledger.get_txos, wallet=wallet, accounts=wallet.accounts) - claim_count = partial(self.ledger.get_txo_count, wallet=wallet, accounts=wallet.accounts) - constraints = { - 'resolve': resolve, - 'include_is_spent': True, - 'include_is_my_input': True, - 'include_is_my_output': True, - 'include_received_tips': include_received_tips - } - if order_by is not None: - if order_by == 'name': - constraints['order_by'] = 'txo.claim_name' - elif order_by in ('height', 'amount', 'none'): - constraints['order_by'] = order_by - else: - raise ValueError(f"'{order_by}' is not a valid --order_by value.") - self._constrain_txo_from_kwargs(constraints, **kwargs) - return paginate_rows(claims, None if no_totals else claim_count, page, page_size, **constraints) - - @requires(WALLET_COMPONENT) - async def jsonrpc_txo_spend( - self, account_id=None, wallet_id=None, batch_size=500, - include_full_tx=False, preview=False, blocking=False, **kwargs): - """ - Spend transaction outputs, batching into multiple transactions as necessary. - - Usage: - txo_spend [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] - [--claim_id=<claim_id>...] [--channel_id=<channel_id>...] [--name=<name>...] 
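The flag combinations documented for txo_list above are translated into query constraints by _constrain_txo_from_kwargs; for example, tips you have received are support outputs that are yours but whose inputs are not, and change can be dropped with exclude_internal_transfers. Illustrative parameter sets:

    # Tips received on your claims: support outputs that are yours but were not funded by you.
    received_tips = {"type": "support", "is_my_output": True, "is_not_my_input": True}

    # Everything you hold except internal "change" transfers, resolved and ordered by height.
    holdings = {"is_my_output": True, "exclude_internal_transfers": True,
                "resolve": True, "order_by": "height"}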
- [--is_my_input | --is_not_my_input] - [--exclude_internal_transfers] [--wallet_id=<wallet_id>] - [--preview] [--blocking] [--batch_size=<batch_size>] [--include_full_tx] - - Options: - --type=<type> : (str or list) claim type: stream, channel, support, - purchase, collection, repost, other - --txid=<txid> : (str or list) transaction id of outputs - --claim_id=<claim_id> : (str or list) claim id - --channel_id=<channel_id> : (str or list) claims in this channel - --name=<name> : (str or list) claim name - --is_my_input : (bool) show outputs created by you - --is_not_my_input : (bool) show outputs not created by you - --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination: - "--is_my_input --is_my_output --type=other" - this allows to exclude "change" payments, this - flag can be used in combination with any of the other flags - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --preview : (bool) do not broadcast the transaction - --blocking : (bool) wait until abandon is in mempool - --batch_size=<batch_size> : (int) number of txos to spend per transactions - --include_full_tx : (bool) include entire tx in output and not just the txid - - Returns: {List[Transaction]} - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - accounts = [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts - txos = await self.ledger.get_txos( - wallet=wallet, accounts=accounts, - **self._constrain_txo_from_kwargs({}, is_not_spent=True, is_my_output=True, **kwargs) - ) - txs = [] - while txos: - txs.append( - await Transaction.create( - [Input.spend(txos.pop()) for _ in range(min(len(txos), batch_size))], - [], accounts, accounts[0] - ) - ) - if not preview: - for tx in txs: - await self.broadcast_or_release(tx, blocking) - if include_full_tx: - return txs - return [{'txid': tx.id} for tx in txs] - - @requires(WALLET_COMPONENT) - def jsonrpc_txo_sum(self, account_id=None, wallet_id=None, **kwargs): - """ - Sum of transaction outputs. - - Usage: - txo_list [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] - [--claim_id=<claim_id>...] [--name=<name>...] 
- [--is_spent] [--is_not_spent] - [--is_my_input_or_output | - [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]] - ] - [--exclude_internal_transfers] [--wallet_id=<wallet_id>] - - Options: - --type=<type> : (str or list) claim type: stream, channel, support, - purchase, collection, repost, other - --txid=<txid> : (str or list) transaction id of outputs - --claim_id=<claim_id> : (str or list) claim id - --name=<name> : (str or list) claim name - --is_spent : (bool) only show spent txos - --is_not_spent : (bool) only show not spent txos - --is_my_input_or_output : (bool) txos which have your inputs or your outputs, - if using this flag the other related flags - are ignored (--is_my_output, --is_my_input, etc) - --is_my_output : (bool) show outputs controlled by you - --is_not_my_output : (bool) show outputs not controlled by you - --is_my_input : (bool) show outputs created by you - --is_not_my_input : (bool) show outputs not created by you - --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination: - "--is_my_input --is_my_output --type=other" - this allows to exclude "change" payments, this - flag can be used in combination with any of the other flags - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - - Returns: int - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - return self.ledger.get_txo_sum( - wallet=wallet, accounts=[wallet.get_account_or_error(account_id)] if account_id else wallet.accounts, - **self._constrain_txo_from_kwargs({}, **kwargs) - ) - - @requires(WALLET_COMPONENT) - async def jsonrpc_txo_plot( - self, account_id=None, wallet_id=None, - days_back=0, start_day=None, days_after=None, end_day=None, **kwargs): - """ - Plot transaction output sum over days. - - Usage: - txo_plot [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] - [--claim_id=<claim_id>...] [--name=<name>...] 
[--is_spent] [--is_not_spent] - [--is_my_input_or_output | - [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]] - ] - [--exclude_internal_transfers] [--wallet_id=<wallet_id>] - [--days_back=<days_back> | - [--start_day=<start_day> [--days_after=<days_after> | --end_day=<end_day>]] - ] - - Options: - --type=<type> : (str or list) claim type: stream, channel, support, - purchase, collection, repost, other - --txid=<txid> : (str or list) transaction id of outputs - --claim_id=<claim_id> : (str or list) claim id - --name=<name> : (str or list) claim name - --is_spent : (bool) only show spent txos - --is_not_spent : (bool) only show not spent txos - --is_my_input_or_output : (bool) txos which have your inputs or your outputs, - if using this flag the other related flags - are ignored (--is_my_output, --is_my_input, etc) - --is_my_output : (bool) show outputs controlled by you - --is_not_my_output : (bool) show outputs not controlled by you - --is_my_input : (bool) show outputs created by you - --is_not_my_input : (bool) show outputs not created by you - --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination: - "--is_my_input --is_my_output --type=other" - this allows to exclude "change" payments, this - flag can be used in combination with any of the other flags - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --days_back=<days_back> : (int) number of days back from today - (not compatible with --start_day, --days_after, --end_day) - --start_day=<start_day> : (date) start on specific date (YYYY-MM-DD) - (instead of --days_back) - --days_after=<days_after> : (int) end number of days after --start_day - (instead of --end_day) - --end_day=<end_day> : (date) end on specific date (YYYY-MM-DD) - (instead of --days_after) - - Returns: List[Dict] - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - plot = await self.ledger.get_txo_plot( - wallet=wallet, accounts=[wallet.get_account_or_error(account_id)] if account_id else wallet.accounts, - days_back=days_back, start_day=start_day, days_after=days_after, end_day=end_day, - **self._constrain_txo_from_kwargs({}, **kwargs) - ) - for row in plot: - row['total'] = dewies_to_lbc(row['total']) - return plot - - UTXO_DOC = """ - Unspent transaction management. - """ - - @requires(WALLET_COMPONENT) - def jsonrpc_utxo_list(self, *args, **kwargs): - """ - List unspent transaction outputs - - Usage: - utxo_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - [--page=<page>] [--page_size=<page_size>] - - Options: - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict results to specific wallet - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: {Paginated[Output]} - """ - kwargs['type'] = ['other', 'purchase'] - kwargs['is_not_spent'] = True - return self.jsonrpc_txo_list(*args, **kwargs) - - @requires(WALLET_COMPONENT) - async def jsonrpc_utxo_release(self, account_id=None, wallet_id=None): - """ - When spending a UTXO it is locally locked to prevent double spends; - occasionally this can result in a UTXO being locked which ultimately - did not get spent (failed to broadcast, spend transaction was not - accepted by blockchain node, etc). This command releases the lock - on all UTXOs in your account. 
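txo_plot above accepts either a relative window (days_back) or an explicit range (start_day plus days_after or end_day) and converts each day's total from dewies to LBC before returning it; illustrative parameter sets:

    # Supports received per day over the last 30 days.
    last_month = {"type": "support", "is_my_output": True, "days_back": 30}

    # The same metric over a fixed window starting on a given date.
    fixed_window = {"type": "support", "is_my_output": True,
                    "start_day": "2020-01-01", "days_after": 90}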
- - Usage: - utxo_release [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>] - - Options: - --account_id=<account_id> : (str) id of the account to query - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - - Returns: - None - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - if account_id is not None: - await wallet.get_account_or_error(account_id).release_all_outputs() - else: - for account in wallet.accounts: - await account.release_all_outputs() - - BLOB_DOC = """ - Blob management. - """ - - @requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT) - async def jsonrpc_blob_get(self, blob_hash, timeout=None, read=False): - """ - Download and return a blob - - Usage: - blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read] - - Options: - --blob_hash=<blob_hash> : (str) blob hash of the blob to get - --timeout=<timeout> : (int) timeout in number of seconds - - Returns: - (str) Success/Fail message or (dict) decoded data - """ - - blob = await download_blob(asyncio.get_event_loop(), self.conf, self.blob_manager, self.dht_node, blob_hash) - if read: - with blob.reader_context() as handle: - return handle.read().decode() - elif isinstance(blob, BlobBuffer): - log.warning("manually downloaded blob buffer could have missed garbage collection, clearing it") - blob.delete() - return "Downloaded blob %s" % blob_hash - - @requires(BLOB_COMPONENT, DATABASE_COMPONENT) - async def jsonrpc_blob_delete(self, blob_hash): - """ - Delete a blob - - Usage: - blob_delete (<blob_hash> | --blob_hash=<blob_hash>) - - Options: - --blob_hash=<blob_hash> : (str) blob hash of the blob to delete - - Returns: - (str) Success/fail message - """ - if not blob_hash or not is_valid_blobhash(blob_hash): - return f"Invalid blob hash to delete '{blob_hash}'" - streams = self.stream_manager.get_filtered_streams(sd_hash=blob_hash) - if streams: - await self.stream_manager.delete_stream(streams[0]) - else: - await self.blob_manager.delete_blobs([blob_hash]) - return "Deleted %s" % blob_hash - - PEER_DOC = """ - DHT / Blob Exchange peer commands. 
- """ - - @requires(DHT_COMPONENT) - async def jsonrpc_peer_list(self, blob_hash, search_bottom_out_limit=None, page=None, page_size=None): - """ - Get peers for blob hash - - Usage: - peer_list (<blob_hash> | --blob_hash=<blob_hash>) - [<search_bottom_out_limit> | --search_bottom_out_limit=<search_bottom_out_limit>] - [--page=<page>] [--page_size=<page_size>] - - Options: - --blob_hash=<blob_hash> : (str) find available peers for this blob hash - --search_bottom_out_limit=<search_bottom_out_limit> : (int) the number of search probes in a row - that don't find any new peers - before giving up and returning - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: - (list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>, - 'node_id': <peer node id>} - """ - - if not is_valid_blobhash(blob_hash): - raise Exception("invalid blob hash") - if search_bottom_out_limit is not None: - search_bottom_out_limit = int(search_bottom_out_limit) - if search_bottom_out_limit <= 0: - raise Exception("invalid bottom out limit") - else: - search_bottom_out_limit = 4 - peers = [] - peer_q = asyncio.Queue(loop=self.component_manager.loop) - await self.dht_node._peers_for_value_producer(blob_hash, peer_q) - while not peer_q.empty(): - peers.extend(peer_q.get_nowait()) - results = [ - { - "node_id": hexlify(peer.node_id).decode(), - "address": peer.address, - "udp_port": peer.udp_port, - "tcp_port": peer.tcp_port, - } - for peer in peers - ] - return paginate_list(results, page, page_size) - - @requires(DATABASE_COMPONENT) - async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None): - """ - Announce blobs to the DHT - - Usage: - blob_announce (<blob_hash> | --blob_hash=<blob_hash> - | --stream_hash=<stream_hash> | --sd_hash=<sd_hash>) - - Options: - --blob_hash=<blob_hash> : (str) announce a blob, specified by blob_hash - --stream_hash=<stream_hash> : (str) announce all blobs associated with - stream_hash - --sd_hash=<sd_hash> : (str) announce all blobs associated with - sd_hash and the sd_hash itself - - Returns: - (bool) true if successful - """ - blob_hashes = [] - if blob_hash: - blob_hashes.append(blob_hash) - elif stream_hash or sd_hash: - if sd_hash and stream_hash: - raise Exception("either the sd hash or the stream hash should be provided, not both") - if sd_hash: - stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash) - blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True) - blob_hashes.extend(blob.blob_hash for blob in blobs if blob.blob_hash is not None) - else: - raise Exception('single argument must be specified') - await self.storage.should_single_announce_blobs(blob_hashes, immediate=True) - return True - - @requires(BLOB_COMPONENT, WALLET_COMPONENT) - async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, - finished=None, page=None, page_size=None): - """ - Returns blob hashes. 
If not given filters, returns all blobs known by the blob manager - - Usage: - blob_list [--needed] [--finished] [<uri> | --uri=<uri>] - [<stream_hash> | --stream_hash=<stream_hash>] - [<sd_hash> | --sd_hash=<sd_hash>] - [--page=<page>] [--page_size=<page_size>] - - Options: - --needed : (bool) only return needed blobs - --finished : (bool) only return finished blobs - --uri=<uri> : (str) filter blobs by stream in a uri - --stream_hash=<stream_hash> : (str) filter blobs by stream hash - --sd_hash=<sd_hash> : (str) filter blobs by sd hash - --page=<page> : (int) page to return during paginating - --page_size=<page_size> : (int) number of items on page during pagination - - Returns: - (list) List of blob hashes - """ - - if uri or stream_hash or sd_hash: - if uri: - metadata = (await self.resolve([], uri))[uri] - sd_hash = utils.get_sd_hash(metadata) - stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash) - elif stream_hash: - sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash) - elif sd_hash: - stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash) - sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash) - if sd_hash: - blobs = [sd_hash] - else: - blobs = [] - if stream_hash: - blobs.extend([b.blob_hash for b in (await self.storage.get_blobs_for_stream(stream_hash))[:-1]]) - else: - blobs = list(self.blob_manager.completed_blob_hashes) - if needed: - blobs = [blob_hash for blob_hash in blobs if not self.blob_manager.is_blob_verified(blob_hash)] - if finished: - blobs = [blob_hash for blob_hash in blobs if self.blob_manager.is_blob_verified(blob_hash)] - return paginate_list(blobs, page, page_size) - - @requires(BLOB_COMPONENT) - async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None): - """ - Reflects specified blobs - - Usage: - blob_reflect (<blob_hashes>...) 
[--reflector_server=<reflector_server>] - - Options: - --reflector_server=<reflector_server> : (str) reflector address - - Returns: - (list) reflected blob hashes - """ - - raise NotImplementedError() - - @requires(BLOB_COMPONENT) - async def jsonrpc_blob_reflect_all(self): - """ - Reflects all saved blobs - - Usage: - blob_reflect_all - - Options: - None - - Returns: - (bool) true if successful - """ - - raise NotImplementedError() - - @requires(STREAM_MANAGER_COMPONENT) - async def jsonrpc_file_reflect(self, **kwargs): - """ - Reflect all the blobs in a file matching the filter criteria - - Usage: - file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>] - [--stream_hash=<stream_hash>] [--rowid=<rowid>] - [--reflector=<reflector>] - - Options: - --sd_hash=<sd_hash> : (str) get file with matching sd hash - --file_name=<file_name> : (str) get file with matching file name in the - downloads folder - --stream_hash=<stream_hash> : (str) get file with matching stream hash - --rowid=<rowid> : (int) get file with matching row id - --reflector=<reflector> : (str) reflector server, ip address or url - by default choose a server from the config - - Returns: - (list) list of blobs reflected - """ - - server, port = kwargs.get('server'), kwargs.get('port') - if server and port: - port = int(port) - else: - server, port = random.choice(self.conf.reflector_servers) - reflected = await asyncio.gather(*[ - self.stream_manager.reflect_stream(stream, server, port) - for stream in self.stream_manager.get_filtered_streams(**kwargs) - ]) - total = [] - for reflected_for_stream in reflected: - total.extend(reflected_for_stream) - return total - - @requires(DHT_COMPONENT) - async def jsonrpc_peer_ping(self, node_id, address, port): - """ - Send a kademlia ping to the specified peer. If address and port are provided the peer is directly pinged, - if not provided the peer is located first. - - Usage: - peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>) - - Options: - None - - Returns: - (str) pong, or {'error': <error message>} if an error is encountered - """ - peer = None - if node_id and address and port: - peer = make_kademlia_peer(unhexlify(node_id), address, udp_port=int(port)) - try: - return await self.dht_node.protocol.get_rpc_peer(peer).ping() - except asyncio.TimeoutError: - return {'error': 'timeout'} - if not peer: - return {'error': 'peer not found'} - - @requires(DHT_COMPONENT) - def jsonrpc_routing_table_get(self): - """ - Get DHT routing information - - Usage: - routing_table_get - - Options: - None - - Returns: - (dict) dictionary containing routing and peer information - { - "buckets": { - <bucket index>: [ - { - "address": (str) peer address, - "udp_port": (int) peer udp port, - "tcp_port": (int) peer tcp port, - "node_id": (str) peer node id, - } - ] - }, - "node_id": (str) the local dht node id - } - """ - result = { - 'buckets': {} - } - - for i in range(len(self.dht_node.protocol.routing_table.buckets)): - result['buckets'][i] = [] - for peer in self.dht_node.protocol.routing_table.buckets[i].peers: - host = { - "address": peer.address, - "udp_port": peer.udp_port, - "tcp_port": peer.tcp_port, - "node_id": hexlify(peer.node_id).decode(), - } - result['buckets'][i].append(host) - - result['node_id'] = hexlify(self.dht_node.protocol.node_id).decode() - return result - - TRACEMALLOC_DOC = """ - Controls and queries tracemalloc memory tracing tools for troubleshooting. 
- """ - - def jsonrpc_tracemalloc_enable(self): # pylint: disable=no-self-use - """ - Enable tracemalloc memory tracing - - Usage: - jsonrpc_tracemalloc_enable - - Options: - None - - Returns: - (bool) is it tracing? - """ - tracemalloc.start() - return tracemalloc.is_tracing() - - def jsonrpc_tracemalloc_disable(self): # pylint: disable=no-self-use - """ - Disable tracemalloc memory tracing - - Usage: - jsonrpc_tracemalloc_disable - - Options: - None - - Returns: - (bool) is it tracing? - """ - tracemalloc.stop() - return tracemalloc.is_tracing() - - def jsonrpc_tracemalloc_top(self, items: int = 10): # pylint: disable=no-self-use - """ - Show most common objects, the place that created them and their size. - - Usage: - jsonrpc_tracemalloc_top [(<items> | --items=<items>)] - - Options: - --items=<items> : (int) maximum items to return, from the most common - - Returns: - (dict) dictionary containing most common objects in memory - { - "line": (str) filename and line number where it was created, - "code": (str) code that created it, - "size": (int) size in bytes, for each "memory block", - "count" (int) number of memory blocks - } - """ - if not tracemalloc.is_tracing(): - raise Exception("Enable tracemalloc first! See 'tracemalloc set' command.") - stats = tracemalloc.take_snapshot().filter_traces(( - tracemalloc.Filter(False, "<frozen importlib._bootstrap>"), - tracemalloc.Filter(False, "<unknown>"), - # tracemalloc and linecache here use some memory, but thats not relevant - tracemalloc.Filter(False, tracemalloc.__file__), - tracemalloc.Filter(False, linecache.__file__), - )).statistics('lineno', True) - results = [] - for stat in stats: - frame = stat.traceback[0] - filename = os.sep.join(frame.filename.split(os.sep)[-2:]) - line = linecache.getline(frame.filename, frame.lineno).strip() - results.append({ - "line": f"{filename}:{frame.lineno}", - "code": line, - "size": stat.size, - "count": stat.count - }) - if len(results) == items: - break - return results - - COMMENT_DOC = """ - View, create and abandon comments. - """ - - @requires(WALLET_COMPONENT) - async def jsonrpc_comment_list(self, claim_id, parent_id=None, page=1, page_size=50, - include_replies=True, is_channel_signature_valid=False, - hidden=False, visible=False): - """ - List comments associated with a claim. - - Usage: - comment_list (<claim_id> | --claim_id=<claim_id>) - [(--page=<page> --page_size=<page_size>)] - [--parent_id=<parent_id>] [--include_replies] - [--is_channel_signature_valid] - [--visible | --hidden] - - Options: - --claim_id=<claim_id> : (str) The claim on which the comment will be made on - --parent_id=<parent_id> : (str) CommentId of a specific thread you'd like to see - --page=<page> : (int) The page you'd like to see in the comment list. - --page_size=<page_size> : (int) The amount of comments that you'd like to retrieve - --include_replies : (bool) Whether or not you want to include replies in list - --is_channel_signature_valid : (bool) Only include comments with valid signatures. - [Warning: Paginated total size will not change, even - if list reduces] - --visible : (bool) Select only Visible Comments - --hidden : (bool) Select only Hidden Comments - - Returns: - (dict) Containing the list, and information about the paginated content: - { - "page": "Page number of the current items.", - "page_size": "Number of items to show on a page.", - "total_pages": "Total number of pages.", - "total_items": "Total number of items.", - "items": "A List of dict objects representing comments." 
- [ - { - "comment": (str) The actual string as inputted by the user, - "comment_id": (str) The Comment's unique identifier, - "channel_name": (str) Name of the channel this was posted under, prepended with a '@', - "channel_id": (str) The Channel Claim ID that this comment was posted under, - "signature": (str) The signature of the comment, - "channel_url": (str) Channel's URI in the ClaimTrie, - "parent_id": (str) Comment this is replying to, (None) if this is the root, - "timestamp": (int) The time at which comment was entered into the server at, in nanoseconds. - }, - ... - ] - } - """ - if hidden ^ visible: - result = await comment_client.jsonrpc_post( - self.conf.comment_server, - 'get_claim_hidden_comments', - claim_id=claim_id, - hidden=hidden, - page=page, - page_size=page_size - ) - else: - result = await comment_client.jsonrpc_post( - self.conf.comment_server, - 'get_claim_comments', - claim_id=claim_id, - parent_id=parent_id, - page=page, - page_size=page_size, - top_level=not include_replies - ) - for comment in result.get('items', []): - channel_url = comment.get('channel_url') - if not channel_url: - continue - resolve_response = await self.resolve([], [channel_url]) - if isinstance(resolve_response[channel_url], Output): - comment['is_channel_signature_valid'] = comment_client.is_comment_signed_by_channel( - comment, resolve_response[channel_url] - ) - else: - comment['is_channel_signature_valid'] = False - if is_channel_signature_valid: - result['items'] = [ - c for c in result.get('items', []) if c.get('is_channel_signature_valid', False) - ] - return result - - @requires(WALLET_COMPONENT) - async def jsonrpc_comment_create(self, comment, claim_id=None, parent_id=None, channel_account_id=None, - channel_name=None, channel_id=None, wallet_id=None): - """ - Create and associate a comment with a claim using your channel identity. - - Usage: - comment_create (<comment> | --comment=<comment>) - (<claim_id> | --claim_id=<claim_id> | --parent_id=<parent_id>) - (--channel_id=<channel_id> | --channel_name=<channel_name>) - [--channel_account_id=<channel_account_id>...] [--wallet_id=<wallet_id>] - - Options: - --comment=<comment> : (str) Comment to be made, should be at most 2000 characters. - --claim_id=<claim_id> : (str) The ID of the claim to comment on - --parent_id=<parent_id> : (str) The ID of a comment to make a response to - --channel_id=<channel_id> : (str) The ID of the channel you want to post under - --channel_name=<channel_name> : (str) The channel you want to post as, prepend with a '@' - --channel_account_id=<channel_account_id> : (str) one or more account ids for accounts to look in - for channel certificates, defaults to all accounts - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - - Returns: - (dict) Comment object if successfully made, (None) otherwise - { - "comment": (str) The actual string as inputted by the user, - "comment_id": (str) The Comment's unique identifier, - "channel_name": (str) Name of the channel this was posted under, prepended with a '@', - "channel_id": (str) The Channel Claim ID that this comment was posted under, - "signature": (str) The signature of the comment, - "signing_ts": (str) The timestamp used to sign the comment, - "channel_url": (str) Channel's URI in the ClaimTrie, - "parent_id": (str) Comment this is replying to, (None) if this is the root, - "timestamp": (int) The time at which comment was entered into the server at, in nanoseconds. 
- } - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - channel = await self.get_channel_or_error( - wallet, channel_account_id, channel_id, channel_name, for_signing=True - ) - - comment_body = { - 'comment': comment.strip(), - 'claim_id': claim_id, - 'parent_id': parent_id, - 'channel_id': channel.claim_id, - 'channel_name': channel.claim_name, - } - comment_client.sign_comment(comment_body, channel) - - response = await comment_client.jsonrpc_post(self.conf.comment_server, 'create_comment', comment_body) - response.update({ - 'is_claim_signature_valid': comment_client.is_comment_signed_by_channel(response, channel) - }) - return response - - @requires(WALLET_COMPONENT) - async def jsonrpc_comment_update(self, comment, comment_id, wallet_id=None): - """ - Edit a comment published as one of your channels. - - Usage: - comment_update (<comment> | --comment=<comment>) - (<comment_id> | --comment_id=<comment_id>) - [--wallet_id=<wallet_id>] - - Options: - --comment=<comment> : (str) New comment replacing the old one - --comment_id=<comment_id> : (str) Hash identifying the comment to edit - --wallet_id=<wallet_id : (str) restrict operation to specific wallet - - Returns: - (dict) Comment object if edit was successful, (None) otherwise - { - "comment": (str) The actual string as inputted by the user, - "comment_id": (str) The Comment's unique identifier, - "channel_name": (str) Name of the channel this was posted under, prepended with a '@', - "channel_id": (str) The Channel Claim ID that this comment was posted under, - "signature": (str) The signature of the comment, - "signing_ts": (str) Timestamp used to sign the most recent signature, - "channel_url": (str) Channel's URI in the ClaimTrie, - "parent_id": (str) Comment this is replying to, (None) if this is the root, - "timestamp": (int) The time at which comment was entered into the server at, in nanoseconds. - } - """ - channel = await comment_client.jsonrpc_post( - self.conf.comment_server, - 'get_channel_from_comment_id', - comment_id=comment_id - ) - if 'error' in channel: - raise ValueError(channel['error']) - - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - # channel = await self.get_channel_or_none(wallet, None, **channel) - channel_claim = await self.get_channel_or_error(wallet, [], **channel) - edited_comment = { - 'comment_id': comment_id, - 'comment': comment, - 'channel_id': channel_claim.claim_id, - 'channel_name': channel_claim.claim_name - } - comment_client.sign_comment(edited_comment, channel_claim) - return await comment_client.jsonrpc_post( - self.conf.comment_server, 'edit_comment', edited_comment - ) - - @requires(WALLET_COMPONENT) - async def jsonrpc_comment_abandon(self, comment_id, wallet_id=None): - """ - Abandon a comment published under your channel identity. - - Usage: - comment_abandon (<comment_id> | --comment_id=<comment_id>) [--wallet_id=<wallet_id>] - - Options: - --comment_id=<comment_id> : (str) The ID of the comment to be abandoned. 
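comment_create above signs the comment body with the selected channel's key before posting it to the configured comment server, so a channel you control is required; illustrative payloads (placeholder ids):

    # Comment on a claim, signed with one of your channels.
    comment_params = {"comment": "Great stream!", "claim_id": "<claim id>",
                      "channel_id": "<channel claim id>"}

    # Reply to an existing comment instead of commenting on the claim directly.
    reply_params = {"comment": "Agreed.", "parent_id": "<comment id>",
                    "channel_id": "<channel claim id>"}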
- --wallet_id=<wallet_id : (str) restrict operation to specific wallet - - Returns: - (dict) Object with the `comment_id` passed in as the key, and a flag indicating if it was abandoned - { - <comment_id> (str): { - "abandoned": (bool) - } - } - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - abandon_comment_body = {'comment_id': comment_id} - channel = await comment_client.jsonrpc_post( - self.conf.comment_server, 'get_channel_from_comment_id', comment_id=comment_id - ) - if 'error' in channel: - return {comment_id: {'abandoned': False}} - channel = await self.get_channel_or_none(wallet, None, **channel) - abandon_comment_body.update({ - 'channel_id': channel.claim_id, - 'channel_name': channel.claim_name, - }) - comment_client.sign_comment(abandon_comment_body, channel, abandon=True) - return await comment_client.jsonrpc_post(self.conf.comment_server, 'abandon_comment', abandon_comment_body) - - @requires(WALLET_COMPONENT) - async def jsonrpc_comment_hide(self, comment_ids: typing.Union[str, list], wallet_id=None): - """ - Hide a comment published to a claim you control. - - Usage: - comment_hide <comment_ids>... [--wallet_id=<wallet_id>] - - Options: - --comment_ids=<comment_ids> : (str, list) one or more comment_id to hide. - --wallet_id=<wallet_id> : (str) restrict operation to specific wallet - - Returns: - (dict) keyed by comment_id, containing success info - '<comment_id>': { - "hidden": (bool) flag indicating if comment_id was hidden - } - """ - wallet = self.wallet_manager.get_wallet_or_default(wallet_id) - - if isinstance(comment_ids, str): - comment_ids = [comment_ids] - - comments = await comment_client.jsonrpc_post( - self.conf.comment_server, 'get_comments_by_id', comment_ids=comment_ids - ) - claim_ids = {comment['claim_id'] for comment in comments} - claims = {cid: await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id=cid) for cid in claim_ids} - pieces = [] - for comment in comments: - claim = claims.get(comment['claim_id']) - if claim: - channel = await self.get_channel_or_none( - wallet, - account_ids=[], - channel_id=claim.channel.claim_id, - channel_name=claim.channel.claim_name, - for_signing=True - ) - piece = {'comment_id': comment['comment_id']} - comment_client.sign_comment(piece, channel, abandon=True) - pieces.append(piece) - return await comment_client.jsonrpc_post(self.conf.comment_server, 'hide_comments', pieces=pieces) - - async def broadcast_or_release(self, tx, blocking=False): - await self.wallet_manager.broadcast_or_release(tx, blocking) - - def valid_address_or_error(self, address): - try: - assert self.ledger.is_valid_address(address) - except: - raise Exception(f"'{address}' is not a valid address") - - @staticmethod - def valid_stream_name_or_error(name: str): - try: - if not name: - raise Exception('Stream name cannot be blank.') - parsed = URL.parse(name) - if parsed.has_channel: - raise Exception( - "Stream names cannot start with '@' symbol. This is reserved for channels claims." - ) - if not parsed.has_stream or parsed.stream.name != name: - raise Exception('Stream name has invalid characters.') - except (TypeError, ValueError): - raise Exception("Invalid stream name.") - - @staticmethod - def valid_collection_name_or_error(name: str): - try: - if not name: - raise Exception('Collection name cannot be blank.') - parsed = URL.parse(name) - if parsed.has_channel: - raise Exception( - "Collection names cannot start with '@' symbol. This is reserved for channels claims." 
- ) - if not parsed.has_stream or parsed.stream.name != name: - raise Exception('Collection name has invalid characters.') - except (TypeError, ValueError): - raise Exception("Invalid collection name.") - - @staticmethod - def valid_channel_name_or_error(name: str): - try: - if not name: - raise Exception( - "Channel name cannot be blank." - ) - parsed = URL.parse(name) - if not parsed.has_channel: - raise Exception("Channel names must start with '@' symbol.") - if parsed.channel.name != name: - raise Exception("Channel name has invalid character") - except (TypeError, ValueError): - raise Exception("Invalid channel name.") - - def get_fee_address(self, kwargs: dict, claim_address: str) -> str: - if 'fee_address' in kwargs: - self.valid_address_or_error(kwargs['fee_address']) - return kwargs['fee_address'] - if 'fee_currency' in kwargs or 'fee_amount' in kwargs: - return claim_address - - async def get_receiving_address(self, address: str, account: Optional[Account]) -> str: - if address is None and account is not None: - return await account.receiving.get_or_create_usable_address() - self.valid_address_or_error(address) - return address - - async def get_channel_or_none( - self, wallet: Wallet, account_ids: List[str], channel_id: str = None, - channel_name: str = None, for_signing: bool = False) -> Output: - if channel_id is not None or channel_name is not None: - return await self.get_channel_or_error( - wallet, account_ids, channel_id, channel_name, for_signing - ) - - async def get_channel_or_error( - self, wallet: Wallet, account_ids: List[str], channel_id: str = None, - channel_name: str = None, for_signing: bool = False) -> Output: - if channel_id: - key, value = 'id', channel_id - elif channel_name: - key, value = 'name', channel_name - else: - raise ValueError("Couldn't find channel because a channel_id or channel_name was not provided.") - channels = await self.ledger.get_channels( - wallet=wallet, accounts=wallet.get_accounts_or_all(account_ids), - **{f'claim_{key}': value} - ) - if len(channels) == 1: - if for_signing and not channels[0].has_private_key: - raise Exception(f"Couldn't find private key for {key} '{value}'. ") - return channels[0] - elif len(channels) > 1: - raise ValueError( - f"Multiple channels found with channel_{key} '{value}', " - f"pass a channel_id to narrow it down." 
- ) - raise ValueError(f"Couldn't find channel with channel_{key} '{value}'.") - - @staticmethod - def get_dewies_or_error(argument: str, lbc: str, positive_value=False): - try: - dewies = lbc_to_dewies(lbc) - if positive_value and dewies <= 0: - raise ValueError(f"'{argument}' value must be greater than 0.0") - return dewies - except ValueError as e: - raise ValueError(f"Invalid value for '{argument}': {e.args[0]}") - - async def resolve(self, accounts, urls, **kwargs): - results = await self.ledger.resolve(accounts, urls, **kwargs) - if self.conf.save_resolved_claims and results: - try: - claims = self.stream_manager._convert_to_old_resolve_output(self.wallet_manager, results) - await self.storage.save_claims_for_resolve([ - value for value in claims.values() if 'error' not in value - ]) - except DecodeError: - pass - return results - - @staticmethod - def _old_get_temp_claim_info(tx, txo, address, claim_dict, name, bid): - return { - "claim_id": txo.claim_id, - "name": name, - "amount": bid, - "address": address, - "txid": tx.id, - "nout": txo.position, - "value": claim_dict, - "height": -1, - "claim_sequence": -1, - } - - -def loggly_time_string(date): - formatted_dt = date.strftime("%Y-%m-%dT%H:%M:%S") - milliseconds = str(round(date.microsecond * (10.0 ** -5), 3)) - return quote(formatted_dt + milliseconds + "Z") - - -def get_loggly_query_string(installation_id): - base_loggly_search_url = "https://lbry.loggly.com/search#" - now = utils.now() - yesterday = now - utils.timedelta(days=1) - params = { - 'terms': f'json.installation_id:{installation_id[:SHORT_ID_LEN]}*', - 'from': loggly_time_string(yesterday), - 'to': loggly_time_string(now) - } - data = urlencode(params) - return base_loggly_search_url + data diff --git a/lbry/extras/daemon/json_response_encoder.py b/lbry/extras/daemon/json_response_encoder.py deleted file mode 100644 index 66dc5cfd4..000000000 --- a/lbry/extras/daemon/json_response_encoder.py +++ /dev/null @@ -1,323 +0,0 @@ -import logging -from decimal import Decimal -from binascii import hexlify, unhexlify -from datetime import datetime, date -from json import JSONEncoder - -from google.protobuf.message import DecodeError - -from lbry.schema.claim import Claim -from lbry.wallet.wallet import Wallet, Account -from lbry.blockchain.ledger import Ledger -from lbry.blockchain.transaction import Transaction, Output -from lbry.crypto.bip32 import PubKey -from lbry.blockchain.dewies import dewies_to_lbc -from lbry.stream.managed_stream import ManagedStream - - -log = logging.getLogger(__name__) - - -def encode_txo_doc(): - return { - 'txid': "hash of transaction in hex", - 'nout': "position in the transaction", - 'height': "block where transaction was recorded", - 'amount': "value of the txo as a decimal", - 'address': "address of who can spend the txo", - 'confirmations': "number of confirmed blocks", - 'is_change': "payment to change address, only available when it can be determined", - 'is_received': "true if txo was sent from external account to this account", - 'is_spent': "true if txo is spent", - 'is_mine': "payment to one of your accounts, only available when it can be determined", - 'type': "one of 'claim', 'support' or 'purchase'", - 'name': "when type is 'claim' or 'support', this is the claim name", - 'claim_id': "when type is 'claim', 'support' or 'purchase', this is the claim id", - 'claim_op': "when type is 'claim', this determines if it is 'create' or 'update'", - 'value': "when type is 'claim' or 'support' with payload, this is the decoded protobuf 
payload", - 'value_type': "determines the type of the 'value' field: 'channel', 'stream', etc", - 'protobuf': "hex encoded raw protobuf version of 'value' field", - 'permanent_url': "when type is 'claim' or 'support', this is the long permanent claim URL", - 'claim': "for purchase outputs only, metadata of purchased claim", - 'reposted_claim': "for repost claims only, metadata of claim being reposted", - 'signing_channel': "for signed claims only, metadata of signing channel", - 'is_channel_signature_valid': "for signed claims only, whether signature is valid", - 'purchase_receipt': "metadata for the purchase transaction associated with this claim" - } - - -def encode_tx_doc(): - return { - 'txid': "hash of transaction in hex", - 'height': "block where transaction was recorded", - 'inputs': [encode_txo_doc()], - 'outputs': [encode_txo_doc()], - 'total_input': "sum of inputs as a decimal", - 'total_output': "sum of outputs, sans fee, as a decimal", - 'total_fee': "fee amount", - 'hex': "entire transaction encoded in hex", - } - - -def encode_account_doc(): - return { - 'id': 'account_id', - 'is_default': 'this account is used by default', - 'ledger': 'name of crypto currency and network', - 'name': 'optional account name', - 'seed': 'human friendly words from which account can be recreated', - 'encrypted': 'if account is encrypted', - 'private_key': 'extended private key', - 'public_key': 'extended public key', - 'address_generator': 'settings for generating addresses', - 'modified_on': 'date of last modification to account settings' - } - - -def encode_wallet_doc(): - return { - 'id': 'wallet_id', - 'name': 'optional wallet name', - } - - -def encode_file_doc(): - return { - 'streaming_url': '(str) url to stream the file using range requests', - 'completed': '(bool) true if download is completed', - 'file_name': '(str) name of file', - 'download_directory': '(str) download directory', - 'points_paid': '(float) credit paid to download file', - 'stopped': '(bool) true if download is stopped', - 'stream_hash': '(str) stream hash of file', - 'stream_name': '(str) stream name', - 'suggested_file_name': '(str) suggested file name', - 'sd_hash': '(str) sd hash of file', - 'download_path': '(str) download path of file', - 'mime_type': '(str) mime type of file', - 'key': '(str) key attached to file', - 'total_bytes_lower_bound': '(int) lower bound file size in bytes', - 'total_bytes': '(int) file upper bound size in bytes', - 'written_bytes': '(int) written size in bytes', - 'blobs_completed': '(int) number of fully downloaded blobs', - 'blobs_in_stream': '(int) total blobs on stream', - 'blobs_remaining': '(int) total blobs remaining to download', - 'status': '(str) downloader status', - 'claim_id': '(str) None if claim is not found else the claim id', - 'txid': '(str) None if claim is not found else the transaction id', - 'nout': '(int) None if claim is not found else the transaction output index', - 'outpoint': '(str) None if claim is not found else the tx and output', - 'metadata': '(dict) None if claim is not found else the claim metadata', - 'channel_claim_id': '(str) None if claim is not found or not signed', - 'channel_name': '(str) None if claim is not found or not signed', - 'claim_name': '(str) None if claim is not found else the claim name' - } - - -class JSONResponseEncoder(JSONEncoder): - - def __init__(self, *args, service, include_protobuf=False, **kwargs): - super().__init__(*args, **kwargs) - self.service = service - self.include_protobuf = include_protobuf - - def default(self, 
obj): # pylint: disable=method-hidden,arguments-differ,too-many-return-statements - if isinstance(obj, Account): - return self.encode_account(obj) - if isinstance(obj, Wallet): - return self.encode_wallet(obj) - if isinstance(obj, ManagedStream): - return self.encode_file(obj) - if isinstance(obj, Transaction): - return self.encode_transaction(obj) - if isinstance(obj, Output): - return self.encode_output(obj) - if isinstance(obj, Claim): - return self.encode_claim(obj) - if isinstance(obj, PubKey): - return obj.extended_key_string() - if isinstance(obj, date): - return obj.isoformat() - if isinstance(obj, datetime): - return obj.strftime("%Y%m%dT%H:%M:%S") - if isinstance(obj, Decimal): - return float(obj) - if isinstance(obj, bytes): - return obj.decode() - return super().default(obj) - - def encode_transaction(self, tx): - return { - 'txid': tx.id, - 'height': tx.height, - 'inputs': [self.encode_input(txo) for txo in tx.inputs], - 'outputs': [self.encode_output(txo) for txo in tx.outputs], - 'total_input': dewies_to_lbc(tx.input_sum), - 'total_output': dewies_to_lbc(tx.input_sum - tx.fee), - 'total_fee': dewies_to_lbc(tx.fee), - 'hex': hexlify(tx.raw).decode(), - } - - def encode_output(self, txo, check_signature=True): - if not txo: - return - tx_height = txo.tx_ref.height - best_height = 0#self.ledger.headers.height - output = { - 'txid': txo.tx_ref.id, - 'nout': txo.position, - 'height': tx_height, - 'amount': dewies_to_lbc(txo.amount), - 'address': txo.get_address(self.service.ledger) if txo.has_address else None, - 'confirmations': (best_height+1) - tx_height if tx_height > 0 else tx_height, - 'timestamp': 0 #self.ledger.headers.estimated_timestamp(tx_height) - } - if txo.is_spent is not None: - output['is_spent'] = txo.is_spent - if txo.is_my_output is not None: - output['is_my_output'] = txo.is_my_output - if txo.is_my_input is not None: - output['is_my_input'] = txo.is_my_input - if txo.sent_supports is not None: - output['sent_supports'] = dewies_to_lbc(txo.sent_supports) - if txo.sent_tips is not None: - output['sent_tips'] = dewies_to_lbc(txo.sent_tips) - if txo.received_tips is not None: - output['received_tips'] = dewies_to_lbc(txo.received_tips) - if txo.is_internal_transfer is not None: - output['is_internal_transfer'] = txo.is_internal_transfer - - if txo.script.is_claim_name: - output['type'] = 'claim' - output['claim_op'] = 'create' - elif txo.script.is_update_claim: - output['type'] = 'claim' - output['claim_op'] = 'update' - elif txo.script.is_support_claim: - output['type'] = 'support' - elif txo.script.is_return_data: - output['type'] = 'data' - elif txo.purchase is not None: - output['type'] = 'purchase' - output['claim_id'] = txo.purchased_claim_id - if txo.purchased_claim is not None: - output['claim'] = self.encode_output(txo.purchased_claim) - else: - output['type'] = 'payment' - - if txo.script.is_claim_involved: - output.update({ - 'name': txo.claim_name, - 'normalized_name': txo.normalized_name, - 'claim_id': txo.claim_id, - 'permanent_url': txo.permanent_url, - 'meta': self.encode_claim_meta(txo.meta.copy()) - }) - if 'short_url' in output['meta']: - output['short_url'] = output['meta'].pop('short_url') - if 'canonical_url' in output['meta']: - output['canonical_url'] = output['meta'].pop('canonical_url') - if txo.claims is not None: - output['claims'] = [self.encode_output(o) for o in txo.claims] - if txo.reposted_claim is not None: - output['reposted_claim'] = self.encode_output(txo.reposted_claim) - if txo.script.is_claim_name or 
txo.script.is_update_claim: - try: - output['value'] = txo.claim - output['value_type'] = txo.claim.claim_type - if self.include_protobuf: - output['protobuf'] = hexlify(txo.claim.to_bytes()) - if txo.purchase_receipt is not None: - output['purchase_receipt'] = self.encode_output(txo.purchase_receipt) - if txo.claim.is_channel: - output['has_signing_key'] = txo.has_private_key - if check_signature and txo.claim.is_signed: - if txo.channel is not None: - output['signing_channel'] = self.encode_output(txo.channel) - output['is_channel_signature_valid'] = txo.is_signed_by(txo.channel, self.service.ledger) - else: - output['signing_channel'] = {'channel_id': txo.claim.signing_channel_id} - output['is_channel_signature_valid'] = False - except DecodeError: - pass - return output - - def encode_claim_meta(self, meta): - for key, value in meta.items(): - if key.endswith('_amount'): - if isinstance(value, int): - meta[key] = dewies_to_lbc(value) - if 0 < meta.get('creation_height', 0) <= 0: #self.ledger.headers.height: - meta['creation_timestamp'] = self.ledger.headers.estimated_timestamp(meta['creation_height']) - return meta - - def encode_input(self, txi): - return self.encode_output(txi.txo_ref.txo, False) if txi.txo_ref.txo is not None else { - 'txid': txi.txo_ref.tx_ref.id, - 'nout': txi.txo_ref.position - } - - def encode_account(self, account): - result = account.to_dict() - result['id'] = account.id - result.pop('certificates', None) - #result['is_default'] = self.ledger.accounts[0] == account - return result - - @staticmethod - def encode_wallet(wallet): - return { - 'id': wallet.id, - 'name': wallet.name - } - - def encode_file(self, managed_stream): - output_exists = managed_stream.output_file_exists - tx_height = managed_stream.stream_claim_info.height - best_height = 0 #self.ledger.headers.height - return { - 'streaming_url': managed_stream.stream_url, - 'completed': managed_stream.completed, - 'file_name': managed_stream.file_name if output_exists else None, - 'download_directory': managed_stream.download_directory if output_exists else None, - 'download_path': managed_stream.full_path if output_exists else None, - 'points_paid': 0.0, - 'stopped': not managed_stream.running, - 'stream_hash': managed_stream.stream_hash, - 'stream_name': managed_stream.descriptor.stream_name, - 'suggested_file_name': managed_stream.descriptor.suggested_file_name, - 'sd_hash': managed_stream.descriptor.sd_hash, - 'mime_type': managed_stream.mime_type, - 'key': managed_stream.descriptor.key, - 'total_bytes_lower_bound': managed_stream.descriptor.lower_bound_decrypted_length(), - 'total_bytes': managed_stream.descriptor.upper_bound_decrypted_length(), - 'written_bytes': managed_stream.written_bytes, - 'blobs_completed': managed_stream.blobs_completed, - 'blobs_in_stream': managed_stream.blobs_in_stream, - 'blobs_remaining': managed_stream.blobs_remaining, - 'status': managed_stream.status, - 'claim_id': managed_stream.claim_id, - 'txid': managed_stream.txid, - 'nout': managed_stream.nout, - 'outpoint': managed_stream.outpoint, - 'metadata': managed_stream.metadata, - 'protobuf': managed_stream.metadata_protobuf, - 'channel_claim_id': managed_stream.channel_claim_id, - 'channel_name': managed_stream.channel_name, - 'claim_name': managed_stream.claim_name, - 'content_fee': managed_stream.content_fee, - 'purchase_receipt': self.encode_output(managed_stream.purchase_receipt), - 'added_on': managed_stream.added_on, - 'height': tx_height, - 'confirmations': (best_height + 1) - tx_height if tx_height > 0 else 
tx_height, - 'timestamp': 0, #self.ledger.headers.estimated_timestamp(tx_height), - 'is_fully_reflected': managed_stream.is_fully_reflected - } - - def encode_claim(self, claim): - encoded = getattr(claim, claim.claim_type).to_dict() - if 'public_key' in encoded: - encoded['public_key_id'] = self.service.ledger.public_key_to_address( - unhexlify(encoded['public_key']) - ) - return encoded diff --git a/lbry/extras/daemon/loggly_handler.py b/lbry/extras/daemon/loggly_handler.py deleted file mode 100644 index d21c37a97..000000000 --- a/lbry/extras/daemon/loggly_handler.py +++ /dev/null @@ -1,95 +0,0 @@ -import asyncio -import json -import logging.handlers -import traceback - -import typing -from aiohttp.client_exceptions import ClientError -import aiohttp -from lbry import utils, __version__ -if typing.TYPE_CHECKING: - from lbry.conf import Config - -LOGGLY_TOKEN = 'BQEzZmMzLJHgAGxkBF00LGD0YGuyATVgAmqxAQEuAQZ2BQH4' - - -class JsonFormatter(logging.Formatter): - """Format log records using json serialization""" - - def __init__(self, **kwargs): - super().__init__() - self.attributes = kwargs - - def format(self, record): - data = { - 'loggerName': record.name, - 'asciTime': self.formatTime(record), - 'fileName': record.filename, - 'functionName': record.funcName, - 'levelNo': record.levelno, - 'lineNo': record.lineno, - 'levelName': record.levelname, - 'message': record.getMessage(), - } - data.update(self.attributes) - if record.exc_info: - data['exc_info'] = self.formatException(record.exc_info) - return json.dumps(data) - - -class HTTPSLogglyHandler(logging.Handler): - def __init__(self, loggly_token: str, config: 'Config'): - super().__init__() - self.cookies = {} - self.url = "https://logs-01.loggly.com/inputs/{token}/tag/{tag}".format( - token=utils.deobfuscate(loggly_token), tag='lbrynet-' + __version__ - ) - self._loop = asyncio.get_event_loop() - self._session = aiohttp.ClientSession() - self._config = config - - @property - def enabled(self): - return self._config.share_usage_data - - @staticmethod - def get_full_message(record): - if record.exc_info: - return '\n'.join(traceback.format_exception(*record.exc_info)) - else: - return record.getMessage() - - async def _emit(self, record, retry=True): - data = self.format(record).encode() - try: - async with self._session.post(self.url, data=data, - cookies=self.cookies) as response: - self.cookies.update(response.cookies) - except ClientError: - if self._loop.is_running() and retry and self.enabled: - await self._session.close() - self._session = aiohttp.ClientSession() - return await self._emit(record, retry=False) - - def emit(self, record): - if not self.enabled: - return - try: - asyncio.ensure_future(self._emit(record), loop=self._loop) - except RuntimeError: # TODO: use a second loop - print(f"\nfailed to send traceback to loggly, please file an issue with the following traceback:\n" - f"{self.format(record)}") - - def close(self): - super().close() - try: - loop = asyncio.get_event_loop() - loop.run_until_complete(self._session.close()) - except RuntimeError: - pass - - -def get_loggly_handler(config): - handler = HTTPSLogglyHandler(LOGGLY_TOKEN, config=config) - handler.setFormatter(JsonFormatter()) - return handler diff --git a/lbry/extras/daemon/migrator/__init__.py b/lbry/extras/daemon/migrator/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/lbry/extras/daemon/migrator/dbmigrator.py b/lbry/extras/daemon/migrator/dbmigrator.py deleted file mode 100644 index 726cc1974..000000000 --- 
a/lbry/extras/daemon/migrator/dbmigrator.py +++ /dev/null @@ -1,70 +0,0 @@ -# pylint: skip-file -import os -import sys -import logging - -log = logging.getLogger(__name__) - - -def migrate_db(conf, start, end): - current = start - while current < end: - if current == 1: - from .migrate1to2 import do_migration - elif current == 2: - from .migrate2to3 import do_migration - elif current == 3: - from .migrate3to4 import do_migration - elif current == 4: - from .migrate4to5 import do_migration - elif current == 5: - from .migrate5to6 import do_migration - elif current == 6: - from .migrate6to7 import do_migration - elif current == 7: - from .migrate7to8 import do_migration - elif current == 8: - from .migrate8to9 import do_migration - elif current == 9: - from .migrate9to10 import do_migration - elif current == 10: - from .migrate10to11 import do_migration - elif current == 11: - from .migrate11to12 import do_migration - elif current == 12: - from .migrate12to13 import do_migration - elif current == 13: - from .migrate13to14 import do_migration - else: - raise Exception(f"DB migration of version {current} to {current+1} is not available") - try: - do_migration(conf) - except Exception: - log.exception("failed to migrate database") - if os.path.exists(os.path.join(conf.data_dir, "lbrynet.sqlite")): - backup_name = f"rev_{current}_unmigrated_database" - count = 0 - while os.path.exists(os.path.join(conf.data_dir, backup_name + ".sqlite")): - count += 1 - backup_name = f"rev_{current}_unmigrated_database_{count}" - backup_path = os.path.join(conf.data_dir, backup_name + ".sqlite") - os.rename(os.path.join(conf.data_dir, "lbrynet.sqlite"), backup_path) - log.info("made a backup of the unmigrated database: %s", backup_path) - if os.path.isfile(os.path.join(conf.data_dir, "db_revision")): - os.remove(os.path.join(conf.data_dir, "db_revision")) - return None - current += 1 - log.info("successfully migrated the database from revision %i to %i", current - 1, current) - return None - - -def run_migration_script(): - log_format = "(%(asctime)s)[%(filename)s:%(lineno)s] %(funcName)s(): %(message)s" - logging.basicConfig(level=logging.DEBUG, format=log_format, filename="migrator.log") - sys.stdout = open("migrator.out.log", 'w') - sys.stderr = open("migrator.err.log", 'w') - migrate_db(sys.argv[1], int(sys.argv[2]), int(sys.argv[3])) - - -if __name__ == "__main__": - run_migration_script() diff --git a/lbry/extras/daemon/migrator/migrate10to11.py b/lbry/extras/daemon/migrator/migrate10to11.py deleted file mode 100644 index 9974c785c..000000000 --- a/lbry/extras/daemon/migrator/migrate10to11.py +++ /dev/null @@ -1,54 +0,0 @@ -import sqlite3 -import os -import binascii - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - current_columns = [] - for col_info in cursor.execute("pragma table_info('file');").fetchall(): - current_columns.append(col_info[1]) - if 'content_fee' in current_columns or 'saved_file' in current_columns: - connection.close() - print("already migrated") - return - - cursor.execute( - "pragma foreign_keys=off;" - ) - - cursor.execute(""" - create table if not exists new_file ( - stream_hash text primary key not null references stream, - file_name text, - download_directory text, - blob_data_rate real not null, - status text not null, - saved_file integer not null, - content_fee text - ); - """) - for (stream_hash, file_name, download_dir, data_rate, status) in cursor.execute("select 
* from file").fetchall(): - saved_file = 0 - if download_dir != '{stream}' and file_name != '{stream}': - try: - if os.path.isfile(os.path.join(binascii.unhexlify(download_dir).decode(), - binascii.unhexlify(file_name).decode())): - saved_file = 1 - else: - download_dir, file_name = None, None - except Exception: - download_dir, file_name = None, None - else: - download_dir, file_name = None, None - cursor.execute( - "insert into new_file values (?, ?, ?, ?, ?, ?, NULL)", - (stream_hash, file_name, download_dir, data_rate, status, saved_file) - ) - cursor.execute("drop table file") - cursor.execute("alter table new_file rename to file") - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate11to12.py b/lbry/extras/daemon/migrator/migrate11to12.py deleted file mode 100644 index b58462296..000000000 --- a/lbry/extras/daemon/migrator/migrate11to12.py +++ /dev/null @@ -1,69 +0,0 @@ -import sqlite3 -import os -import time - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, 'lbrynet.sqlite') - connection = sqlite3.connect(db_path) - connection.row_factory = sqlite3.Row - cursor = connection.cursor() - - current_columns = [] - for col_info in cursor.execute("pragma table_info('file');").fetchall(): - current_columns.append(col_info[1]) - - if 'added_on' in current_columns: - connection.close() - print('already migrated') - return - - # follow 12 step schema change procedure - cursor.execute("pragma foreign_keys=off") - - # we don't have any indexes, views or triggers, so step 3 is skipped. - cursor.execute("drop table if exists new_file") - cursor.execute(""" - create table if not exists new_file ( - stream_hash text not null primary key references stream, - file_name text, - download_directory text, - blob_data_rate text not null, - status text not null, - saved_file integer not null, - content_fee text, - added_on integer not null - ); - - - """) - - # step 5: transfer content from old to new - select = "select * from file" - for (stream_hash, file_name, download_dir, blob_rate, status, saved_file, fee) \ - in cursor.execute(select).fetchall(): - added_on = int(time.time()) - cursor.execute( - "insert into new_file values (?, ?, ?, ?, ?, ?, ?, ?)", - (stream_hash, file_name, download_dir, blob_rate, status, saved_file, fee, added_on) - ) - - # step 6: drop old table - cursor.execute("drop table file") - - # step 7: rename new table to old table - cursor.execute("alter table new_file rename to file") - - # step 8: we aren't using indexes, views or triggers so skip - # step 9: no views so skip - # step 10: foreign key check - cursor.execute("pragma foreign_key_check;") - - # step 11: commit transaction - connection.commit() - - # step 12: re-enable foreign keys - connection.execute("pragma foreign_keys=on;") - - # done :) - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate12to13.py b/lbry/extras/daemon/migrator/migrate12to13.py deleted file mode 100644 index b89ef3f4e..000000000 --- a/lbry/extras/daemon/migrator/migrate12to13.py +++ /dev/null @@ -1,80 +0,0 @@ -import os -import sqlite3 - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - current_columns = [] - for col_info in cursor.execute("pragma table_info('file');").fetchall(): - current_columns.append(col_info[1]) - if 'bt_infohash' in current_columns: - connection.close() - print("already migrated") - return - - cursor.executescript(""" - pragma foreign_keys=off; 
- - create table if not exists torrent ( - bt_infohash char(20) not null primary key, - tracker text, - length integer not null, - name text not null - ); - - create table if not exists torrent_node ( -- BEP-0005 - bt_infohash char(20) not null references torrent, - host text not null, - port integer not null - ); - - create table if not exists torrent_tracker ( -- BEP-0012 - bt_infohash char(20) not null references torrent, - tracker text not null - ); - - create table if not exists torrent_http_seed ( -- BEP-0017 - bt_infohash char(20) not null references torrent, - http_seed text not null - ); - - create table if not exists new_file ( - stream_hash char(96) references stream, - bt_infohash char(20) references torrent, - file_name text, - download_directory text, - blob_data_rate real not null, - status text not null, - saved_file integer not null, - content_fee text, - added_on integer not null - ); - - create table if not exists new_content_claim ( - stream_hash char(96) references stream, - bt_infohash char(20) references torrent, - claim_outpoint text unique not null references claim - ); - - insert into new_file (stream_hash, bt_infohash, file_name, download_directory, blob_data_rate, status, - saved_file, content_fee, added_on) select - stream_hash, NULL, file_name, download_directory, blob_data_rate, status, saved_file, content_fee, - added_on - from file; - - insert or ignore into new_content_claim (stream_hash, bt_infohash, claim_outpoint) - select stream_hash, NULL, claim_outpoint from content_claim; - - drop table file; - drop table content_claim; - alter table new_file rename to file; - alter table new_content_claim rename to content_claim; - - pragma foreign_keys=on; - """) - - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate13to14.py b/lbry/extras/daemon/migrator/migrate13to14.py deleted file mode 100644 index 5cbd6d3fa..000000000 --- a/lbry/extras/daemon/migrator/migrate13to14.py +++ /dev/null @@ -1,21 +0,0 @@ -import os -import sqlite3 - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - cursor.executescript(""" - create table if not exists peer ( - node_id char(96) not null primary key, - address text not null, - udp_port integer not null, - tcp_port integer, - unique (address, udp_port) - ); - """) - - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate1to2.py b/lbry/extras/daemon/migrator/migrate1to2.py deleted file mode 100644 index 3f1becf3c..000000000 --- a/lbry/extras/daemon/migrator/migrate1to2.py +++ /dev/null @@ -1,77 +0,0 @@ -import sqlite3 -import os -import logging - -log = logging.getLogger(__name__) -UNSET_NOUT = -1 - -def do_migration(conf): - log.info("Doing the migration") - migrate_blockchainname_db(conf.data_dir) - log.info("Migration succeeded") - - -def migrate_blockchainname_db(db_dir): - blockchainname_db = os.path.join(db_dir, "blockchainname.db") - # skip migration on fresh installs - if not os.path.isfile(blockchainname_db): - return - temp_db = sqlite3.connect(":memory:") - db_file = sqlite3.connect(blockchainname_db) - file_cursor = db_file.cursor() - mem_cursor = temp_db.cursor() - - mem_cursor.execute("create table if not exists name_metadata (" - " name text, " - " txid text, " - " n integer, " - " sd_hash text)") - mem_cursor.execute("create table if not exists claim_ids (" - " claimId text, " - " name text, " - " txid text, " - " n integer)") - 
temp_db.commit() - - name_metadata = file_cursor.execute("select * from name_metadata").fetchall() - claim_metadata = file_cursor.execute("select * from claim_ids").fetchall() - - # fill n as V1_UNSET_NOUT, Wallet.py will be responsible for filling in correct n - for name, txid, sd_hash in name_metadata: - mem_cursor.execute( - "insert into name_metadata values (?, ?, ?, ?) ", - (name, txid, UNSET_NOUT, sd_hash)) - - for claim_id, name, txid in claim_metadata: - mem_cursor.execute( - "insert into claim_ids values (?, ?, ?, ?)", - (claim_id, name, txid, UNSET_NOUT)) - temp_db.commit() - - new_name_metadata = mem_cursor.execute("select * from name_metadata").fetchall() - new_claim_metadata = mem_cursor.execute("select * from claim_ids").fetchall() - - file_cursor.execute("drop table name_metadata") - file_cursor.execute("create table name_metadata (" - " name text, " - " txid text, " - " n integer, " - " sd_hash text)") - - for name, txid, n, sd_hash in new_name_metadata: - file_cursor.execute( - "insert into name_metadata values (?, ?, ?, ?) ", (name, txid, n, sd_hash)) - - file_cursor.execute("drop table claim_ids") - file_cursor.execute("create table claim_ids (" - " claimId text, " - " name text, " - " txid text, " - " n integer)") - - for claim_id, name, txid, n in new_claim_metadata: - file_cursor.execute("insert into claim_ids values (?, ?, ?, ?)", (claim_id, name, txid, n)) - - db_file.commit() - db_file.close() - temp_db.close() diff --git a/lbry/extras/daemon/migrator/migrate2to3.py b/lbry/extras/daemon/migrator/migrate2to3.py deleted file mode 100644 index bbe3a41c3..000000000 --- a/lbry/extras/daemon/migrator/migrate2to3.py +++ /dev/null @@ -1,42 +0,0 @@ -import sqlite3 -import os -import logging - -log = logging.getLogger(__name__) - - -def do_migration(conf): - log.info("Doing the migration") - migrate_blockchainname_db(conf.data_dir) - log.info("Migration succeeded") - - -def migrate_blockchainname_db(db_dir): - blockchainname_db = os.path.join(db_dir, "blockchainname.db") - # skip migration on fresh installs - if not os.path.isfile(blockchainname_db): - return - - db_file = sqlite3.connect(blockchainname_db) - file_cursor = db_file.cursor() - - tables = file_cursor.execute("SELECT tbl_name FROM sqlite_master " - "WHERE type='table'").fetchall() - - if 'tmp_name_metadata_table' in tables and 'name_metadata' not in tables: - file_cursor.execute("ALTER TABLE tmp_name_metadata_table RENAME TO name_metadata") - else: - file_cursor.executescript( - "CREATE TABLE IF NOT EXISTS tmp_name_metadata_table " - " (name TEXT UNIQUE NOT NULL, " - " txid TEXT NOT NULL, " - " n INTEGER NOT NULL, " - " sd_hash TEXT NOT NULL); " - "INSERT OR IGNORE INTO tmp_name_metadata_table " - " (name, txid, n, sd_hash) " - " SELECT name, txid, n, sd_hash FROM name_metadata; " - "DROP TABLE name_metadata; " - "ALTER TABLE tmp_name_metadata_table RENAME TO name_metadata;" - ) - db_file.commit() - db_file.close() diff --git a/lbry/extras/daemon/migrator/migrate3to4.py b/lbry/extras/daemon/migrator/migrate3to4.py deleted file mode 100644 index ad8aec6bd..000000000 --- a/lbry/extras/daemon/migrator/migrate3to4.py +++ /dev/null @@ -1,85 +0,0 @@ -import sqlite3 -import os -import logging - -log = logging.getLogger(__name__) - - -def do_migration(conf): - log.info("Doing the migration") - migrate_blobs_db(conf.data_dir) - log.info("Migration succeeded") - - -def migrate_blobs_db(db_dir): - """ - We migrate the blobs.db used in BlobManager to have a "should_announce" column, - and set this to True for blobs that are 
sd_hash's or head blobs (first blob in stream) - """ - - blobs_db = os.path.join(db_dir, "blobs.db") - lbryfile_info_db = os.path.join(db_dir, 'lbryfile_info.db') - - # skip migration on fresh installs - if not os.path.isfile(blobs_db) and not os.path.isfile(lbryfile_info_db): - return - - # if blobs.db doesn't exist, skip migration - if not os.path.isfile(blobs_db): - log.info("blobs.db was not found but lbryfile_info.db was found, skipping migration") - return - - blobs_db_file = sqlite3.connect(blobs_db) - blobs_db_cursor = blobs_db_file.cursor() - - # check if new columns exist (it shouldn't) and create it - try: - blobs_db_cursor.execute("SELECT should_announce FROM blobs") - except sqlite3.OperationalError: - blobs_db_cursor.execute( - "ALTER TABLE blobs ADD COLUMN should_announce integer NOT NULL DEFAULT 0") - else: - log.warning("should_announce already exists somehow, proceeding anyways") - - # if lbryfile_info.db doesn't exist, skip marking blobs as should_announce = True - if not os.path.isfile(lbryfile_info_db): - log.error("lbryfile_info.db was not found, skipping check for should_announce") - return - - lbryfile_info_file = sqlite3.connect(lbryfile_info_db) - lbryfile_info_cursor = lbryfile_info_file.cursor() - - # find blobs that are stream descriptors - lbryfile_info_cursor.execute('SELECT * FROM lbry_file_descriptors') - descriptors = lbryfile_info_cursor.fetchall() - should_announce_blob_hashes = [] - for d in descriptors: - sd_blob_hash = (d[0],) - should_announce_blob_hashes.append(sd_blob_hash) - - # find blobs that are the first blob in a stream - lbryfile_info_cursor.execute('SELECT * FROM lbry_file_blobs WHERE position = 0') - blobs = lbryfile_info_cursor.fetchall() - head_blob_hashes = [] - for b in blobs: - blob_hash = (b[0],) - should_announce_blob_hashes.append(blob_hash) - - # now mark them as should_announce = True - blobs_db_cursor.executemany('UPDATE blobs SET should_announce=1 WHERE blob_hash=?', - should_announce_blob_hashes) - - # Now run some final checks here to make sure migration succeeded - try: - blobs_db_cursor.execute("SELECT should_announce FROM blobs") - except sqlite3.OperationalError: - raise Exception('Migration failed, cannot find should_announce') - - blobs_db_cursor.execute("SELECT * FROM blobs WHERE should_announce=1") - blobs = blobs_db_cursor.fetchall() - if len(blobs) != len(should_announce_blob_hashes): - log.error("Some how not all blobs were marked as announceable") - - blobs_db_file.commit() - blobs_db_file.close() - lbryfile_info_file.close() diff --git a/lbry/extras/daemon/migrator/migrate4to5.py b/lbry/extras/daemon/migrator/migrate4to5.py deleted file mode 100644 index 1990c54bc..000000000 --- a/lbry/extras/daemon/migrator/migrate4to5.py +++ /dev/null @@ -1,62 +0,0 @@ -import sqlite3 -import os -import logging - -log = logging.getLogger(__name__) - - -def do_migration(conf): - log.info("Doing the migration") - add_lbry_file_metadata(conf.data_dir) - log.info("Migration succeeded") - - -def add_lbry_file_metadata(db_dir): - """ - We migrate the blobs.db used in BlobManager to have a "should_announce" column, - and set this to True for blobs that are sd_hash's or head blobs (first blob in stream) - """ - - name_metadata = os.path.join(db_dir, "blockchainname.db") - lbryfile_info_db = os.path.join(db_dir, 'lbryfile_info.db') - - if not os.path.isfile(name_metadata) and not os.path.isfile(lbryfile_info_db): - return - - if not os.path.isfile(lbryfile_info_db): - log.info("blockchainname.db was not found but lbryfile_info.db was 
found, skipping migration") - return - - name_metadata_db = sqlite3.connect(name_metadata) - lbryfile_db = sqlite3.connect(lbryfile_info_db) - name_metadata_cursor = name_metadata_db.cursor() - lbryfile_cursor = lbryfile_db.cursor() - - lbryfile_db.executescript( - "create table if not exists lbry_file_metadata (" + - " lbry_file integer primary key, " + - " txid text, " + - " n integer, " + - " foreign key(lbry_file) references lbry_files(rowid)" - ")") - - _files = lbryfile_cursor.execute("select rowid, stream_hash from lbry_files").fetchall() - - lbry_files = {x[1]: x[0] for x in _files} - for (sd_hash, stream_hash) in lbryfile_cursor.execute("select * " - "from lbry_file_descriptors").fetchall(): - lbry_file_id = lbry_files[stream_hash] - outpoint = name_metadata_cursor.execute("select txid, n from name_metadata " - "where sd_hash=?", - (sd_hash,)).fetchall() - if outpoint: - txid, nout = outpoint[0] - lbryfile_cursor.execute("insert into lbry_file_metadata values (?, ?, ?)", - (lbry_file_id, txid, nout)) - else: - lbryfile_cursor.execute("insert into lbry_file_metadata values (?, ?, ?)", - (lbry_file_id, None, None)) - lbryfile_db.commit() - - lbryfile_db.close() - name_metadata_db.close() diff --git a/lbry/extras/daemon/migrator/migrate5to6.py b/lbry/extras/daemon/migrator/migrate5to6.py deleted file mode 100644 index d74ddd15e..000000000 --- a/lbry/extras/daemon/migrator/migrate5to6.py +++ /dev/null @@ -1,326 +0,0 @@ -import sqlite3 -import os -import json -import logging -from binascii import hexlify -from lbry.schema.claim import Claim - -log = logging.getLogger(__name__) - -CREATE_TABLES_QUERY = """ - pragma foreign_keys=on; - pragma journal_mode=WAL; - - create table if not exists blob ( - blob_hash char(96) primary key not null, - blob_length integer not null, - next_announce_time integer not null, - should_announce integer not null default 0, - status text not null - ); - - create table if not exists stream ( - stream_hash char(96) not null primary key, - sd_hash char(96) not null references blob, - stream_key text not null, - stream_name text not null, - suggested_filename text not null - ); - - create table if not exists stream_blob ( - stream_hash char(96) not null references stream, - blob_hash char(96) references blob, - position integer not null, - iv char(32) not null, - primary key (stream_hash, blob_hash) - ); - - create table if not exists claim ( - claim_outpoint text not null primary key, - claim_id char(40) not null, - claim_name text not null, - amount integer not null, - height integer not null, - serialized_metadata blob not null, - channel_claim_id text, - address text not null, - claim_sequence integer not null - ); - - create table if not exists file ( - stream_hash text primary key not null references stream, - file_name text not null, - download_directory text not null, - blob_data_rate real not null, - status text not null - ); - - create table if not exists content_claim ( - stream_hash text unique not null references file, - claim_outpoint text not null references claim, - primary key (stream_hash, claim_outpoint) - ); - - create table if not exists support ( - support_outpoint text not null primary key, - claim_id text not null, - amount integer not null, - address text not null - ); - """ - - -def run_operation(db): - def _decorate(fn): - def _wrapper(*args): - cursor = db.cursor() - try: - result = fn(cursor, *args) - db.commit() - return result - except sqlite3.IntegrityError: - db.rollback() - raise - return _wrapper - return _decorate - - -def 
verify_sd_blob(sd_hash, blob_dir): - with open(os.path.join(blob_dir, sd_hash), "r") as sd_file: - data = sd_file.read() - sd_length = len(data) - decoded = json.loads(data) - assert set(decoded.keys()) == { - 'stream_name', 'blobs', 'stream_type', 'key', 'suggested_file_name', 'stream_hash' - }, "invalid sd blob" - for blob in sorted(decoded['blobs'], key=lambda x: int(x['blob_num']), reverse=True): - if blob['blob_num'] == len(decoded['blobs']) - 1: - assert {'length', 'blob_num', 'iv'} == set(blob.keys()), 'invalid stream terminator' - assert blob['length'] == 0, 'non zero length stream terminator' - else: - assert {'blob_hash', 'length', 'blob_num', 'iv'} == set(blob.keys()), 'invalid stream blob' - assert blob['length'] > 0, 'zero length stream blob' - return decoded, sd_length - - -def do_migration(conf): - new_db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(new_db_path) - - metadata_db = sqlite3.connect(os.path.join(conf.data_dir, "blockchainname.db")) - lbryfile_db = sqlite3.connect(os.path.join(conf.data_dir, 'lbryfile_info.db')) - blobs_db = sqlite3.connect(os.path.join(conf.data_dir, 'blobs.db')) - - name_metadata_cursor = metadata_db.cursor() - lbryfile_cursor = lbryfile_db.cursor() - blobs_db_cursor = blobs_db.cursor() - - old_rowid_to_outpoint = { - rowid: (txid, nout) for (rowid, txid, nout) in - lbryfile_cursor.execute("select * from lbry_file_metadata").fetchall() - } - - old_sd_hash_to_outpoint = { - sd_hash: (txid, nout) for (txid, nout, sd_hash) in - name_metadata_cursor.execute("select txid, n, sd_hash from name_metadata").fetchall() - } - - sd_hash_to_stream_hash = dict( - lbryfile_cursor.execute("select sd_blob_hash, stream_hash from lbry_file_descriptors").fetchall() - ) - - stream_hash_to_stream_blobs = {} - - for (blob_hash, stream_hash, position, iv, length) in lbryfile_db.execute( - "select * from lbry_file_blobs").fetchall(): - stream_blobs = stream_hash_to_stream_blobs.get(stream_hash, []) - stream_blobs.append((blob_hash, length, position, iv)) - stream_hash_to_stream_blobs[stream_hash] = stream_blobs - - claim_outpoint_queries = {} - - for claim_query in metadata_db.execute( - "select distinct c.txid, c.n, c.claimId, c.name, claim_cache.claim_sequence, claim_cache.claim_address, " - "claim_cache.height, claim_cache.amount, claim_cache.claim_pb " - "from claim_cache inner join claim_ids c on claim_cache.claim_id=c.claimId"): - txid, nout = claim_query[0], claim_query[1] - if (txid, nout) in claim_outpoint_queries: - continue - claim_outpoint_queries[(txid, nout)] = claim_query - - @run_operation(connection) - def _populate_blobs(transaction, blob_infos): - transaction.executemany( - "insert into blob values (?, ?, ?, ?, ?)", - [(blob_hash, blob_length, int(next_announce_time), should_announce, "finished") - for (blob_hash, blob_length, _, next_announce_time, should_announce) in blob_infos] - ) - - @run_operation(connection) - def _import_file(transaction, sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate, - status, stream_blobs): - try: - transaction.execute( - "insert or ignore into stream values (?, ?, ?, ?, ?)", - (stream_hash, sd_hash, key, stream_name, suggested_file_name) - ) - except sqlite3.IntegrityError: - # failed because the sd isn't a known blob, we'll try to read the blob file and recover it - return sd_hash - - # insert any stream blobs that were missing from the blobs table - transaction.executemany( - "insert or ignore into blob values (?, ?, ?, ?, ?)", - [ - (blob_hash, length, 0, 
0, "pending") - for (blob_hash, length, position, iv) in stream_blobs - ] - ) - - # insert the stream blobs - for blob_hash, length, position, iv in stream_blobs: - transaction.execute( - "insert or ignore into stream_blob values (?, ?, ?, ?)", - (stream_hash, blob_hash, position, iv) - ) - - download_dir = conf.download_dir - if not isinstance(download_dir, bytes): - download_dir = download_dir.encode() - - # insert the file - transaction.execute( - "insert or ignore into file values (?, ?, ?, ?, ?)", - (stream_hash, stream_name, hexlify(download_dir), - data_rate, status) - ) - - @run_operation(connection) - def _add_recovered_blobs(transaction, blob_infos, sd_hash, sd_length): - transaction.execute( - "insert or replace into blob values (?, ?, ?, ?, ?)", (sd_hash, sd_length, 0, 1, "finished") - ) - for blob in sorted(blob_infos, key=lambda x: x['blob_num'], reverse=True): - if blob['blob_num'] < len(blob_infos) - 1: - transaction.execute( - "insert or ignore into blob values (?, ?, ?, ?, ?)", - (blob['blob_hash'], blob['length'], 0, 0, "pending") - ) - - @run_operation(connection) - def _make_db(new_db): - # create the new tables - new_db.executescript(CREATE_TABLES_QUERY) - - # first migrate the blobs - blobs = blobs_db_cursor.execute("select * from blobs").fetchall() - _populate_blobs(blobs) # pylint: disable=no-value-for-parameter - log.info("migrated %i blobs", new_db.execute("select count(*) from blob").fetchone()[0]) - - # used to store the query arguments if we need to try re-importing the lbry file later - file_args = {} # <sd_hash>: args tuple - - file_outpoints = {} # <outpoint tuple>: sd_hash - - # get the file and stream queries ready - for (rowid, sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate, status) in \ - lbryfile_db.execute( - "select distinct lbry_files.rowid, d.sd_blob_hash, lbry_files.*, o.blob_data_rate, o.status " - "from lbry_files " - "inner join lbry_file_descriptors d on lbry_files.stream_hash=d.stream_hash " - "inner join lbry_file_options o on lbry_files.stream_hash=o.stream_hash"): - - # this is try to link the file to a content claim after we've imported all the files - if rowid in old_rowid_to_outpoint: - file_outpoints[old_rowid_to_outpoint[rowid]] = sd_hash - elif sd_hash in old_sd_hash_to_outpoint: - file_outpoints[old_sd_hash_to_outpoint[sd_hash]] = sd_hash - - sd_hash_to_stream_hash[sd_hash] = stream_hash - if stream_hash in stream_hash_to_stream_blobs: - file_args[sd_hash] = ( - sd_hash, stream_hash, key, stream_name, - suggested_file_name, data_rate or 0.0, - status, stream_hash_to_stream_blobs.pop(stream_hash) - ) - - # used to store the query arguments if we need to try re-importing the claim - claim_queries = {} # <sd_hash>: claim query tuple - - # get the claim queries ready, only keep those with associated files - for outpoint, sd_hash in file_outpoints.items(): - if outpoint in claim_outpoint_queries: - claim_queries[sd_hash] = claim_outpoint_queries[outpoint] - - # insert the claims - new_db.executemany( - "insert or ignore into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)", - [ - ( - "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1]), claim_arg_tup[2], claim_arg_tup[3], - claim_arg_tup[7], claim_arg_tup[6], claim_arg_tup[8], - Claim.from_bytes(claim_arg_tup[8]).signing_channel_id, claim_arg_tup[5], claim_arg_tup[4] - ) - for sd_hash, claim_arg_tup in claim_queries.items() if claim_arg_tup - ] # sd_hash, (txid, nout, claim_id, name, sequence, address, height, amount, serialized) - ) - - log.info("migrated %i claims", 
new_db.execute("select count(*) from claim").fetchone()[0]) - - damaged_stream_sds = [] - # import the files and get sd hashes of streams to attempt recovering - for sd_hash, file_query in file_args.items(): - failed_sd = _import_file(*file_query) - if failed_sd: - damaged_stream_sds.append(failed_sd) - - # recover damaged streams - if damaged_stream_sds: - blob_dir = os.path.join(conf.data_dir, "blobfiles") - damaged_sds_on_disk = [] if not os.path.isdir(blob_dir) else list({p for p in os.listdir(blob_dir) - if p in damaged_stream_sds}) - for damaged_sd in damaged_sds_on_disk: - try: - decoded, sd_length = verify_sd_blob(damaged_sd, blob_dir) - blobs = decoded['blobs'] - _add_recovered_blobs(blobs, damaged_sd, sd_length) # pylint: disable=no-value-for-parameter - _import_file(*file_args[damaged_sd]) - damaged_stream_sds.remove(damaged_sd) - except (OSError, ValueError, TypeError, AssertionError, sqlite3.IntegrityError): - continue - - log.info("migrated %i files", new_db.execute("select count(*) from file").fetchone()[0]) - - # associate the content claims to their respective files - for claim_arg_tup in claim_queries.values(): - if claim_arg_tup and (claim_arg_tup[0], claim_arg_tup[1]) in file_outpoints \ - and file_outpoints[(claim_arg_tup[0], claim_arg_tup[1])] in sd_hash_to_stream_hash: - try: - new_db.execute( - "insert or ignore into content_claim values (?, ?)", - ( - sd_hash_to_stream_hash.get(file_outpoints.get((claim_arg_tup[0], claim_arg_tup[1]))), - "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1]) - ) - ) - except sqlite3.IntegrityError: - continue - - log.info("migrated %i content claims", new_db.execute("select count(*) from content_claim").fetchone()[0]) - try: - _make_db() # pylint: disable=no-value-for-parameter - except sqlite3.OperationalError as err: - if err.message == "table blob has 7 columns but 5 values were supplied": - log.warning("detected a failed previous migration to revision 6, repairing it") - connection.close() - os.remove(new_db_path) - return do_migration(conf) - raise err - - connection.close() - blobs_db.close() - lbryfile_db.close() - metadata_db.close() - # os.remove(os.path.join(db_dir, "blockchainname.db")) - # os.remove(os.path.join(db_dir, 'lbryfile_info.db')) - # os.remove(os.path.join(db_dir, 'blobs.db')) diff --git a/lbry/extras/daemon/migrator/migrate6to7.py b/lbry/extras/daemon/migrator/migrate6to7.py deleted file mode 100644 index ec5ae4597..000000000 --- a/lbry/extras/daemon/migrator/migrate6to7.py +++ /dev/null @@ -1,13 +0,0 @@ -import sqlite3 -import os - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - cursor.executescript("alter table blob add last_announced_time integer;") - cursor.executescript("alter table blob add single_announce integer;") - cursor.execute("update blob set next_announce_time=0") - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate7to8.py b/lbry/extras/daemon/migrator/migrate7to8.py deleted file mode 100644 index a02b7b5f2..000000000 --- a/lbry/extras/daemon/migrator/migrate7to8.py +++ /dev/null @@ -1,21 +0,0 @@ -import sqlite3 -import os - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - cursor.executescript( - """ - create table reflected_stream ( - sd_hash text not null, - reflector_address text not null, - timestamp integer, - primary key (sd_hash, 
reflector_address) - ); - """ - ) - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/migrator/migrate8to9.py b/lbry/extras/daemon/migrator/migrate8to9.py deleted file mode 100644 index c9aee0a37..000000000 --- a/lbry/extras/daemon/migrator/migrate8to9.py +++ /dev/null @@ -1,47 +0,0 @@ -import sqlite3 -import logging -import os -from lbry.blob.blob_info import BlobInfo -from lbry.stream.descriptor import StreamDescriptor - -log = logging.getLogger(__name__) - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - blob_dir = os.path.join(conf.data_dir, "blobfiles") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - query = "select stream_name, stream_key, suggested_filename, sd_hash, stream_hash from stream" - streams = cursor.execute(query).fetchall() - - blobs = cursor.execute("select s.stream_hash, s.position, s.iv, b.blob_hash, b.blob_length from stream_blob s " - "left outer join blob b ON b.blob_hash=s.blob_hash order by s.position").fetchall() - blobs_by_stream = {} - for stream_hash, position, iv, blob_hash, blob_length in blobs: - blobs_by_stream.setdefault(stream_hash, []).append(BlobInfo(position, blob_length or 0, iv, blob_hash)) - - for stream_name, stream_key, suggested_filename, sd_hash, stream_hash in streams: - sd = StreamDescriptor(None, blob_dir, stream_name, stream_key, suggested_filename, - blobs_by_stream[stream_hash], stream_hash, sd_hash) - if sd_hash != sd.calculate_sd_hash(): - log.info("Stream for descriptor %s is invalid, cleaning it up", sd_hash) - blob_hashes = [blob.blob_hash for blob in blobs_by_stream[stream_hash]] - delete_stream(cursor, stream_hash, sd_hash, blob_hashes, blob_dir) - - connection.commit() - connection.close() - - -def delete_stream(transaction, stream_hash, sd_hash, blob_hashes, blob_dir): - transaction.execute("delete from content_claim where stream_hash=? ", (stream_hash,)) - transaction.execute("delete from file where stream_hash=? ", (stream_hash, )) - transaction.execute("delete from stream_blob where stream_hash=?", (stream_hash, )) - transaction.execute("delete from stream where stream_hash=? 
", (stream_hash, )) - transaction.execute("delete from blob where blob_hash=?", (sd_hash, )) - for blob_hash in blob_hashes: - transaction.execute("delete from blob where blob_hash=?", (blob_hash, )) - file_path = os.path.join(blob_dir, blob_hash) - if os.path.isfile(file_path): - os.unlink(file_path) diff --git a/lbry/extras/daemon/migrator/migrate9to10.py b/lbry/extras/daemon/migrator/migrate9to10.py deleted file mode 100644 index 97a48cc6f..000000000 --- a/lbry/extras/daemon/migrator/migrate9to10.py +++ /dev/null @@ -1,20 +0,0 @@ -import sqlite3 -import os - - -def do_migration(conf): - db_path = os.path.join(conf.data_dir, "lbrynet.sqlite") - connection = sqlite3.connect(db_path) - cursor = connection.cursor() - - query = "select stream_hash, sd_hash from main.stream" - for stream_hash, sd_hash in cursor.execute(query).fetchall(): - head_blob_hash = cursor.execute( - "select blob_hash from stream_blob where position = 0 and stream_hash = ?", - (stream_hash,) - ).fetchone() - if not head_blob_hash: - continue - cursor.execute("update blob set should_announce=1 where blob_hash in (?, ?)", (sd_hash, head_blob_hash[0],)) - connection.commit() - connection.close() diff --git a/lbry/extras/daemon/storage.py b/lbry/extras/daemon/storage.py deleted file mode 100644 index 5a6249830..000000000 --- a/lbry/extras/daemon/storage.py +++ /dev/null @@ -1,835 +0,0 @@ -import os -import logging -import sqlite3 -import typing -import asyncio -import binascii -import time -from typing import Optional -from lbry.wallet.database import SQLiteMixin -from lbry.conf import Config -from lbry.blockchain.dewies import dewies_to_lbc, lbc_to_dewies -from lbry.blockchain.transaction import Transaction -from lbry.schema.claim import Claim -from lbry.dht.constants import DATA_EXPIRATION -from lbry.blob.blob_info import BlobInfo - -if typing.TYPE_CHECKING: - from lbry.blob.blob_file import BlobFile - from lbry.stream.descriptor import StreamDescriptor - -log = logging.getLogger(__name__) - - -def calculate_effective_amount(amount: str, supports: typing.Optional[typing.List[typing.Dict]] = None) -> str: - return dewies_to_lbc( - lbc_to_dewies(amount) + sum([lbc_to_dewies(support['amount']) for support in supports]) - ) - - -class StoredContentClaim: - def __init__(self, outpoint: Optional[str] = None, claim_id: Optional[str] = None, name: Optional[str] = None, - amount: Optional[int] = None, height: Optional[int] = None, serialized: Optional[str] = None, - channel_claim_id: Optional[str] = None, address: Optional[str] = None, - claim_sequence: Optional[int] = None, channel_name: Optional[str] = None): - self.claim_id = claim_id - self.outpoint = outpoint - self.claim_name = name - self.amount = amount - self.height = height - self.claim: typing.Optional[Claim] = None if not serialized else Claim.from_bytes( - binascii.unhexlify(serialized) - ) - self.claim_address = address - self.claim_sequence = claim_sequence - self.channel_claim_id = channel_claim_id - self.channel_name = channel_name - - @property - def txid(self) -> typing.Optional[str]: - return None if not self.outpoint else self.outpoint.split(":")[0] - - @property - def nout(self) -> typing.Optional[int]: - return None if not self.outpoint else int(self.outpoint.split(":")[1]) - - def as_dict(self) -> typing.Dict: - return { - "name": self.claim_name, - "claim_id": self.claim_id, - "address": self.claim_address, - "claim_sequence": self.claim_sequence, - "value": self.claim, - "height": self.height, - "amount": dewies_to_lbc(self.amount), - "nout": self.nout, 
- "txid": self.txid, - "channel_claim_id": self.channel_claim_id, - "channel_name": self.channel_name - } - - -def _get_content_claims(transaction: sqlite3.Connection, query: str, - source_hashes: typing.List[str]) -> typing.Dict[str, StoredContentClaim]: - claims = {} - for claim_info in _batched_select(transaction, query, source_hashes): - claims[claim_info[0]] = StoredContentClaim(*claim_info[1:]) - return claims - - -def get_claims_from_stream_hashes(transaction: sqlite3.Connection, - stream_hashes: typing.List[str]) -> typing.Dict[str, StoredContentClaim]: - query = ( - "select content_claim.stream_hash, c.*, case when c.channel_claim_id is not null then " - " (select claim_name from claim where claim_id==c.channel_claim_id) " - " else null end as channel_name " - " from content_claim " - " inner join claim c on c.claim_outpoint=content_claim.claim_outpoint and content_claim.stream_hash in {}" - " order by c.rowid desc" - ) - return _get_content_claims(transaction, query, stream_hashes) - - -def get_claims_from_torrent_info_hashes(transaction: sqlite3.Connection, - info_hashes: typing.List[str]) -> typing.Dict[str, StoredContentClaim]: - query = ( - "select content_claim.bt_infohash, c.*, case when c.channel_claim_id is not null then " - " (select claim_name from claim where claim_id==c.channel_claim_id) " - " else null end as channel_name " - " from content_claim " - " inner join claim c on c.claim_outpoint=content_claim.claim_outpoint and content_claim.bt_infohash in {}" - " order by c.rowid desc" - ) - return _get_content_claims(transaction, query, info_hashes) - - -def _batched_select(transaction, query, parameters, batch_size=900): - for start_index in range(0, len(parameters), batch_size): - current_batch = parameters[start_index:start_index+batch_size] - bind = "({})".format(','.join(['?'] * len(current_batch))) - yield from transaction.execute(query.format(bind), current_batch) - - -def _get_lbry_file_stream_dict(rowid, added_on, stream_hash, file_name, download_dir, data_rate, status, - sd_hash, stream_key, stream_name, suggested_file_name, claim, saved_file, - raw_content_fee, fully_reflected): - return { - "rowid": rowid, - "added_on": added_on, - "stream_hash": stream_hash, - "file_name": file_name, # hex - "download_directory": download_dir, # hex - "blob_data_rate": data_rate, - "status": status, - "sd_hash": sd_hash, - "key": stream_key, - "stream_name": stream_name, # hex - "suggested_file_name": suggested_file_name, # hex - "claim": claim, - "saved_file": bool(saved_file), - "content_fee": None if not raw_content_fee else Transaction( - binascii.unhexlify(raw_content_fee) - ), - "fully_reflected": fully_reflected - } - - -def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Dict]: - files = [] - signed_claims = {} - for (rowid, stream_hash, _, file_name, download_dir, data_rate, status, saved_file, raw_content_fee, - added_on, _, sd_hash, stream_key, stream_name, suggested_file_name, *claim_args) in transaction.execute( - "select file.rowid, file.*, stream.*, c.*, " - " case when (SELECT 1 FROM reflected_stream r WHERE r.sd_hash=stream.sd_hash) " - " is null then 0 else 1 end as fully_reflected " - "from file inner join stream on file.stream_hash=stream.stream_hash " - "inner join content_claim cc on file.stream_hash=cc.stream_hash " - "inner join claim c on cc.claim_outpoint=c.claim_outpoint " - "order by c.rowid desc").fetchall(): - claim_args, fully_reflected = tuple(claim_args[:-1]), claim_args[-1] - claim = StoredContentClaim(*claim_args) 
- if claim.channel_claim_id: - if claim.channel_claim_id not in signed_claims: - signed_claims[claim.channel_claim_id] = [] - signed_claims[claim.channel_claim_id].append(claim) - files.append( - _get_lbry_file_stream_dict( - rowid, added_on, stream_hash, file_name, download_dir, data_rate, status, - sd_hash, stream_key, stream_name, suggested_file_name, claim, saved_file, - raw_content_fee, fully_reflected - ) - ) - for claim_name, claim_id in _batched_select( - transaction, "select c.claim_name, c.claim_id from claim c where c.claim_id in {}", - tuple(signed_claims.keys())): - for claim in signed_claims[claim_id]: - claim.channel_name = claim_name - return files - - -def store_stream(transaction: sqlite3.Connection, sd_blob: 'BlobFile', descriptor: 'StreamDescriptor'): - # add all blobs, except the last one, which is empty - transaction.executemany( - "insert or ignore into blob values (?, ?, ?, ?, ?, ?, ?)", - ((blob.blob_hash, blob.length, 0, 0, "pending", 0, 0) - for blob in (descriptor.blobs[:-1] if len(descriptor.blobs) > 1 else descriptor.blobs) + [sd_blob]) - ).fetchall() - # associate the blobs to the stream - transaction.execute("insert or ignore into stream values (?, ?, ?, ?, ?)", - (descriptor.stream_hash, sd_blob.blob_hash, descriptor.key, - binascii.hexlify(descriptor.stream_name.encode()).decode(), - binascii.hexlify(descriptor.suggested_file_name.encode()).decode())).fetchall() - # add the stream - transaction.executemany( - "insert or ignore into stream_blob values (?, ?, ?, ?)", - ((descriptor.stream_hash, blob.blob_hash, blob.blob_num, blob.iv) - for blob in descriptor.blobs) - ).fetchall() - # ensure should_announce is set regardless if insert was ignored - transaction.execute( - "update blob set should_announce=1 where blob_hash in (?, ?)", - (sd_blob.blob_hash, descriptor.blobs[0].blob_hash,) - ).fetchall() - - -def delete_stream(transaction: sqlite3.Connection, descriptor: 'StreamDescriptor'): - blob_hashes = [(blob.blob_hash, ) for blob in descriptor.blobs[:-1]] - blob_hashes.append((descriptor.sd_hash, )) - transaction.execute("delete from content_claim where stream_hash=? ", (descriptor.stream_hash,)).fetchall() - transaction.execute("delete from file where stream_hash=? ", (descriptor.stream_hash,)).fetchall() - transaction.execute("delete from stream_blob where stream_hash=?", (descriptor.stream_hash,)).fetchall() - transaction.execute("delete from stream where stream_hash=? 
", (descriptor.stream_hash,)).fetchall() - transaction.executemany("delete from blob where blob_hash=?", blob_hashes).fetchall() - - -def delete_torrent(transaction: sqlite3.Connection, bt_infohash: str): - transaction.execute("delete from content_claim where bt_infohash=?", (bt_infohash, )).fetchall() - transaction.execute("delete from torrent_tracker where bt_infohash=?", (bt_infohash,)).fetchall() - transaction.execute("delete from torrent_node where bt_infohash=?", (bt_infohash,)).fetchall() - transaction.execute("delete from torrent_http_seed where bt_infohash=?", (bt_infohash,)).fetchall() - transaction.execute("delete from file where bt_infohash=?", (bt_infohash,)).fetchall() - transaction.execute("delete from torrent where bt_infohash=?", (bt_infohash,)).fetchall() - - -def store_file(transaction: sqlite3.Connection, stream_hash: str, file_name: typing.Optional[str], - download_directory: typing.Optional[str], data_payment_rate: float, status: str, - content_fee: typing.Optional[Transaction], added_on: typing.Optional[int] = None) -> int: - if not file_name and not download_directory: - encoded_file_name, encoded_download_dir = None, None - else: - encoded_file_name = binascii.hexlify(file_name.encode()).decode() - encoded_download_dir = binascii.hexlify(download_directory.encode()).decode() - time_added = added_on or int(time.time()) - transaction.execute( - "insert or replace into file values (?, NULL, ?, ?, ?, ?, ?, ?, ?)", - (stream_hash, encoded_file_name, encoded_download_dir, data_payment_rate, status, - 1 if (file_name and download_directory and os.path.isfile(os.path.join(download_directory, file_name))) else 0, - None if not content_fee else binascii.hexlify(content_fee.raw).decode(), time_added) - ).fetchall() - - return transaction.execute("select rowid from file where stream_hash=?", (stream_hash, )).fetchone()[0] - - -class SQLiteStorage(SQLiteMixin): - CREATE_TABLES_QUERY = """ - pragma foreign_keys=on; - pragma journal_mode=WAL; - - create table if not exists blob ( - blob_hash char(96) primary key not null, - blob_length integer not null, - next_announce_time integer not null, - should_announce integer not null default 0, - status text not null, - last_announced_time integer, - single_announce integer - ); - - create table if not exists stream ( - stream_hash char(96) not null primary key, - sd_hash char(96) not null references blob, - stream_key text not null, - stream_name text not null, - suggested_filename text not null - ); - - create table if not exists stream_blob ( - stream_hash char(96) not null references stream, - blob_hash char(96) references blob, - position integer not null, - iv char(32) not null, - primary key (stream_hash, blob_hash) - ); - - create table if not exists claim ( - claim_outpoint text not null primary key, - claim_id char(40) not null, - claim_name text not null, - amount integer not null, - height integer not null, - serialized_metadata blob not null, - channel_claim_id text, - address text not null, - claim_sequence integer not null - ); - - create table if not exists torrent ( - bt_infohash char(20) not null primary key, - tracker text, - length integer not null, - name text not null - ); - - create table if not exists torrent_node ( -- BEP-0005 - bt_infohash char(20) not null references torrent, - host text not null, - port integer not null - ); - - create table if not exists torrent_tracker ( -- BEP-0012 - bt_infohash char(20) not null references torrent, - tracker text not null - ); - - create table if not exists 
torrent_http_seed ( -- BEP-0017 - bt_infohash char(20) not null references torrent, - http_seed text not null - ); - - create table if not exists file ( - stream_hash char(96) references stream, - bt_infohash char(20) references torrent, - file_name text, - download_directory text, - blob_data_rate real not null, - status text not null, - saved_file integer not null, - content_fee text, - added_on integer not null - ); - - create table if not exists content_claim ( - stream_hash char(96) references stream, - bt_infohash char(20) references torrent, - claim_outpoint text unique not null references claim - ); - - create table if not exists support ( - support_outpoint text not null primary key, - claim_id text not null, - amount integer not null, - address text not null - ); - - create table if not exists reflected_stream ( - sd_hash text not null, - reflector_address text not null, - timestamp integer, - primary key (sd_hash, reflector_address) - ); - - create table if not exists peer ( - node_id char(96) not null primary key, - address text not null, - udp_port integer not null, - tcp_port integer, - unique (address, udp_port) - ); - """ - - def __init__(self, conf: Config, path, loop=None, time_getter: typing.Optional[typing.Callable[[], float]] = None): - super().__init__(path) - self.conf = conf - self.content_claim_callbacks = {} - self.loop = loop or asyncio.get_event_loop() - self.time_getter = time_getter or time.time - - async def run_and_return_one_or_none(self, query, *args): - for row in await self.db.execute_fetchall(query, args): - if len(row) == 1: - return row[0] - return row - - async def run_and_return_list(self, query, *args): - rows = list(await self.db.execute_fetchall(query, args)) - return [col[0] for col in rows] if rows else [] - - # # # # # # # # # blob functions # # # # # # # # # - - async def add_blobs(self, *blob_hashes_and_lengths: typing.Tuple[str, int], finished=False): - def _add_blobs(transaction: sqlite3.Connection): - transaction.executemany( - "insert or ignore into blob values (?, ?, ?, ?, ?, ?, ?)", - ( - (blob_hash, length, 0, 0, "pending" if not finished else "finished", 0, 0) - for blob_hash, length in blob_hashes_and_lengths - ) - ).fetchall() - if finished: - transaction.executemany( - "update blob set status='finished' where blob.blob_hash=?", ( - (blob_hash, ) for blob_hash, _ in blob_hashes_and_lengths - ) - ).fetchall() - return await self.db.run(_add_blobs) - - def get_blob_status(self, blob_hash: str): - return self.run_and_return_one_or_none( - "select status from blob where blob_hash=?", blob_hash - ) - - def update_last_announced_blobs(self, blob_hashes: typing.List[str]): - def _update_last_announced_blobs(transaction: sqlite3.Connection): - last_announced = self.time_getter() - return transaction.executemany( - "update blob set next_announce_time=?, last_announced_time=?, single_announce=0 " - "where blob_hash=?", - ((int(last_announced + (DATA_EXPIRATION / 2)), int(last_announced), blob_hash) - for blob_hash in blob_hashes) - ).fetchall() - return self.db.run(_update_last_announced_blobs) - - def should_single_announce_blobs(self, blob_hashes, immediate=False): - def set_single_announce(transaction): - now = int(self.time_getter()) - for blob_hash in blob_hashes: - if immediate: - transaction.execute( - "update blob set single_announce=1, next_announce_time=? " - "where blob_hash=? and status='finished'", (int(now), blob_hash) - ).fetchall() - else: - transaction.execute( - "update blob set single_announce=1 where blob_hash=? 
and status='finished'", (blob_hash,) - ).fetchall() - return self.db.run(set_single_announce) - - def get_blobs_to_announce(self): - def get_and_update(transaction): - timestamp = int(self.time_getter()) - if self.conf.announce_head_and_sd_only: - r = transaction.execute( - "select blob_hash from blob " - "where blob_hash is not null and " - "(should_announce=1 or single_announce=1) and next_announce_time<? and status='finished' " - "order by next_announce_time asc limit ?", - (timestamp, int(self.conf.concurrent_blob_announcers * 10)) - ).fetchall() - else: - r = transaction.execute( - "select blob_hash from blob where blob_hash is not null " - "and next_announce_time<? and status='finished' " - "order by next_announce_time asc limit ?", - (timestamp, int(self.conf.concurrent_blob_announcers * 10)) - ).fetchall() - return [b[0] for b in r] - return self.db.run(get_and_update) - - def delete_blobs_from_db(self, blob_hashes): - def delete_blobs(transaction): - transaction.executemany( - "delete from blob where blob_hash=?;", ((blob_hash,) for blob_hash in blob_hashes) - ).fetchall() - return self.db.run_with_foreign_keys_disabled(delete_blobs) - - def get_all_blob_hashes(self): - return self.run_and_return_list("select blob_hash from blob") - - def sync_missing_blobs(self, blob_files: typing.Set[str]) -> typing.Awaitable[typing.Set[str]]: - def _sync_blobs(transaction: sqlite3.Connection) -> typing.Set[str]: - finished_blob_hashes = tuple( - blob_hash for (blob_hash, ) in transaction.execute( - "select blob_hash from blob where status='finished'" - ).fetchall() - ) - finished_blobs_set = set(finished_blob_hashes) - to_update_set = finished_blobs_set.difference(blob_files) - transaction.executemany( - "update blob set status='pending' where blob_hash=?", - ((blob_hash, ) for blob_hash in to_update_set) - ).fetchall() - return blob_files.intersection(finished_blobs_set) - return self.db.run(_sync_blobs) - - # # # # # # # # # stream functions # # # # # # # # # - - async def stream_exists(self, sd_hash: str) -> bool: - streams = await self.run_and_return_one_or_none("select stream_hash from stream where sd_hash=?", sd_hash) - return streams is not None - - async def file_exists(self, sd_hash: str) -> bool: - streams = await self.run_and_return_one_or_none("select f.stream_hash from file f " - "inner join stream s on " - "s.stream_hash=f.stream_hash and s.sd_hash=?", sd_hash) - return streams is not None - - def store_stream(self, sd_blob: 'BlobFile', descriptor: 'StreamDescriptor'): - return self.db.run(store_stream, sd_blob, descriptor) - - def get_blobs_for_stream(self, stream_hash, only_completed=False) -> typing.Awaitable[typing.List[BlobInfo]]: - def _get_blobs_for_stream(transaction): - crypt_blob_infos = [] - stream_blobs = transaction.execute( - "select blob_hash, position, iv from stream_blob where stream_hash=? 
" - "order by position asc", (stream_hash, ) - ).fetchall() - if only_completed: - lengths = transaction.execute( - "select b.blob_hash, b.blob_length from blob b " - "inner join stream_blob s ON b.blob_hash=s.blob_hash and b.status='finished' and s.stream_hash=?", - (stream_hash, ) - ).fetchall() - else: - lengths = transaction.execute( - "select b.blob_hash, b.blob_length from blob b " - "inner join stream_blob s ON b.blob_hash=s.blob_hash and s.stream_hash=?", - (stream_hash, ) - ).fetchall() - - blob_length_dict = {} - for blob_hash, length in lengths: - blob_length_dict[blob_hash] = length - - for blob_hash, position, iv in stream_blobs: - blob_length = blob_length_dict.get(blob_hash, 0) - crypt_blob_infos.append(BlobInfo(position, blob_length, iv, blob_hash)) - if not blob_hash: - break - return crypt_blob_infos - return self.db.run(_get_blobs_for_stream) - - def get_sd_blob_hash_for_stream(self, stream_hash): - return self.run_and_return_one_or_none( - "select sd_hash from stream where stream_hash=?", stream_hash - ) - - def get_stream_hash_for_sd_hash(self, sd_blob_hash): - return self.run_and_return_one_or_none( - "select stream_hash from stream where sd_hash = ?", sd_blob_hash - ) - - def delete_stream(self, descriptor: 'StreamDescriptor'): - return self.db.run_with_foreign_keys_disabled(delete_stream, descriptor) - - async def delete_torrent(self, bt_infohash: str): - return await self.db.run(delete_torrent, bt_infohash) - - # # # # # # # # # file stuff # # # # # # # # # - - def save_downloaded_file(self, stream_hash: str, file_name: typing.Optional[str], - download_directory: typing.Optional[str], data_payment_rate: float, - content_fee: typing.Optional[Transaction] = None, - added_on: typing.Optional[int] = None) -> typing.Awaitable[int]: - return self.save_published_file( - stream_hash, file_name, download_directory, data_payment_rate, status="running", - content_fee=content_fee, added_on=added_on - ) - - def save_published_file(self, stream_hash: str, file_name: typing.Optional[str], - download_directory: typing.Optional[str], data_payment_rate: float, - status: str = "finished", - content_fee: typing.Optional[Transaction] = None, - added_on: typing.Optional[int] = None) -> typing.Awaitable[int]: - return self.db.run(store_file, stream_hash, file_name, download_directory, data_payment_rate, status, - content_fee, added_on) - - async def update_manually_removed_files_since_last_run(self): - """ - Update files that have been removed from the downloads directory since the last run - """ - def update_manually_removed_files(transaction: sqlite3.Connection): - files = {} - query = "select stream_hash, download_directory, file_name from file where saved_file=1 " \ - "and stream_hash is not null" - for (stream_hash, download_directory, file_name) in transaction.execute(query).fetchall(): - if download_directory and file_name: - files[stream_hash] = download_directory, file_name - return files - - def detect_removed(files): - return [ - stream_hash for stream_hash, (download_directory, file_name) in files.items() - if not os.path.isfile(os.path.join(binascii.unhexlify(download_directory).decode(), - binascii.unhexlify(file_name).decode())) - ] - - def update_db_removed(transaction: sqlite3.Connection, removed): - query = "update file set file_name=null, download_directory=null, saved_file=0 where stream_hash in {}" - for cur in _batched_select(transaction, query, removed): - cur.fetchall() - - stream_and_file = await self.db.run(update_manually_removed_files) - removed = await 
self.loop.run_in_executor(None, detect_removed, stream_and_file) - if removed: - await self.db.run(update_db_removed, removed) - - def get_all_lbry_files(self) -> typing.Awaitable[typing.List[typing.Dict]]: - return self.db.run(get_all_lbry_files) - - def change_file_status(self, stream_hash: str, new_status: str): - log.debug("update file status %s -> %s", stream_hash, new_status) - return self.db.execute_fetchall("update file set status=? where stream_hash=?", (new_status, stream_hash)) - - async def change_file_download_dir_and_file_name(self, stream_hash: str, download_dir: typing.Optional[str], - file_name: typing.Optional[str]): - if not file_name or not download_dir: - encoded_file_name, encoded_download_dir = None, None - else: - encoded_file_name = binascii.hexlify(file_name.encode()).decode() - encoded_download_dir = binascii.hexlify(download_dir.encode()).decode() - return await self.db.execute_fetchall("update file set download_directory=?, file_name=? where stream_hash=?", ( - encoded_download_dir, encoded_file_name, stream_hash, - )) - - async def save_content_fee(self, stream_hash: str, content_fee: Transaction): - return await self.db.execute_fetchall("update file set content_fee=? where stream_hash=?", ( - binascii.hexlify(content_fee.raw), stream_hash, - )) - - async def set_saved_file(self, stream_hash: str): - return await self.db.execute_fetchall("update file set saved_file=1 where stream_hash=?", ( - stream_hash, - )) - - async def clear_saved_file(self, stream_hash: str): - return await self.db.execute_fetchall("update file set saved_file=0 where stream_hash=?", ( - stream_hash, - )) - - async def recover_streams(self, descriptors_and_sds: typing.List[typing.Tuple['StreamDescriptor', 'BlobFile', - typing.Optional[Transaction]]], - download_directory: str): - def _recover(transaction: sqlite3.Connection): - stream_hashes = [x[0].stream_hash for x in descriptors_and_sds] - for descriptor, sd_blob, content_fee in descriptors_and_sds: - content_claim = transaction.execute( - "select * from content_claim where stream_hash=?", (descriptor.stream_hash, ) - ).fetchone() - delete_stream(transaction, descriptor) # this will also delete the content claim - store_stream(transaction, sd_blob, descriptor) - store_file(transaction, descriptor.stream_hash, os.path.basename(descriptor.suggested_file_name), - download_directory, 0.0, 'stopped', content_fee=content_fee) - if content_claim: - transaction.execute("insert or ignore into content_claim values (?, ?, ?)", content_claim) - transaction.executemany( - "update file set status='stopped' where stream_hash=?", - ((stream_hash, ) for stream_hash in stream_hashes) - ).fetchall() - download_dir = binascii.hexlify(self.conf.download_dir.encode()).decode() - transaction.executemany( - f"update file set download_directory=? 
where stream_hash=?", - ((download_dir, stream_hash) for stream_hash in stream_hashes) - ).fetchall() - await self.db.run_with_foreign_keys_disabled(_recover) - - def get_all_stream_hashes(self): - return self.run_and_return_list("select stream_hash from stream") - - # # # # # # # # # support functions # # # # # # # # # - - def save_supports(self, claim_id_to_supports: dict): - # TODO: add 'address' to support items returned for a claim from lbrycrdd and lbryum-server - def _save_support(transaction): - bind = "({})".format(','.join(['?'] * len(claim_id_to_supports))) - transaction.execute( - f"delete from support where claim_id in {bind}", tuple(claim_id_to_supports.keys()) - ).fetchall() - for claim_id, supports in claim_id_to_supports.items(): - for support in supports: - transaction.execute( - "insert into support values (?, ?, ?, ?)", - ("%s:%i" % (support['txid'], support['nout']), claim_id, lbc_to_dewies(support['amount']), - support.get('address', "")) - ).fetchall() - return self.db.run(_save_support) - - def get_supports(self, *claim_ids): - def _format_support(outpoint, supported_id, amount, address): - return { - "txid": outpoint.split(":")[0], - "nout": int(outpoint.split(":")[1]), - "claim_id": supported_id, - "amount": dewies_to_lbc(amount), - "address": address, - } - - def _get_supports(transaction): - return [ - _format_support(*support_info) - for support_info in _batched_select( - transaction, - "select * from support where claim_id in {}", - claim_ids - ) - ] - - return self.db.run(_get_supports) - - # # # # # # # # # claim functions # # # # # # # # # - - async def save_claims(self, claim_infos): - claim_id_to_supports = {} - update_file_callbacks = [] - - def _save_claims(transaction): - content_claims_to_update = [] - for claim_info in claim_infos: - outpoint = "%s:%i" % (claim_info['txid'], claim_info['nout']) - claim_id = claim_info['claim_id'] - name = claim_info['name'] - amount = lbc_to_dewies(claim_info['amount']) - height = claim_info['height'] - address = claim_info['address'] - sequence = claim_info['claim_sequence'] - certificate_id = claim_info['value'].signing_channel_id - try: - source_hash = claim_info['value'].stream.source.sd_hash - except (AttributeError, ValueError): - source_hash = None - serialized = binascii.hexlify(claim_info['value'].to_bytes()) - transaction.execute( - "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)", - (outpoint, claim_id, name, amount, height, serialized, certificate_id, address, sequence) - ).fetchall() - # if this response doesn't have support info don't overwrite the existing - # support info - if 'supports' in claim_info: - claim_id_to_supports[claim_id] = claim_info['supports'] - if not source_hash: - continue - stream_hash = transaction.execute( - "select file.stream_hash from stream " - "inner join file on file.stream_hash=stream.stream_hash where sd_hash=?", (source_hash,) - ).fetchone() - if not stream_hash: - continue - stream_hash = stream_hash[0] - known_outpoint = transaction.execute( - "select claim_outpoint from content_claim where stream_hash=?", (stream_hash,) - ).fetchone() - known_claim_id = transaction.execute( - "select claim_id from claim " - "inner join content_claim c3 ON claim.claim_outpoint=c3.claim_outpoint " - "where c3.stream_hash=?", (stream_hash,) - ).fetchone() - if not known_claim_id: - content_claims_to_update.append((stream_hash, outpoint)) - elif known_outpoint != outpoint: - content_claims_to_update.append((stream_hash, outpoint)) - for stream_hash, outpoint in 
content_claims_to_update: - self._save_content_claim(transaction, outpoint, stream_hash) - if stream_hash in self.content_claim_callbacks: - update_file_callbacks.append(self.content_claim_callbacks[stream_hash]()) - - await self.db.run(_save_claims) - if update_file_callbacks: - await asyncio.wait(update_file_callbacks) - if claim_id_to_supports: - await self.save_supports(claim_id_to_supports) - - def save_claims_for_resolve(self, claim_infos): - to_save = {} - for info in claim_infos: - if 'value' in info: - if info['value']: - to_save[info['claim_id']] = info - else: - for key in ('certificate', 'claim'): - if info.get(key, {}).get('value'): - to_save[info[key]['claim_id']] = info[key] - return self.save_claims(to_save.values()) - - @staticmethod - def _save_content_claim(transaction, claim_outpoint, stream_hash): - # get the claim id and serialized metadata - claim_info = transaction.execute( - "select claim_id, serialized_metadata from claim where claim_outpoint=?", (claim_outpoint,) - ).fetchone() - if not claim_info: - raise Exception("claim not found") - new_claim_id, claim = claim_info[0], Claim.from_bytes(binascii.unhexlify(claim_info[1])) - - # certificate claims should not be in the content_claim table - if not claim.is_stream: - raise Exception("claim does not contain a stream") - - # get the known sd hash for this stream - known_sd_hash = transaction.execute( - "select sd_hash from stream where stream_hash=?", (stream_hash,) - ).fetchone() - if not known_sd_hash: - raise Exception("stream not found") - # check the claim contains the same sd hash - if known_sd_hash[0] != claim.stream.source.sd_hash: - raise Exception("stream mismatch") - - # if there is a current claim associated to the file, check that the new claim is an update to it - current_associated_content = transaction.execute( - "select claim_outpoint from content_claim where stream_hash=?", (stream_hash,) - ).fetchone() - if current_associated_content: - current_associated_claim_id = transaction.execute( - "select claim_id from claim where claim_outpoint=?", current_associated_content - ).fetchone()[0] - if current_associated_claim_id != new_claim_id: - raise Exception( - f"mismatching claim ids when updating stream {current_associated_claim_id} vs {new_claim_id}" - ) - - # update the claim associated to the file - transaction.execute("delete from content_claim where stream_hash=?", (stream_hash, )).fetchall() - transaction.execute( - "insert into content_claim values (?, NULL, ?)", (stream_hash, claim_outpoint) - ).fetchall() - - async def save_content_claim(self, stream_hash, claim_outpoint): - await self.db.run(self._save_content_claim, claim_outpoint, stream_hash) - # update corresponding ManagedEncryptedFileDownloader object - if stream_hash in self.content_claim_callbacks: - await self.content_claim_callbacks[stream_hash]() - - async def get_content_claim(self, stream_hash: str, include_supports: typing.Optional[bool] = True) -> typing.Dict: - claims = await self.db.run(get_claims_from_stream_hashes, [stream_hash]) - claim = None - if claims: - claim = claims[stream_hash].as_dict() - if include_supports: - supports = await self.get_supports(claim['claim_id']) - claim['supports'] = supports - claim['effective_amount'] = calculate_effective_amount(claim['amount'], supports) - return claim - - # # # # # # # # # reflector functions # # # # # # # # # - - def update_reflected_stream(self, sd_hash, reflector_address, success=True): - if success: - return self.db.execute_fetchall( - "insert or replace into 
reflected_stream values (?, ?, ?)", - (sd_hash, reflector_address, self.time_getter()) - ) - return self.db.execute_fetchall( - "delete from reflected_stream where sd_hash=? and reflector_address=?", - (sd_hash, reflector_address) - ) - - def get_streams_to_re_reflect(self): - return self.run_and_return_list( - "select s.sd_hash from stream s " - "left outer join reflected_stream r on s.sd_hash=r.sd_hash " - "where r.timestamp is null or r.timestamp < ?", - int(self.time_getter()) - 86400 - ) - - # # # # # # # # # # dht functions # # # # # # # # # # # - async def get_persisted_kademlia_peers(self) -> typing.List[typing.Tuple[bytes, str, int, int]]: - query = 'select node_id, address, udp_port, tcp_port from peer' - return [(binascii.unhexlify(n), a, u, t) for n, a, u, t in await self.db.execute_fetchall(query)] - - async def save_kademlia_peers(self, peers: typing.List['KademliaPeer']): - def _save_kademlia_peers(transaction: sqlite3.Connection): - transaction.execute('delete from peer').fetchall() - transaction.executemany( - 'insert into peer(node_id, address, udp_port, tcp_port) values (?, ?, ?, ?)', - tuple([(binascii.hexlify(p.node_id), p.address, p.udp_port, p.tcp_port) for p in peers]) - ).fetchall() - return await self.db.run(_save_kademlia_peers) diff --git a/lbry/extras/daemon/undecorated.py b/lbry/extras/daemon/undecorated.py deleted file mode 100644 index d9ae85c82..000000000 --- a/lbry/extras/daemon/undecorated.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2016-2017 Ionuț Arțăriși <ionut@artarisi.eu> - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# This came from https://github.com/mapleoin/undecorated - -from inspect import isfunction, ismethod, isclass - -__version__ = '0.3.0' - - -def undecorated(o): - """Remove all decorators from a function, method or class""" - # class decorator - if isinstance(o, type): - return o - - try: - # python2 - closure = o.func_closure - except AttributeError: - pass - - try: - # python3 - closure = o.__closure__ - except AttributeError: - return - - if closure: - for cell in closure: - # avoid infinite recursion - if cell.cell_contents is o: - continue - - # check if the contents looks like a decorator; in that case - # we need to go one level down into the dream, otherwise it - # might just be a different closed-over variable, which we - # can ignore. 
- - # Note: this favors supporting decorators defined without - # @wraps to the detriment of function/method/class closures - if looks_like_a_decorator(cell.cell_contents): - undecd = undecorated(cell.cell_contents) - if undecd: - return undecd - return o - - -def looks_like_a_decorator(a): - return isfunction(a) or ismethod(a) or isclass(a) diff --git a/lbry/extras/system_info.py b/lbry/extras/system_info.py deleted file mode 100644 index 68e24f881..000000000 --- a/lbry/extras/system_info.py +++ /dev/null @@ -1,29 +0,0 @@ -import platform -import os -import logging.handlers - -from lbry import build_info, __version__ as lbrynet_version - -log = logging.getLogger(__name__) - - -def get_platform() -> dict: - os_system = platform.system() - if os.environ and 'ANDROID_ARGUMENT' in os.environ: - os_system = 'android' - d = { - "processor": platform.processor(), - "python_version": platform.python_version(), - "platform": platform.platform(), - "os_release": platform.release(), - "os_system": os_system, - "lbrynet_version": lbrynet_version, - "version": lbrynet_version, - "build": build_info.BUILD, # CI server sets this during build step - } - if d["os_system"] == "Linux": - import distro # pylint: disable=import-outside-toplevel - d["distro"] = distro.info() - d["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown') - - return d
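
The notes below are illustrative sketches for a few of the helpers removed above; any names not present in the diff are hypothetical.

The removed storage layer persists file_name and download_directory hex-encoded: store_file() and change_file_download_dir_and_file_name() write binascii.hexlify(...) strings into the file table, and _get_lbry_file_stream_dict() hands them back still hex-encoded (hence the "# hex" comments). A minimal standard-library sketch of that round trip, assuming the same convention:

import binascii

def encode_path(value: str) -> str:
    # what store_file() does before writing file_name / download_directory
    return binascii.hexlify(value.encode()).decode()

def decode_path(value: str) -> str:
    # inverse, as used when detecting manually removed files
    return binascii.unhexlify(value).decode()

assert decode_path(encode_path("Downloads/vidéo.mp4")) == "Downloads/vidéo.mp4"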
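
The _batched_select() helper works around SQLite's cap on bound parameters per statement (999 in older builds): queries written with an "... in {}" placeholder are expanded and executed in chunks. A standalone sketch of the same idiom, assuming the query contains exactly one "{}" for the IN clause:

import sqlite3

def batched_select(conn: sqlite3.Connection, query: str, parameters, batch_size=900):
    # expand "{}" into "(?, ?, ...)" per chunk and stream the rows back
    for start in range(0, len(parameters), batch_size):
        batch = parameters[start:start + batch_size]
        placeholders = "({})".format(",".join("?" * len(batch)))
        yield from conn.execute(query.format(placeholders), batch)

# e.g. batched_select(conn, "select * from support where claim_id in {}", claim_ids)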
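
update_last_announced_blobs() and get_blobs_to_announce() together form a simple re-announce schedule: each announced blob has next_announce_time pushed to the halfway point of the DHT expiration window, and only finished blobs past that deadline are handed to the announcer, at most concurrent_blob_announcers * 10 per pass. A sketch of the arithmetic; the 86400-second value for DATA_EXPIRATION is an assumption about lbry.dht.constants, not taken from this diff:

import time

DATA_EXPIRATION = 86_400  # assumed value of lbry.dht.constants.DATA_EXPIRATION (24h)

def schedule_next_announce(last_announced: float) -> int:
    # re-announce halfway through the expiration window, as update_last_announced_blobs() does
    return int(last_announced + DATA_EXPIRATION / 2)

def announce_is_due(next_announce_time: int, now: float = None) -> bool:
    # get_blobs_to_announce() only selects rows with next_announce_time < now
    return next_announce_time < (now if now is not None else time.time())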
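
Finally, a usage sketch for the vendored undecorated() helper: it walks a callable's __closure__ cells looking for something function-like, which recovers the original function behind decorators written without functools.wraps. The decorator and API method below are made up for illustration:

def logged(func):
    def wrapper(*args, **kwargs):   # deliberately not using functools.wraps
        return func(*args, **kwargs)
    return wrapper

@logged
def claim_search(**kwargs):
    """Search for claims."""

# the wrapper hides the original docstring...
assert claim_search.__doc__ is None
# ...while undecorated(claim_search) would return the inner claim_search,
# whose __doc__ is still "Search for claims."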