# lbry-sdk/lbrynet/extras/daemon/Daemon.py

import os
import asyncio
import logging
import json
import time
import inspect
import typing
import base58
import random
from urllib.parse import urlencode, quote
from typing import Callable, Optional, List
from binascii import hexlify, unhexlify
from traceback import format_exc

from aiohttp import web
from functools import wraps
from torba.client.wallet import Wallet
from torba.client.baseaccount import SingleKey, HierarchicalDeterministic

from lbrynet import utils
from lbrynet.conf import Config, Setting
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.blob_exchange.downloader import download_blob
from lbrynet.error import DownloadSDTimeout, ComponentsNotStarted
from lbrynet.error import NullFundsError, NegativeFundsError, ComponentStartConditionNotMet
from lbrynet.extras import system_info
from lbrynet.extras.daemon import analytics
from lbrynet.extras.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.extras.daemon.Components import STREAM_MANAGER_COMPONENT
from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, UPNP_COMPONENT
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet.extras.daemon.undecorated import undecorated
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.schema.claim import Claim
from lbrynet.schema.uri import parse_lbry_uri, URIParseError

if typing.TYPE_CHECKING:
    from lbrynet.blob.blob_manager import BlobFileManager
    from lbrynet.dht.node import Node
    from lbrynet.extras.daemon.Components import UPnPComponent
    from lbrynet.extras.daemon.exchange_rate_manager import ExchangeRateManager
    from lbrynet.extras.daemon.storage import SQLiteStorage
    from lbrynet.wallet.manager import LbryWalletManager
    from lbrynet.wallet.ledger import MainNetLedger
    from lbrynet.stream.stream_manager import StreamManager

log = logging.getLogger(__name__)


def requires(*components, **conditions):
    if conditions and ["conditions"] != list(conditions.keys()):
        raise SyntaxError("invalid conditions argument")
    condition_names = conditions.get("conditions", [])

    def _wrap(fn):
        @wraps(fn)
        def _inner(*args, **kwargs):
            component_manager = args[0].component_manager
            for condition_name in condition_names:
                condition_result, err_msg = component_manager.evaluate_condition(condition_name)
                if not condition_result:
                    raise ComponentStartConditionNotMet(err_msg)
            if not component_manager.all_components_running(*components):
                raise ComponentsNotStarted("the following required components have not yet started: "
                                           "%s" % json.dumps(components))
            return fn(*args, **kwargs)
        return _inner
    return _wrap


def deprecated(new_command=None):
    def _deprecated_wrapper(f):
        f.new_command = new_command
        f._deprecated = True
        return f
    return _deprecated_wrapper
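
# Note: `requires` is the guard used on the JSON-RPC handlers in the Daemon class
# below; a hypothetical handler gated on a running wallet component and an unlocked
# wallet would be declared as:
#
#     @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
#     def jsonrpc_example(self):
#         ...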


INITIALIZING_CODE = 'initializing'
# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

CONNECTION_STATUS_CONNECTED = 'connected'
CONNECTION_STATUS_NETWORK = 'network_connection'
CONNECTION_MESSAGES = {
    CONNECTION_STATUS_CONNECTED: 'No connection problems detected',
    CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted",
}

SHORT_ID_LEN = 20
MAX_UPDATE_FEE_ESTIMATE = 0.3


async def maybe_paginate(get_records: Callable, get_record_count: Callable,
                         page: Optional[int], page_size: Optional[int], **constraints):
    if None not in (page, page_size):
        constraints.update({
            "offset": page_size * (page-1),
            "limit": page_size
        })
        return {
            "items": await get_records(**constraints),
            "total_pages": int(((await get_record_count(**constraints)) + (page_size-1)) / page_size),
            "page": page, "page_size": page_size
        }
    return await get_records(**constraints)
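
# maybe_paginate only paginates when both `page` and `page_size` are provided; the
# total_pages expression is a ceiling division, e.g. 25 records with page_size=10
# give int((25 + 9) / 10) == 3 pages.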


def sort_claim_results(claims):
    claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))


DHT_HAS_CONTACTS = "dht_has_contacts"
WALLET_IS_UNLOCKED = "wallet_is_unlocked"


class DHTHasContacts(RequiredCondition):
    name = DHT_HAS_CONTACTS
    component = DHT_COMPONENT
    message = "your node is not connected to the dht"

    @staticmethod
    def evaluate(component):
        return len(component.contacts) > 0


class WalletIsUnlocked(RequiredCondition):
    name = WALLET_IS_UNLOCKED
    component = WALLET_COMPONENT
    message = "your wallet is locked"

    @staticmethod
    def evaluate(component):
        return not component.check_locked()


class JSONRPCError:
    # http://www.jsonrpc.org/specification#error_object
    CODE_PARSE_ERROR = -32700        # Invalid JSON. Error while parsing the JSON text.
    CODE_INVALID_REQUEST = -32600    # The JSON sent is not a valid Request object.
    CODE_METHOD_NOT_FOUND = -32601   # The method does not exist / is not available.
    CODE_INVALID_PARAMS = -32602     # Invalid method parameter(s).
    CODE_INTERNAL_ERROR = -32603     # Internal JSON-RPC error (comparable to an HTTP 500).
    CODE_APPLICATION_ERROR = -32500  # Generic application error.
    CODE_AUTHENTICATION_ERROR = -32501  # Authentication failed.

    MESSAGES = {
        CODE_PARSE_ERROR: "Parse Error. Data is not valid JSON.",
        CODE_INVALID_REQUEST: "JSON data is not a valid Request",
        CODE_METHOD_NOT_FOUND: "Method Not Found",
        CODE_INVALID_PARAMS: "Invalid Params",
        CODE_INTERNAL_ERROR: "Internal Error",
        CODE_AUTHENTICATION_ERROR: "Authentication Failed",
    }

    HTTP_CODES = {
        CODE_INVALID_REQUEST: 400,
        CODE_PARSE_ERROR: 400,
        CODE_INVALID_PARAMS: 400,
        CODE_METHOD_NOT_FOUND: 404,
        CODE_INTERNAL_ERROR: 500,
        CODE_APPLICATION_ERROR: 500,
        CODE_AUTHENTICATION_ERROR: 401,
    }

    def __init__(self, message, code=CODE_APPLICATION_ERROR, traceback=None, data=None):
        assert isinstance(code, int), "'code' must be an int"
        assert (data is None or isinstance(data, dict)), "'data' must be None or a dict"
        self.code = code
        if message is None:
            message = self.MESSAGES[code] if code in self.MESSAGES else "API Error"
        self.message = message
        self.data = {} if data is None else data
        self.traceback = []
        if traceback is not None:
            trace_lines = traceback.split("\n")
            for i, t in enumerate(trace_lines):
                if "--- <exception caught here> ---" in t:
                    if len(trace_lines) > i + 1:
                        self.traceback = [j for j in trace_lines[i+1:] if j]
                    break

    def to_dict(self):
        return {
            'code': self.code,
            'message': self.message,
            'data': self.traceback
        }

    @classmethod
    def create_from_exception(cls, message, code=CODE_APPLICATION_ERROR, traceback=None):
        return cls(message, code=code, traceback=traceback)


class UnknownAPIMethodError(Exception):
    pass


def jsonrpc_dumps_pretty(obj, **kwargs):
    if isinstance(obj, JSONRPCError):
        data = {"jsonrpc": "2.0", "error": obj.to_dict()}
    else:
        data = {"jsonrpc": "2.0", "result": obj}
    return json.dumps(data, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n"


def trap(err, *to_trap):
    err.trap(*to_trap)
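
# For reference, jsonrpc_dumps_pretty produces the standard JSON-RPC 2.0 envelope:
# {"jsonrpc": "2.0", "result": ...} on success, or on failure
# {"jsonrpc": "2.0", "error": {"code": ..., "message": ..., "data": [...traceback lines...]}}.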


class JSONRPCServerType(type):
    def __new__(mcs, name, bases, newattrs):
        klass = type.__new__(mcs, name, bases, newattrs)
        klass.callable_methods = {}
        klass.deprecated_methods = {}

        for methodname in dir(klass):
            if methodname.startswith("jsonrpc_"):
                method = getattr(klass, methodname)
                if not hasattr(method, '_deprecated'):
                    klass.callable_methods.update({methodname.split("jsonrpc_")[1]: method})
                else:
                    klass.deprecated_methods.update({methodname.split("jsonrpc_")[1]: method})
        return klass
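
# With this metaclass, defining e.g. `jsonrpc_account_balance` on the class exposes it
# to clients as "account_balance"; methods decorated with @deprecated are collected in
# `deprecated_methods` and dispatched through their `new_command` replacement instead.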


class Daemon(metaclass=JSONRPCServerType):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """

    def __init__(self, conf: Config, component_manager: typing.Optional[ComponentManager] = None):
        self.conf = conf
        self._node_id = None
        self._installation_id = None
        self.session_id = base58.b58encode(utils.generate_id()).decode()
        self.analytics_manager = analytics.AnalyticsManager(conf, self.installation_id, self.session_id)
        self.component_manager = component_manager or ComponentManager(
            conf, analytics_manager=self.analytics_manager,
            skip_components=conf.components_to_skip or []
        )
        self.component_startup_task = None
        # stored as a tuple to match the annotation: (time of last check, connected)
        self._connection_status: typing.Tuple[float, bool] = (self.component_manager.loop.time(), False)
        self.stop_event = asyncio.Event()

        logging.getLogger('aiohttp.access').setLevel(logging.WARN)
        app = web.Application()
        app.router.add_get('/lbryapi', self.handle_old_jsonrpc)
        app.router.add_post('/lbryapi', self.handle_old_jsonrpc)
        app.router.add_post('/', self.handle_old_jsonrpc)
        self.runner = web.AppRunner(app)

    @property
    def dht_node(self) -> typing.Optional['Node']:
        return self.component_manager.get_component(DHT_COMPONENT)

    @property
    def wallet_manager(self) -> typing.Optional['LbryWalletManager']:
        return self.component_manager.get_component(WALLET_COMPONENT)

    @property
    def storage(self) -> typing.Optional['SQLiteStorage']:
        return self.component_manager.get_component(DATABASE_COMPONENT)

    @property
    def stream_manager(self) -> typing.Optional['StreamManager']:
        return self.component_manager.get_component(STREAM_MANAGER_COMPONENT)

    @property
    def exchange_rate_manager(self) -> typing.Optional['ExchangeRateManager']:
        return self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT)

    @property
    def blob_manager(self) -> typing.Optional['BlobFileManager']:
        return self.component_manager.get_component(BLOB_COMPONENT)

    @property
    def upnp(self) -> typing.Optional['UPnPComponent']:
        return self.component_manager.get_component(UPNP_COMPONENT)

    @classmethod
    def get_api_definitions(cls):
        prefix = 'jsonrpc_'
        not_grouped = ['block_show', 'report_bug', 'routing_table_get']
        api = {
            'groups': {
                group_name[:-len('_DOC')].lower(): getattr(cls, group_name).strip()
                for group_name in dir(cls) if group_name.endswith('_DOC')
            },
            'commands': {}
        }
        for jsonrpc_method in dir(cls):
            if jsonrpc_method.startswith(prefix):
                full_name = jsonrpc_method[len(prefix):]
                method = getattr(cls, jsonrpc_method)
                if full_name in not_grouped:
                    name_parts = [full_name]
                else:
                    name_parts = full_name.split('_', 1)
                if len(name_parts) == 1:
                    group = None
                    name, = name_parts
                elif len(name_parts) == 2:
                    group, name = name_parts
                    assert group in api['groups'], \
                        f"Group {group} does not have doc string for command {full_name}."
                else:
                    raise NameError(f'Could not parse method name: {jsonrpc_method}')
                api['commands'][full_name] = {
                    'api_method_name': full_name,
                    'name': name,
                    'group': group,
                    'doc': method.__doc__,
                    'method': method,
                }
                if hasattr(method, '_deprecated'):
                    api['commands'][full_name]['replaced_by'] = method.new_command

        for command in api['commands'].values():
            if 'replaced_by' in command:
                command['replaced_by'] = api['commands'][command['replaced_by']]

        return api

    @property
    def db_revision_file_path(self):
        return os.path.join(self.conf.data_dir, 'db_revision')

    @property
    def installation_id(self):
        install_id_filename = os.path.join(self.conf.data_dir, "install_id")
        if not self._installation_id:
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
        if not self._installation_id:
            self._installation_id = base58.b58encode(utils.generate_id()).decode()
            with open(install_id_filename, "w") as install_id_file:
                install_id_file.write(self._installation_id)
        return self._installation_id

    def ensure_data_dir(self):
        if not os.path.isdir(self.conf.data_dir):
            os.makedirs(self.conf.data_dir)
        if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")):
            os.makedirs(os.path.join(self.conf.data_dir, "blobfiles"))
        return self.conf.data_dir

    def ensure_wallet_dir(self):
        if not os.path.isdir(self.conf.wallet_dir):
            os.makedirs(self.conf.wallet_dir)

    def ensure_download_dir(self):
        if not os.path.isdir(self.conf.download_dir):
            os.makedirs(self.conf.download_dir)

    async def update_connection_status(self):
        connected = await utils.async_check_connection()
        self._connection_status = (self.component_manager.loop.time(), connected)

    async def get_connection_status(self) -> str:
        if self._connection_status[0] + 300 > self.component_manager.loop.time():
            if not self._connection_status[1]:
                await self.update_connection_status()
        else:
            await self.update_connection_status()
        return CONNECTION_STATUS_CONNECTED if self._connection_status[1] else CONNECTION_STATUS_NETWORK
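
    # Note: the last connectivity probe is reused for up to 300 seconds, but a failed
    # result is re-probed on every status query so recovery is noticed promptly.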

    async def start(self):
        log.info("Starting LBRYNet Daemon")
        log.debug("Settings: %s", json.dumps(self.conf.settings_dict, indent=2))
        log.info("Platform: %s", json.dumps(system_info.get_platform(), indent=2))
        await self.analytics_manager.send_server_startup()
        await self.runner.setup()

        try:
            site = web.TCPSite(self.runner, self.conf.api_host, self.conf.api_port)
            await site.start()
            log.info('lbrynet API listening on TCP %s:%i', *site._server.sockets[0].getsockname()[:2])
        except OSError as e:
            log.error('lbrynet API failed to bind TCP %s for listening. Daemon is already running or this port is '
                      'already in use by another application.', self.conf.api)
            await self.analytics_manager.send_server_startup_error(str(e))
            raise SystemExit()

        try:
            await self.initialize()
        except asyncio.CancelledError:
            log.info("shutting down before finished starting")
            await self.analytics_manager.send_server_startup_error("shutting down before finished starting")
            await self.stop()
        except Exception as e:
            await self.analytics_manager.send_server_startup_error(str(e))
            log.exception('Failed to start lbrynet-daemon')

        await self.analytics_manager.send_server_startup_success()

    async def initialize(self):
        self.ensure_data_dir()
        self.ensure_wallet_dir()
        self.ensure_download_dir()
        if not self.analytics_manager.is_started:
            await self.analytics_manager.start()
        self.component_startup_task = asyncio.create_task(self.component_manager.start())
        await self.component_startup_task

    async def stop(self):
        if self.component_startup_task is not None:
            if self.component_startup_task.done():
                await self.component_manager.stop()
            else:
                self.component_startup_task.cancel()
        await self.runner.cleanup()
        if self.analytics_manager.is_started:
            self.analytics_manager.stop()

    async def handle_old_jsonrpc(self, request):
        data = await request.json()
        result = await self._process_rpc_call(data)
        ledger = None
        if 'wallet' in self.component_manager.get_components_status():
            # self.ledger only available if wallet component is not skipped
            ledger = self.ledger
        return web.Response(
            text=jsonrpc_dumps_pretty(result, ledger=ledger),
            content_type='application/json'
        )
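
    # A minimal client call against this endpoint (assuming the default API address,
    # localhost:5279, from Config):
    #
    #     curl -X POST -d '{"method": "status", "params": {}}' http://localhost:5279/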

    async def _process_rpc_call(self, data):
        args = data.get('params', {})

        try:
            function_name = data['method']
        except KeyError:
            return JSONRPCError(
                "Missing 'method' value in request.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        try:
            fn = self._get_jsonrpc_method(function_name)
        except UnknownAPIMethodError:
            return JSONRPCError(
                f"Invalid method requested: {function_name}.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        if args in ([{}], []):
            _args, _kwargs = (), {}
        elif isinstance(args, dict):
            _args, _kwargs = (), args
        elif len(args) == 1 and isinstance(args[0], dict):
            # TODO: this is for backwards compatibility. Remove this once API and UI are updated
            # TODO: also delete EMPTY_PARAMS then
            _args, _kwargs = (), args[0]
        elif len(args) == 2 and isinstance(args[0], list) and isinstance(args[1], dict):
            _args, _kwargs = args
        else:
            return JSONRPCError(
                "Invalid parameters format.", JSONRPCError.CODE_INVALID_PARAMS
            )

        params_error, erroneous_params = self._check_params(fn, _args, _kwargs)
        if params_error is not None:
            params_error_message = '{} for {} command: {}'.format(
                params_error, function_name, ', '.join(erroneous_params)
            )
            log.warning(params_error_message)
            return JSONRPCError(
                params_error_message, JSONRPCError.CODE_INVALID_PARAMS
            )

        try:
            result = fn(self, *_args, **_kwargs)
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except asyncio.CancelledError:
            log.info("cancelled API call for: %s", function_name)
            raise
        except Exception as e:  # pylint: disable=broad-except
            log.exception("error handling api request")
            return JSONRPCError(
                str(e), JSONRPCError.CODE_APPLICATION_ERROR, format_exc()
            )

    def _verify_method_is_callable(self, function_path):
        if function_path not in self.callable_methods:
            raise UnknownAPIMethodError(function_path)

    def _get_jsonrpc_method(self, function_path):
        if function_path in self.deprecated_methods:
            new_command = self.deprecated_methods[function_path].new_command
            log.warning('API function \"%s\" is deprecated, please update to use \"%s\"',
                        function_path, new_command)
            function_path = new_command
        self._verify_method_is_callable(function_path)
        return self.callable_methods.get(function_path)

    @staticmethod
    def _check_params(function, args_tup, args_dict):
        argspec = inspect.getfullargspec(undecorated(function))
        num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults)

        duplicate_params = [
            duplicate_param
            for duplicate_param in argspec.args[1:len(args_tup) + 1]
            if duplicate_param in args_dict
        ]
        if duplicate_params:
            return 'Duplicate parameters', duplicate_params

        # compute the slice end explicitly: with a `-num_optional_params` end, a method
        # with no optional params would produce an empty slice and missing required
        # params would go unreported
        missing_required_params = [
            required_param
            for required_param in argspec.args[len(args_tup) + 1:len(argspec.args) - num_optional_params]
            if required_param not in args_dict
        ]
        if missing_required_params:
            return 'Missing required parameters', missing_required_params

        extraneous_params = [] if argspec.varkw is not None else [
            extra_param
            for extra_param in args_dict
            if extra_param not in argspec.args[1:]
        ]
        if extraneous_params:
            return 'Extraneous parameters', extraneous_params

        return None, None
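
    # For example, for a handler f(self, a, b, c=1): args_tup=("x",) with
    # args_dict={"a": "y"} reports 'Duplicate parameters' for "a", while empty
    # args_tup and args_dict report 'Missing required parameters' for "a" and "b".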

    @property
    def default_wallet(self):
        try:
            return self.wallet_manager.default_wallet
        except AttributeError:
            return None

    @property
    def default_account(self) -> Optional[LBCAccount]:
        try:
            return self.wallet_manager.default_account
        except AttributeError:
            return None

    @property
    def ledger(self) -> Optional['MainNetLedger']:
        try:
            return self.wallet_manager.default_account.ledger
        except AttributeError:
            return None

    async def get_est_cost_from_uri(self, uri: str) -> typing.Optional[float]:
        """
        Resolve a name and return the estimated stream cost
        """
        resolved = await self.resolve(uri)
        if resolved:
            claim_response = resolved[uri]
        else:
            claim_response = None

        if claim_response and 'claim' in claim_response:
            if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
                claim_value = Claim.from_bytes(claim_response['claim']['value'])
                if not claim_value.stream.has_fee:
                    return 0.0
                return round(
                    self.exchange_rate_manager.convert_currency(
                        claim_value.stream.fee.currency, "LBC", claim_value.stream.fee.amount
                    ), 5
                )
            else:
                log.warning("Failed to estimate cost for %s", uri)

    ############################################################################
    #                                                                          #
    #                    JSON-RPC API methods start here                       #
    #                                                                          #
    ############################################################################

    def jsonrpc_stop(self):
        """
        Stop lbrynet API server.

        Usage:
            stop

        Options:
            None

        Returns:
            (string) Shutdown message
        """
        log.info("Shutting down lbrynet daemon")
        self.stop_event.set()
        return "Shutting down"

    async def jsonrpc_status(self):
        """
        Get daemon status

        Usage:
            status

        Options:
            None

        Returns:
            (dict) lbrynet-daemon status
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'blob_manager': (bool),
                    'blockchain_headers': (bool),
                    'database': (bool),
                    'dht': (bool),
                    'exchange_rate_manager': (bool),
                    'hash_announcer': (bool),
                    'peer_protocol_server': (bool),
                    'stream_manager': (bool),
                    'upnp': (bool),
                    'wallet': (bool),
                },
                'connection_status': {
                    'code': (str) connection status code,
                    'message': (str) connection status message
                },
                'blockchain_headers': {
                    'downloading_headers': (bool),
                    'download_progress': (float) 0-100.0
                },
                'wallet': {
                    'blocks': (int) local blockchain height,
                    'blocks_behind': (int) remote_height - local_height,
                    'best_blockhash': (str) block hash of most recent block,
                    'is_encrypted': (bool),
                    'is_locked': (bool),
                },
                'dht': {
                    'node_id': (str) lbry dht node id - hex encoded,
                    'peers_in_routing_table': (int) the number of peers in the routing table,
                },
                'blob_manager': {
                    'finished_blobs': (int) number of finished blobs in the blob manager,
                },
                'hash_announcer': {
                    'announce_queue_size': (int) number of blobs currently queued to be announced
                },
                'stream_manager': {
                    'managed_files': (int) count of files in the stream manager,
                },
                'upnp': {
                    'aioupnp_version': (str),
                    'redirects': {
                        <TCP | UDP>: (int) external_port,
                    },
                    'gateway': (str) manufacturer and model,
                    'dht_redirect_set': (bool),
                    'peer_redirect_set': (bool),
                    'external_ip': (str) external ip address,
                }
            }
        """
        connection_code = await self.get_connection_status()
        response = {
            'installation_id': self.installation_id,
            'is_running': all(self.component_manager.get_components_status().values()),
            'skipped_components': self.component_manager.skip_components,
            'startup_status': self.component_manager.get_components_status(),
            'connection_status': {
                'code': connection_code,
                'message': CONNECTION_MESSAGES[connection_code],
            },
        }
        for component in self.component_manager.components:
            status = await component.get_status()
            if status:
                response[component.component_name] = status
        return response

    def jsonrpc_version(self):
        """
        Get lbrynet API server version information

        Usage:
            version

        Options:
            None

        Returns:
            (dict) Dictionary of lbry version information
            {
                'build': (str) build type (e.g. "dev", "rc", "release"),
                'ip': (str) remote ip, if available,
                'lbrynet_version': (str) lbrynet_version,
                'lbryum_version': (str) lbryum_version,
                'lbryschema_version': (str) lbryschema_version,
                'os_release': (str) os release string,
                'os_system': (str) os name,
                'platform': (str) platform string,
                'processor': (str) processor type,
                'python_version': (str) python version,
            }
        """
        platform_info = system_info.get_platform()
        log.info("Get version info: %s", json.dumps(platform_info))
        return platform_info

    @requires(WALLET_COMPONENT)
    async def jsonrpc_resolve(self, urls: typing.Union[str, list]):
        """
        Get the claim that a URL refers to.

        Usage:
            resolve <urls>...

        Options:
            --urls=<urls>   : (str, list) one or more urls to resolve

        Returns:
            Dictionary of results, keyed by url
            '<url>': {
                    If a resolution error occurs:
                    'error': Error message

                    If the url resolves to a channel or a claim in a channel:
                    'certificate': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the certificate claim,
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}],
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }

                    If the url resolves to a channel:
                    'claims_in_channel': (int) number of claims in the channel,

                    If the url resolves to a claim:
                    'claim': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the claim,
                        'channel_name': (str) channel name if claim is in a channel
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}]
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }
            }
        """
        if isinstance(urls, str):
            urls = [urls]

        results = {}

        valid_urls = set()
        for u in urls:
            try:
                parse_lbry_uri(u)
                valid_urls.add(u)
            except URIParseError:
                results[u] = {"error": "%s is not a valid url" % u}

        resolved = await self.resolve(*tuple(valid_urls))

        for resolved_uri in resolved:
            results[resolved_uri] = resolved[resolved_uri]

        return results
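
    # Example (hypothetical url): resolving "lbry://what" returns a dict keyed by the
    # url whose value contains 'claim' (or 'certificate' for channels) on success,
    # or 'error' if resolution failed.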

    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
              STREAM_MANAGER_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_get(self, uri, file_name=None, timeout=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>]

        Options:
            --uri=<uri>              : (str) uri of the content to download
            --file_name=<file_name>  : (str) specified name for the downloaded file
            --timeout=<timeout>      : (int) download timeout in number of seconds

        Returns:
            (dict) Dictionary containing information about the stream
            {
                'completed': (bool) true if download is completed,
                'file_name': (str) name of file,
                'download_directory': (str) download directory,
                'points_paid': (float) credit paid to download file,
                'stopped': (bool) true if download is stopped,
                'stream_hash': (str) stream hash of file,
                'stream_name': (str) stream name,
                'suggested_file_name': (str) suggested file name,
                'sd_hash': (str) sd hash of file,
                'download_path': (str) download path of file,
                'mime_type': (str) mime type of file,
                'key': (str) key attached to file,
                'total_bytes': (int) file size in bytes,
                'written_bytes': (int) written size in bytes,
                'blobs_completed': (int) number of fully downloaded blobs,
                'blobs_in_stream': (int) total blobs on stream,
                'status': (str) downloader status,
                'claim_id': (str) claim id,
                'outpoint': (str) claim outpoint string,
                'txid': (str) claim txid,
                'nout': (int) claim nout,
                'metadata': (dict) claim metadata,
                'channel_claim_id': (str) None if claim is not signed
                'channel_name': (str) None if claim is not signed
                'claim_name': (str) claim name
            }
        """
        try:
            stream = await self.stream_manager.download_stream_from_uri(
                uri, self.exchange_rate_manager, file_name, timeout
            )
            if not stream:
                raise DownloadSDTimeout(uri)
        except Exception as e:
            log.warning("Error downloading %s: %s", uri, str(e))
            return {"error": str(e)}
        else:
            return stream.as_dict()

    SETTINGS_DOC = """
    Settings management.
    """

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Options:
            None

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return self.conf.settings_dict

    def jsonrpc_settings_set(self, key, value):
        """
        Set daemon settings

        Usage:
            settings_set (<key>) (<value>)

        Options:
            None

        Returns:
            (dict) Updated dictionary of daemon settings
        """
        with self.conf.update_config() as c:
            attr: Setting = getattr(type(c), key)
            cleaned = attr.deserialize(value)
            setattr(c, key, cleaned)
        return {key: cleaned}
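
    # Example (hypothetical key and value): `settings_set download_dir /tmp/lbry`
    # runs the value through the Setting descriptor's deserialize() before storing
    # it on the live config.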

    ACCOUNT_DOC = """
    Create, modify and inspect wallet accounts.
    """

    @requires("wallet")
    def jsonrpc_account_list(self, account_id=None, confirmations=6,
                             include_claims=False, show_seed=False):
        """
        List details of all of the accounts or a specific account.

        Usage:
            account_list [<account_id>] [--confirmations=<confirmations>]
                         [--include_claims] [--show_seed]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given
            --confirmations=<confirmations> : (int) required confirmations (default: 6)
            --include_claims                : (bool) include claims, requires that a
                                              LBC account is specified (default: false)
            --show_seed                     : (bool) show the seed for the account

        Returns:
            (map) balance of account(s)
        """
        kwargs = {
            'confirmations': confirmations,
            'show_seed': show_seed
        }
        if account_id:
            return self.get_account_or_error(account_id).get_details(**kwargs)
        else:
            return self.wallet_manager.get_detailed_accounts(**kwargs)

    @requires("wallet")
    async def jsonrpc_account_balance(self, account_id=None, confirmations=0):
        """
        Return the balance of an account

        Usage:
            account_balance [<account_id>] [--confirmations=<confirmations>]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given. Otherwise default account.
            --confirmations=<confirmations> : (int) Only include transactions with this many
                                              confirmed blocks.

        Returns:
            (decimal) amount of lbry credits in wallet
        """
        account = self.get_account_or_default(account_id)
        dewies = await account.get_balance(confirmations=confirmations)
        return dewies_to_lbc(dewies)
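
    # Balances are tracked internally as an integer number of dewies; dewies_to_lbc
    # renders that as the decimal LBC string returned above (1 LBC == 10^8 dewies).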

    @requires("wallet")
    async def jsonrpc_account_add(
            self, account_name, single_key=False, seed=None, private_key=None, public_key=None):
        """
        Add a previously created account from a seed, private key or public key (read-only).
        Specify --single_key for single address or vanity address accounts.

        Usage:
            account_add (<account_name> | --account_name=<account_name>)
                        (--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
                        [--single_key]

        Options:
            --account_name=<account_name>  : (str) name of the account to add
            --seed=<seed>                  : (str) seed to generate new account from
            --private_key=<private_key>    : (str) private key for new account
            --public_key=<public_key>      : (str) public key for new account
            --single_key                   : (bool) create single key account, default is multi-key

        Returns:
            (map) added account details
        """
        account = LBCAccount.from_dict(
            self.ledger, self.default_wallet, {
                'name': account_name,
                'seed': seed,
                'private_key': private_key,
                'public_key': public_key,
                'address_generator': {
                    'name': SingleKey.name if single_key else HierarchicalDeterministic.name
                }
            }
        )
        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)
        self.default_wallet.save()
        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'added'
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result

    @requires("wallet")
    async def jsonrpc_account_create(self, account_name, single_key=False):
        """
        Create a new account. Specify --single_key if you want to use
        the same address for all transactions (not recommended).

        Usage:
            account_create (<account_name> | --account_name=<account_name>) [--single_key]

        Options:
            --account_name=<account_name>  : (str) name of the account to create
            --single_key                   : (bool) create single key account, default is multi-key

        Returns:
            (map) new account details
        """
        account = LBCAccount.generate(
            self.ledger, self.default_wallet, account_name, {
                'name': SingleKey.name if single_key else HierarchicalDeterministic.name
            }
        )
        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)
        self.default_wallet.save()
        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'created'
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result

    @requires("wallet")
    def jsonrpc_account_remove(self, account_id):
        """
        Remove an existing account.

        Usage:
            account_remove (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id>  : (str) id of the account to remove

        Returns:
            (map) details of removed account
        """
        account = self.get_account_or_error(account_id)
        self.default_wallet.accounts.remove(account)
        self.default_wallet.save()
        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'removed'
        result.pop('certificates', None)
        return result

    @requires("wallet")
    def jsonrpc_account_set(
            self, account_id, default=False, new_name=None,
            change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
        """
        Change various settings on an account.

        Usage:
            account_set (<account_id> | --account_id=<account_id>)
                        [--default] [--new_name=<new_name>]
                        [--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
                        [--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]

        Options:
            --account_id=<account_id>       : (str) id of the account to change
            --default                       : (bool) make this account the default
            --new_name=<new_name>           : (str) new name for the account
            --receiving_gap=<receiving_gap> : (int) set the gap for receiving addresses
            --receiving_max_uses=<receiving_max_uses> : (int) set the maximum number of times to
                                                        use a receiving address
            --change_gap=<change_gap>       : (int) set the gap for change addresses
            --change_max_uses=<change_max_uses> : (int) set the maximum number of times to
                                                  use a change address

        Returns:
            (map) updated account details
        """
        account = self.get_account_or_error(account_id)
        change_made = False

        if account.receiving.name == HierarchicalDeterministic.name:
            address_changes = {
                'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
                'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
            }
            for chain_name in address_changes:
                chain = getattr(account, chain_name)
                for attr, value in address_changes[chain_name].items():
                    if value is not None:
                        setattr(chain, attr, value)
                        change_made = True

        if new_name is not None:
            account.name = new_name
            change_made = True

        if default:
            self.default_wallet.accounts.remove(account)
            self.default_wallet.accounts.insert(0, account)
            change_made = True

        if change_made:
            account.modified_on = time.time()
            self.default_wallet.save()

        result = account.to_dict()
        result['id'] = account.id
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result

    @requires(WALLET_COMPONENT)
    def jsonrpc_account_unlock(self, password, account_id=None):
        """
        Unlock an encrypted account

        Usage:
            account_unlock (<password> | --password=<password>) [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>        : (str) id for the account to unlock

        Returns:
            (bool) true if account is unlocked, otherwise false
        """
        return self.wallet_manager.unlock_account(
            password, self.get_account_or_default(account_id, lbc_only=False)
        )
@requires(WALLET_COMPONENT)
def jsonrpc_account_lock(self, account_id=None):
"""
Lock an unlocked account
Usage:
account_lock [<account_id> | --account_id=<account_id>]
Options:
--account_id=<account_id> : (str) id for the account to lock
Returns:
(bool) true if account is locked, otherwise false
"""
return self.wallet_manager.lock_account(self.get_account_or_default(account_id, lbc_only=False))

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_decrypt(self, account_id=None):
        """
        Decrypt an encrypted account; this will remove the wallet password. The
        account must be unlocked to decrypt it.

        Usage:
            account_decrypt [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>  : (str) id for the account to decrypt

        Returns:
            (bool) true if wallet is decrypted, otherwise false
        """
        return self.wallet_manager.decrypt_account(self.get_account_or_default(account_id, lbc_only=False))

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_encrypt(self, new_password, account_id=None):
        """
        Encrypt an unencrypted account with a password

        Usage:
            account_encrypt (<new_password> | --new_password=<new_password>)
                            [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>        : (str) id for the account to encrypt

        Returns:
            (bool) true if wallet is encrypted, otherwise false
        """
        return self.wallet_manager.encrypt_account(
            new_password,
            self.get_account_or_default(account_id, lbc_only=False)
        )

    @requires("wallet")
    def jsonrpc_account_max_address_gap(self, account_id):
        """
        Finds ranges of consecutive addresses that are unused and returns the length
        of the longest such range: for change and receiving address chains. This is
        useful to figure out ideal values to set for 'receiving_gap' and 'change_gap'
        account settings.

        Usage:
            account_max_address_gap (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id>        : (str) account for which to get max gaps

        Returns:
            (map) maximum gap for change and receiving addresses
        """
        return self.get_account_or_error(account_id).get_max_gap()

    @requires("wallet")
    def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
                             everything=False, outputs=1, broadcast=False):
        """
        Transfer some amount (or --everything) to an account from another
        account (can be the same account). Amounts are interpreted as LBC.
        You can also spread the transfer across a number of --outputs (cannot
        be used together with --everything).

        Usage:
            account_fund [<to_account> | --to_account=<to_account>]
                         [<from_account> | --from_account=<from_account>]
                         (<amount> | --amount=<amount> | --everything)
                         [<outputs> | --outputs=<outputs>]
                         [--broadcast]

        Options:
            --to_account=<to_account>     : (str) send to this account
            --from_account=<from_account> : (str) spend from this account
            --amount=<amount>             : (str) the amount of lbc to transfer
            --everything                  : (bool) transfer everything (excluding claims), default: false.
            --outputs=<outputs>           : (int) split payment across many outputs, default: 1.
            --broadcast                   : (bool) actually broadcast the transaction, default: false.

        Returns:
            (map) transaction performing requested action
        """
        to_account = self.get_account_or_default(to_account, 'to_account')
        from_account = self.get_account_or_default(from_account, 'from_account')
        amount = self.get_dewies_or_error('amount', amount) if amount else None
        if not isinstance(outputs, int):
            raise ValueError("--outputs must be an integer.")
        if everything and outputs > 1:
            raise ValueError("Using --everything along with --outputs is not supported.")
        return from_account.fund(
            to_account=to_account, amount=amount, everything=everything,
            outputs=outputs, broadcast=broadcast
        )

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_account_send(self, amount, addresses, account_id=None, preview=False):
        """
        Send the same number of credits to multiple addresses.

        Usage:
            account_send <amount> <addresses>... [--account_id=<account_id>] [--preview]

        Options:
            --account_id=<account_id>  : (str) account to fund the transaction
            --preview                  : (bool) do not broadcast the transaction

        Returns:
        """
        account = self.get_account_or_default(account_id)

        amount = self.get_dewies_or_error("amount", amount)
        if not amount:
            raise NullFundsError
        if amount < 0:
            raise NegativeFundsError()

        if addresses and not isinstance(addresses, list):
            addresses = [addresses]

        outputs = []
        for address in addresses:
            self.valid_address_or_error(address)
            outputs.append(
                Output.pay_pubkey_hash(
                    amount, self.ledger.address_to_hash160(address)
                )
            )

        tx = await Transaction.create(
            [], outputs, [account], account
        )

        if not preview:
            await self.ledger.broadcast(tx)
            await self.analytics_manager.send_credits_sent()
        else:
            await account.ledger.release_tx(tx)

        return tx
SYNC_DOC = """
Wallet synchronization.
"""
2019-02-11 00:36:21 +01:00
@requires("wallet")
2019-03-11 14:52:35 +01:00
def jsonrpc_sync_hash(self):
2019-02-11 00:36:21 +01:00
"""
2019-03-11 14:52:35 +01:00
Deterministic hash of the wallet.
2019-02-11 00:36:21 +01:00
Usage:
2019-03-25 03:59:55 +01:00
sync_hash
2019-02-11 00:36:21 +01:00
Options:
Returns:
2019-03-11 14:52:35 +01:00
(str) sha256 hash of wallet
"""
return hexlify(self.default_wallet.hash).decode()
2019-02-11 00:36:21 +01:00
2019-03-11 14:52:35 +01:00

    @requires("wallet")
    def jsonrpc_sync_apply(self, password, data=None, encrypt_password=None):
        """
        Apply incoming synchronization data, if provided, and then produce a sync hash and
        an encrypted wallet.

        Usage:
            sync_apply <password> [--data=<data>] [--encrypt-password=<encrypt_password>]

        Options:
            --password=<password>         : (str) password to decrypt incoming and encrypt outgoing data
            --data=<data>                 : (str) incoming sync data, if any
            --encrypt-password=<encrypt_password> : (str) password to encrypt outgoing data if different
                                                    from the decrypt password, used during password changes

        Returns:
            (map) sync hash and data
        """
        if data is not None:
            decrypted_data = Wallet.unpack(password, data)
            for account_data in decrypted_data['accounts']:
                _, _, pubkey = LBCAccount.keys_from_dict(self.ledger, account_data)
                account_id = pubkey.address
                local_match = None
                for local_account in self.default_wallet.accounts:
                    if account_id == local_account.id:
                        local_match = local_account
                        break
                if local_match is not None:
                    local_match.apply(account_data)
                else:
                    new_account = LBCAccount.from_dict(self.ledger, self.default_wallet, account_data)
                    if self.ledger.network.is_connected:
                        asyncio.create_task(self.ledger.subscribe_account(new_account))
            self.default_wallet.save()

        encrypted = self.default_wallet.pack(encrypt_password or password)
        return {
            'hash': self.jsonrpc_sync_hash(),
            'data': encrypted.decode()
        }
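
    # A password change can be performed by sending the current password as --password
    # and the new one as --encrypt-password: incoming data is unpacked with the old
    # key and the wallet is re-packed with the new one.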

    ADDRESS_DOC = """
    List, generate and verify addresses.
    """

    @requires(WALLET_COMPONENT)
    async def jsonrpc_address_is_mine(self, address, account_id=None):
        """
        Checks if an address is associated with the current wallet.

        Usage:
            address_is_mine (<address> | --address=<address>)
                            [<account_id> | --account_id=<account_id>]

        Options:
            --address=<address>       : (str) address to check
            --account_id=<account_id> : (str) id of the account to use

        Returns:
            (bool) true, if address is associated with current wallet
        """
        account = self.get_account_or_default(account_id)
        match = await self.ledger.db.get_address(address=address, account=account)
        if match is not None:
            return True
        return False

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_list(self, account_id=None, page=None, page_size=None):
        """
        List account addresses

        Usage:
            address_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to use
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns:
            List of wallet addresses
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_addresses,
            account.get_address_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_unused(self, account_id=None):
        """
        Return an address containing no balance, will create
        a new address if there is none.

        Usage:
            address_unused [--account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id of the account to use

        Returns:
            (str) Unused wallet address in base58
        """
        return self.get_account_or_default(account_id).receiving.get_or_create_usable_address()

    FILE_DOC = """
    File management.
    """

    @requires(STREAM_MANAGER_COMPONENT)
    def jsonrpc_file_list(self, sort=None, reverse=False, comparison=None, **kwargs):
        """
        List files limited by optional filters

        Usage:
            file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                      [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                      [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                      [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
                      [--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
                      [--comparison=<comparison>] [--full_status=<full_status>] [--reverse]

        Options:
            --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
            --file_name=<file_name>                : (str) get file with matching file name in the
                                                     downloads folder
            --stream_hash=<stream_hash>            : (str) get file with matching stream hash
            --rowid=<rowid>                        : (int) get file with matching row id
            --claim_id=<claim_id>                  : (str) get file with matching claim id
            --outpoint=<outpoint>                  : (str) get file with matching claim outpoint
            --txid=<txid>                          : (str) get file with matching claim txid
            --nout=<nout>                          : (int) get file with matching claim nout
            --channel_claim_id=<channel_claim_id>  : (str) get file with matching channel claim id
            --channel_name=<channel_name>          : (str) get file with matching channel name
            --claim_name=<claim_name>              : (str) get file with matching claim name
            --blobs_in_stream=<blobs_in_stream>    : (int) get file with matching blobs in stream
            --blobs_remaining=<blobs_remaining>    : (int) amount of remaining blobs to download
            --sort=<sort_by>                       : (str) field to sort by (one of the above filter fields)
            --comparison=<comparison>              : (str) logical comparison, (eq | ne | g | ge | l | le)

        Returns:
            (list) List of files
            [
                {
                    'completed': (bool) true if download is completed,
                    'file_name': (str) name of file,
                    'download_directory': (str) download directory,
                    'points_paid': (float) credit paid to download file,
                    'stopped': (bool) true if download is stopped,
                    'stream_hash': (str) stream hash of file,
                    'stream_name': (str) stream name,
                    'suggested_file_name': (str) suggested file name,
                    'sd_hash': (str) sd hash of file,
                    'download_path': (str) download path of file,
                    'mime_type': (str) mime type of file,
                    'key': (str) key attached to file,
                    'total_bytes_lower_bound': (int) lower bound file size in bytes,
                    'total_bytes': (int) file upper bound size in bytes,
                    'written_bytes': (int) written size in bytes,
                    'blobs_completed': (int) number of fully downloaded blobs,
                    'blobs_in_stream': (int) total blobs on stream,
                    'blobs_remaining': (int) total blobs remaining to download,
                    'status': (str) downloader status,
                    'claim_id': (str) None if claim is not found else the claim id,
                    'txid': (str) None if claim is not found else the transaction id,
                    'nout': (int) None if claim is not found else the transaction output index,
                    'outpoint': (str) None if claim is not found else the tx and output,
                    'metadata': (dict) None if claim is not found else the claim metadata,
                    'channel_claim_id': (str) None if claim is not found or not signed,
                    'channel_name': (str) None if claim is not found or not signed,
                    'claim_name': (str) None if claim is not found else the claim name
                },
            ]
        """
        sort = sort or 'rowid'
        comparison = comparison or 'eq'
        return [
            stream.as_dict() for stream in self.stream_manager.get_filtered_streams(
                sort, reverse, comparison, **kwargs
            )
        ]

    @requires(STREAM_MANAGER_COMPONENT)
    async def jsonrpc_file_set_status(self, status, **kwargs):
        """
        Start or stop downloading a file

        Usage:
            file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
                            [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]

        Options:
            --status=<status>            : (str) one of "start" or "stop"
            --sd_hash=<sd_hash>          : (str) set status of file with matching sd hash
            --file_name=<file_name>      : (str) set status of file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : (str) set status of file with matching stream hash
            --rowid=<rowid>              : (int) set status of file with matching row id

        Returns:
            (str) Confirmation message
        """

        if status not in ['start', 'stop']:
            raise Exception('Status must be "start" or "stop".')

        streams = self.stream_manager.get_filtered_streams(**kwargs)
        if not streams:
            raise Exception(f'Unable to find a file for {kwargs}')
        stream = streams[0]
        if status == 'start' and not stream.running:
            await self.stream_manager.start_stream(stream)
            msg = "Resumed download"
        elif status == 'stop' and stream.running:
            await self.stream_manager.stop_stream(stream)
            msg = "Stopped download"
        else:
            msg = (
                "File was already being downloaded" if status == 'start'
                else "File was already stopped"
            )
        return msg

    @requires(STREAM_MANAGER_COMPONENT)
    async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
        """
        Delete a LBRY file

        Usage:
            file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                        [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                        [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                        [--channel_name=<channel_name>]

        Options:
            --delete_from_download_dir             : (bool) delete file from download directory,
                                                     instead of just deleting blobs
            --delete_all                           : (bool) if there are multiple matching files,
                                                     allow the deletion of multiple files.
                                                     Otherwise do not delete anything.
            --sd_hash=<sd_hash>                    : (str) delete by file sd hash
            --file_name=<file_name>                : (str) delete by file name in downloads folder
            --stream_hash=<stream_hash>            : (str) delete by file stream hash
            --rowid=<rowid>                        : (int) delete by file row id
            --claim_id=<claim_id>                  : (str) delete by file claim id
            --txid=<txid>                          : (str) delete by file claim txid
            --nout=<nout>                          : (int) delete by file claim nout
            --claim_name=<claim_name>              : (str) delete by file claim name
            --channel_claim_id=<channel_claim_id>  : (str) delete by file channel claim id
            --channel_name=<channel_name>          : (str) delete by file channel claim name

        Returns:
            (bool) true if deletion was successful
        """

        streams = self.stream_manager.get_filtered_streams(**kwargs)

        if len(streams) > 1:
            if not delete_all:
                log.warning("There are %i files to delete, use narrower filters to select one",
                            len(streams))
                return False
            else:
                log.warning("Deleting %i files",
                            len(streams))

        if not streams:
            log.warning("There is no file to delete")
            return False
        else:
            for stream in streams:
                message = f"Deleted file {stream.file_name}"
                await self.stream_manager.delete_stream(stream, delete_file=delete_from_download_dir)
                log.info(message)
            result = True
        return result
CLAIM_DOC = """
List and search all types of claims.
2019-01-23 19:00:58 +01:00
"""
2019-01-22 23:44:17 +01:00
@requires(WALLET_COMPONENT)
def jsonrpc_claim_list(self, account_id=None, page=None, page_size=None):
"""
List my stream and channel claims.

Usage:
claim_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]

Options:
--account_id=<account_id> : (str) id of the account to query
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
account.get_claims,
account.get_claim_count,
page, page_size
)
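
# Illustrative sketch (assumption): requesting one page of claims at a time over
# JSON-RPC. When page arguments are omitted, maybe_paginate (defined earlier in
# this module) falls back to returning the unpaginated list, hence the "maybe".
#
#   import requests
#
#   claims = requests.post('http://localhost:5279', json={
#       'method': 'claim_list',
#       'params': {'page': 1, 'page_size': 20}
#   }).json()['result']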
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_search(
self, name=None, claim_id=None, txid=None, nout=None,
channel_id=None, winning=False, page=1, page_size=10):
"""
Search for stream and channel claims on the blockchain.

Use --channel_id=<channel_id> to list all stream claims in a channel.

Usage:
claim_search [<name> | --name=<name>] [--claim_id=<claim_id>] [--txid=<txid> --nout=<nout>]
[--channel_id=<channel_id>] [--winning] [--page=<page>] [--page_size=<page_size>]

Options:
--name=<name> : (str) find claims with this name
--claim_id=<claim_id> : (str) find a claim with this claim_id
--txid=<txid> : (str) find a claim with this txid:nout
--nout=<nout> : (int) find a claim with this txid:nout
--channel_id=<channel_id> : (str) limit search to specific channel claim id (returns stream claims)
--winning : (bool) limit to winning claims
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination
"""
claims = []
if name is not None:
claims = await self.ledger.network.get_claims_for_name(name)
elif claim_id is not None:
claim = await self.wallet_manager.get_claim_by_claim_id(claim_id)
if claim and claim != 'claim not found':
claims = {'claims': [claim]}
elif txid is not None and nout is not None:
claim = await self.wallet_manager.get_claim_by_outpoint(txid, int(nout))
if claim and claim != 'claim not found':
claims = {'claims': [claim]}
elif channel_id is not None:
claim = await self.wallet_manager.get_claim_by_claim_id(channel_id)
if claim and claim != 'claim not found':
channel_url = f"{claim['name']}#{claim['claim_id']}"
resolve = await self.resolve(channel_url, page=page, page_size=page_size)
resolve = resolve.get(channel_url, {})
claims = resolve.get('claims_in_channel', []) or []
total_pages = 0
if claims:
total_pages = int((resolve['total_claims'] + (page_size-1)) / page_size)
#sort_claim_results(claims)
return {"items": claims, "total_pages": total_pages, "page": page, "page_size": page_size}
else:
raise Exception("Must specify either name, claim_id, or txid:nout.")
if claims:
resolutions = await self.resolve(*(f"{claim['name']}#{claim['claim_id']}" for claim in claims['claims']))
claims = [value.get('claim', value.get('certificate')) for value in resolutions.values()]
sort_claim_results(claims)
return {"items": claims, "total_pages": 1, "page": 1, "page_size": len(claims)}
CHANNEL_DOC = """
Create, update, abandon and list your channel claims.
"""
@deprecated('channel_create')
def jsonrpc_channel_new(self):
""" deprecated """
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_channel_create(
self, name, bid, allow_duplicate_name=False, account_id=None, claim_address=None, preview=False, **kwargs):
"""
Create a new channel by generating a channel private key and establishing an '@' prefixed claim.

Usage:
channel_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
[--allow_duplicate_name=<allow_duplicate_name>]
[--title=<title>] [--description=<description>]
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
[--contact_email=<contact_email>]
[--homepage_url=<homepage_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
[--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

Options:
--name=<name> : (str) name of the channel prefixed with '@'
--allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with
given name. default: false.
--bid=<bid> : (decimal) amount to back the claim
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--tags=<tags> : (list) content tags
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--locations=<locations> : (list) locations of the channel, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--contact_email=<contact_email>: (str) email of channel owner
--homepage_url=<homepage_url> : (str) homepage url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--cover_url=<cover_url> : (str) url of cover image
--account_id=<account_id> : (str) id of the account to store channel
--claim_address=<claim_address>: (str) address where the channel is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
"""
account = self.get_account_or_default(account_id)
self.valid_channel_name_or_error(name)
amount = self.get_dewies_or_error('bid', bid, positive_value=True)
claim_address = await self.get_receiving_address(claim_address, account)
existing_channels = await account.get_channels(claim_name=name)
if len(existing_channels) > 0:
if not allow_duplicate_name:
raise Exception(
f"You already have a channel under the name '{name}'. "
f"Use --allow-duplicate-name flag to override."
)
claim = Claim()
claim.channel.update(**kwargs)
tx = await Transaction.claim_create(
name, claim, amount, claim_address, [account], account
)
txo = tx.outputs[0]
txo.generate_channel_private_key()
if not preview:
await tx.sign([account])
await account.ledger.broadcast(tx)
account.add_channel_private_key(txo.ref, txo.private_key)
self.default_wallet.save()
await self.storage.save_claims([self._old_get_temp_claim_info(
tx, txo, claim_address, claim, name, dewies_to_lbc(amount)
)])
await self.analytics_manager.send_new_channel()
else:
await account.ledger.release_tx(tx)
return tx
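
# Illustrative sketch (assumption): creating a channel over JSON-RPC. As the code
# above shows, the daemon generates the channel's private key and stores it in the
# wallet as part of the call, so the client only needs the returned transaction.
#
#   import requests
#
#   tx = requests.post('http://localhost:5279', json={
#       'method': 'channel_create',
#       'params': {'name': '@example', 'bid': '0.01', 'title': 'Example channel'}
#   }).json()['result']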
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_channel_update(
self, claim_id, bid=None, account_id=None, claim_address=None,
new_signing_key=False, preview=False, **kwargs):
"""
Update an existing channel claim.

Usage:
channel_update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>]
[--title=<title>] [--description=<description>]
[--tags=<tags>...] [--clear_tags]
[--languages=<languages>...] [--clear_languages]
[--locations=<locations>...] [--clear_locations]
[--contact_email=<contact_email>]
[--homepage_url=<homepage_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
[--account_id=<account_id>] [--claim_address=<claim_address>] [--new_signing_key] [--preview]
Options:
--claim_id=<claim_id> : (str) claim_id of the channel to update
--bid=<bid> : (decimal) amount to back the claim
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--clear_tags : (bool) clear existing tags (prior to adding new ones)
--tags=<tags> : (list) add content tags
--clear_languages : (bool) clear existing languages (prior to adding new ones)
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--clear_locations : (bool) clear existing locations (prior to adding new ones)
--locations=<locations> : (list) locations of the channel, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--contact_email=<contact_email>: (str) email of channel owner
--homepage_url=<homepage_url> : (str) homepage url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--cover_url=<cover_url> : (str) url of cover image
--account_id=<account_id> : (str) id of the account to store channel
--claim_address=<claim_address>: (str) address where the channel is sent
--new_signing_key : (bool) generate a new signing key, will invalidate all previous publishes
--preview : (bool) do not broadcast the transaction
"""
account = self.get_account_or_default(account_id)
existing_channels = await account.get_claims(claim_id=claim_id)
if len(existing_channels) != 1:
raise Exception(
f"Can't find the channel '{claim_id}' in account '{account_id}'."
)
old_txo = existing_channels[0]
if not old_txo.claim.is_channel:
raise Exception(
f"A claim with id '{claim_id}' was found but it is not a channel."
)
if bid is not None:
amount = self.get_dewies_or_error('bid', bid, positive_value=True)
else:
amount = old_txo.amount
if claim_address is not None:
self.valid_address_or_error(claim_address)
else:
claim_address = old_txo.get_address(account.ledger)
old_txo.claim.channel.update(**kwargs)
tx = await Transaction.claim_update(
old_txo, amount, claim_address, [account], account
)
new_txo = tx.outputs[0]
if new_signing_key:
new_txo.generate_channel_private_key()
else:
new_txo.private_key = old_txo.private_key
new_txo.script.generate()
if not preview:
await tx.sign([account])
await account.ledger.broadcast(tx)
account.add_channel_private_key(new_txo.ref, new_txo.private_key)
self.default_wallet.save()
await self.storage.save_claims([self._old_get_temp_claim_info(
tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
)])
await self.analytics_manager.send_new_channel()
else:
await account.ledger.release_tx(tx)
return tx
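
# Illustrative sketch (assumption): rotating a channel's signing key. As the
# docstring above warns, a new key invalidates signatures on earlier publishes,
# which would then need to be re-signed via stream_update.
#
#   import requests
#
#   tx = requests.post('http://localhost:5279', json={
#       'method': 'channel_update',
#       'params': {'claim_id': '<channel claim id>', 'new_signing_key': True}
#   }).json()['result']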
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_channel_abandon(
self, claim_id=None, txid=None, nout=None, account_id=None,
preview=False, blocking=True):
"""
Abandon one of my channel claims.
Usage:
channel_abandon [<claim_id> | --claim_id=<claim_id>]
[<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
[--account_id=<account_id>]
[--preview] [--blocking]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
--txid=<txid> : (str) txid of the claim to abandon
--nout=<nout> : (int) nout of the claim to abandon
--account_id=<account_id> : (str) id of the account to use
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until abandon is in mempool
"""
account = self.get_account_or_default(account_id)
if txid is not None and nout is not None:
claims = await account.get_claims(**{'txo.txid': txid, 'txo.position': nout})
elif claim_id is not None:
claims = await account.get_claims(claim_id=claim_id)
else:
raise Exception('Must specify claim_id, or txid and nout')
if not claims:
raise Exception('No claim found for the specified claim_id or txid:nout')
tx = await Transaction.create(
[Input.spend(txo) for txo in claims], [], [account], account
)
if not preview:
await account.ledger.broadcast(tx)
await self.analytics_manager.send_claim_action('abandon')
if blocking:
await account.ledger.wait(tx)
else:
await account.ledger.release_tx(tx)
return tx
@requires(WALLET_COMPONENT)
def jsonrpc_channel_list(self, account_id=None, page=None, page_size=None):
"""
List my channel claims.

Usage:
channel_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]

Options:
--account_id=<account_id> : (str) id of the account to use
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination

Returns:
(list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim
is in the wallet.
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
account.get_channels,
account.get_channel_count,
page, page_size
)
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_export(self, claim_id):
"""
Export serialized channel signing information for a given certificate claim id
Usage:
channel_export (<claim_id> | --claim_id=<claim_id>)
Options:
--claim_id=<claim_id> : (str) Claim ID to export information about
Returns:
(str) Serialized certificate information
"""
return await self.wallet_manager.export_certificate_info(claim_id)
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_import(self, serialized_certificate_info):
"""
Import serialized channel signing information (to allow signing new claims to the channel)
Usage:
channel_import (<serialized_certificate_info> | --serialized_certificate_info=<serialized_certificate_info>)
Options:
--serialized_certificate_info=<serialized_certificate_info> : (str) certificate info
Returns:
(dict) Result dictionary
"""
return await self.wallet_manager.import_certificate_info(serialized_certificate_info)
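
# Illustrative sketch (assumption): moving a channel's signing credentials between
# two daemons by chaining channel_export and channel_import. Hostnames, ports and
# the 'result' response key are assumptions about the JSON-RPC transport.
#
#   import requests
#
#   def rpc(url, method, **params):
#       return requests.post(url, json={'method': method, 'params': params}).json()['result']
#
#   info = rpc('http://localhost:5279', 'channel_export', claim_id='<claim id>')
#   rpc('http://other-host:5279', 'channel_import', serialized_certificate_info=info)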
STREAM_DOC = """
Create, update, abandon, list and inspect your stream claims.
"""
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_publish(self, name, **kwargs):
"""
Create or update a stream claim at a given name (use 'stream create/update' for more control).
Usage:
publish (<name> | --name=<name>) [--bid=<bid>] [--file_path=<file_path>]
[<stream_type> | --stream_type=<stream_type>]
[--tags=<tags>...] [--clear_tags]
[--languages=<languages>...] [--clear_languages]
[--locations=<locations>...] [--clear_locations]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
[--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
[--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
[--release_time=<release_time>]
[--video_width=<video_width>] [--video_height=<video_height>] [--video_duration=<video_duration>]
[--image_width=<image_width>] [--image_height=<image_height>] [--audio_duration=<audio_duration>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>]
[--channel_account_id=<channel_account_id>...]
[--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]
Options:
--name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
--bid=<bid> : (decimal) amount to back the claim
--file_path=<file_path> : (str) path to file to be associated with name.
--stream_type=<stream_type> : (str) type of stream
--fee_currency=<fee_currency> : (string) specify fee currency
--fee_amount=<fee_amount> : (decimal) content download fee
--fee_address=<fee_address> : (str) address where to send fee payments, will use
value from --claim_address if not provided
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--author=<author> : (str) author of the publication. The usage for this field is not
the same as for channels. The author field is used to credit an author
who is not the publisher and is not represented by the channel. For
example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
be published to a channel such as '@classics', or to no channel at all
--clear_tags : (bool) clear existing tags (prior to adding new ones)
--tags=<tags> : (list) add content tags
--clear_languages : (bool) clear existing languages (prior to adding new ones)
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--clear_locations : (bool) clear existing locations (prior to adding new ones)
--locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--license=<license> : (str) publication license
--license_url=<license_url> : (str) publication license url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
--duration=<duration> : (int) audio/video duration in seconds, an attempt will be made to
calculate this automatically if not provided
--image_width=<image_width> : (int) image width
--image_height=<image_height> : (int) image height
--video_width=<video_width> : (int) video width
--video_height=<video_height> : (int) video height
--video_duration=<video_duration> : (int) video duration in seconds, an attempt will be made to
calculate this automatically if not provided
--audio_duration=<audio_duration> : (int) audio duration in seconds, an attempt will be made to
calculate this automatically if not provided
--channel_id=<channel_id> : (str) claim id of the publisher channel
--channel_name=<channel_name> : (str) name of publisher channel
--channel_account_id=<channel_id>: (str) one or more account ids for accounts to look in
for channel certificates, defaults to all accounts.
--account_id=<account_id> : (str) account to use for funding the transaction
--claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
"""
self.valid_stream_name_or_error(name)
account = self.get_account_or_default(kwargs.get('account_id'))
claims = await account.get_claims(claim_name=name)
if len(claims) == 0:
if 'bid' not in kwargs:
raise Exception("'bid' is a required argument for new publishes.")
if 'file_path' not in kwargs:
raise Exception("'file_path' is a required argument for new publishes.")
return await self.jsonrpc_stream_create(name, **kwargs)
elif len(claims) == 1:
assert claims[0].claim.is_stream, f"Claim at name '{name}' is not a stream claim."
return await self.jsonrpc_stream_update(claims[0].claim_id, **kwargs)
raise Exception(
f"There are {len(claims)} claims for '{name}', please use 'stream update' command "
f"to update a specific stream claim."
)
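
# Illustrative sketch (assumption): a first publish for a name must carry both bid
# and file_path, per the checks above; a later call with the same name dispatches
# to stream_update and may omit them.
#
#   import requests
#
#   requests.post('http://localhost:5279', json={
#       'method': 'publish',
#       'params': {'name': 'my-video', 'bid': '0.01', 'file_path': '/path/to/video.mp4'}
#   })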
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_stream_create(
self, name, bid, file_path, allow_duplicate_name=False,
channel_id=None, channel_name=None, channel_account_id=None,
account_id=None, claim_address=None, preview=False, **kwargs):
"""
Make a new stream claim and announce the associated file to lbrynet.
Usage:
stream_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
(<file_path> | --file_path=<file_path>) [<stream_type> | --stream_type=<stream_type>]
[--allow_duplicate_name=<allow_duplicate_name>]
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
[--title=<title>] [--description=<description>] [--author=<author>]
[--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
[--release_time=<release_time>]
[--video_width=<video_width>] [--video_height=<video_height>] [--video_duration=<video_duration>]
[--image_width=<image_width>] [--image_height=<image_height>] [--audio_duration=<audio_duration>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>]
[--channel_account_id=<channel_account_id>...]
[--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]
Options:
--name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
--allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
given name. default: false.
--bid=<bid> : (decimal) amount to back the claim
--file_path=<file_path> : (str) path to file to be associated with name.
--stream_type=<stream_type> : (str) type of stream
--fee_currency=<fee_currency> : (string) specify fee currency
--fee_amount=<fee_amount> : (decimal) content download fee
--fee_address=<fee_address> : (str) address where to send fee payments, will use
value from --claim_address if not provided
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--author=<author> : (str) author of the publication. The usage for this field is not
the same as for channels. The author field is used to credit an author
who is not the publisher and is not represented by the channel. For
example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
be published to a channel such as '@classics', or to no channel at all
--tags=<tags> : (list) add content tags
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--license=<license> : (str) publication license
--license_url=<license_url> : (str) publication license url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
--duration=<duration> : (int) audio/video duration in seconds, an attempt will be made to
calculate this automatically if not provided
--image_width=<image_width> : (int) image width
--image_height=<image_height> : (int) image height
--video_width=<video_width> : (int) video width
--video_height=<video_height> : (int) video height
--video_duration=<video_duration> : (int) video duration in seconds, an attempt will be made to
calculate this automatically if not provided
--audio_duration=<audio_duration> : (int) audio duration in seconds, an attempt will be made to
calculate this automatically if not provided
--channel_id=<channel_id> : (str) claim id of the publisher channel
--channel_account_id=<channel_id>: (str) one or more account ids for accounts to look in
for channel certificates, defaults to all accounts.
--account_id=<account_id> : (str) account to use for funding the transaction
--claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
"""
self.valid_stream_name_or_error(name)
account = self.get_account_or_default(account_id)
channel = await self.get_channel_or_none(channel_account_id, channel_id, channel_name, for_signing=True)
amount = self.get_dewies_or_error('bid', bid, positive_value=True)
claim_address = await self.get_receiving_address(claim_address, account)
kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)
claims = await account.get_claims(claim_name=name)
if len(claims) > 0:
if not allow_duplicate_name:
raise Exception(
f"You already have a stream claim published under the name '{name}'. "
f"Use --allow_duplicate_name flag to override."
)
claim = Claim()
claim.stream.update(file_path=file_path, sd_hash='0'*96, **kwargs)
tx = await Transaction.claim_create(
name, claim, amount, claim_address, [account], account, channel
)
new_txo = tx.outputs[0]
if not preview:
file_stream = await self.stream_manager.create_stream(file_path)
claim.stream.sd_hash = file_stream.sd_hash
new_txo.script.generate()
if channel:
new_txo.sign(channel)
await tx.sign([account])
await account.ledger.broadcast(tx)
await self.storage.save_claims([self._old_get_temp_claim_info(
tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
)])
stream_hash = await self.storage.get_stream_hash_for_sd_hash(claim.stream.sd_hash)
if stream_hash:
await self.storage.save_content_claim(stream_hash, new_txo.id)
await self.analytics_manager.send_claim_action('publish')
else:
await account.ledger.release_tx(tx)
return tx
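
# Illustrative sketch (assumption): creating a channel-signed stream claim. Note
# the flow above: the claim is drafted with a 96-character placeholder sd_hash so
# the transaction can be built, and the real sd_hash replaces it once the file has
# been turned into a stream.
#
#   import requests
#
#   requests.post('http://localhost:5279', json={
#       'method': 'stream_create',
#       'params': {'name': 'my-post', 'bid': '0.01',
#                  'file_path': '/path/to/post.md', 'channel_id': '<channel claim id>'}
#   })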
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_stream_update(
self, claim_id, bid=None, file_path=None,
channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
account_id=None, claim_address=None,
preview=False, **kwargs):
"""
Update an existing stream claim and if a new file is provided announce it to lbrynet.
Usage:
stream_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] [--file_path=<file_path>]
[--tags=<tags>...] [--clear_tags]
[--languages=<languages>...] [--clear_languages]
[--locations=<locations>...] [--clear_locations]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
[--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
[--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
[--release_time=<release_time>] [--stream_type=<stream_type>]
[--video_width=<video_width>] [--video_height=<video_height>] [--video_duration=<video_duration>]
[--image_width=<image_width>] [--image_height=<image_height>] [--audio_duration=<audio_duration>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>] [--clear_channel]
[--channel_account_id=<channel_account_id>...]
[--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]
Options:
--claim_id=<claim_id> : (str) id of the stream claim to update
--bid=<bid> : (decimal) amount to back the claim
--file_path=<file_path> : (str) path to file to be associated with name.
--fee_currency=<fee_currency> : (string) specify fee currency
--fee_amount=<fee_amount> : (decimal) content download fee
--fee_address=<fee_address> : (str) address where to send fee payments, will use
value from --claim_address if not provided
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--author=<author> : (str) author of the publication. The usage for this field is not
the same as for channels. The author field is used to credit an author
who is not the publisher and is not represented by the channel. For
example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
be published to a channel such as '@classics', or to no channel at all
--clear_tags : (bool) clear existing tags (prior to adding new ones)
--tags=<tags> : (list) add content tags
--clear_languages : (bool) clear existing languages (prior to adding new ones)
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--clear_locations : (bool) clear existing locations (prior to adding new ones)
--locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--license=<license> : (str) publication license
--license_url=<license_url> : (str) publication license url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
--stream_type=<stream_type> : (str) type of stream
--image_width=<image_width> : (int) image width
--image_height=<image_height> : (int) image height
--video_width=<video_width> : (int) video width
--video_height=<video_height> : (int) video height
--video_duration=<video_duration> : (int) video duration in seconds, an attempt will be made to
calculate this automatically if not provided
--audio_duration=<audio_duration> : (int) audio duration in seconds, an attempt will be made to
calculate this automatically if not provided
--channel_id=<channel_id> : (str) claim id of the publisher channel
--clear_channel : (bool) remove channel signature
--channel_account_id=<channel_id>: (str) one or more account ids for accounts to look in
for channel certificates, defaults to all accounts.
--account_id=<account_id> : (str) account to use for funding the transaction
--claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
"""
account = self.get_account_or_default(account_id)
existing_claims = await account.get_claims(claim_id=claim_id)
if len(existing_claims) != 1:
raise Exception(
f"Can't find the claim '{claim_id}' in account '{account_id}'."
)
old_txo = existing_claims[0]
if not old_txo.claim.is_stream:
raise Exception(
f"A claim with id '{claim_id}' was found but it is not a stream claim."
)
if bid is not None:
amount = self.get_dewies_or_error('bid', bid, positive_value=True)
else:
amount = old_txo.amount
if claim_address is not None:
self.valid_address_or_error(claim_address)
else:
claim_address = old_txo.get_address(account.ledger)
channel = None
if channel_id or channel_name:
channel = await self.get_channel_or_error(channel_account_id, channel_id, channel_name, for_signing=True)
elif old_txo.claim.is_signed and not clear_channel:
channel = old_txo.channel
kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)
old_txo.claim.stream.update(**kwargs)
tx = await Transaction.claim_update(
old_txo, amount, claim_address, [account], account, channel
)
new_txo = tx.outputs[0]
if not preview:
if file_path is not None:
file_stream = await self.stream_manager.create_stream(file_path)
new_txo.claim.stream.sd_hash = file_stream.sd_hash
new_txo.script.generate()
if channel:
new_txo.sign(channel)
await tx.sign([account])
await account.ledger.broadcast(tx)
await self.storage.save_claims([self._old_get_temp_claim_info(
tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
)])
stream_hash = await self.storage.get_stream_hash_for_sd_hash(new_txo.claim.stream.sd_hash)
if stream_hash:
await self.storage.save_content_claim(stream_hash, new_txo.id)
await self.analytics_manager.send_claim_action('publish')
else:
await account.ledger.release_tx(tx)
return tx
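
# Illustrative sketch (assumption): detaching a stream from its channel without
# re-uploading the file, using the clear_channel flag handled above.
#
#   import requests
#
#   requests.post('http://localhost:5279', json={
#       'method': 'stream_update',
#       'params': {'claim_id': '<stream claim id>', 'clear_channel': True}
#   })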
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_stream_abandon(
self, claim_id=None, txid=None, nout=None, account_id=None,
preview=False, blocking=True):
"""
Abandon one of my stream claims.
Usage:
stream_abandon [<claim_id> | --claim_id=<claim_id>]
[<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
[--account_id=<account_id>]
[--preview] [--blocking]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
--txid=<txid> : (str) txid of the claim to abandon
--nout=<nout> : (int) nout of the claim to abandon
--account_id=<account_id> : (str) id of the account to use
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until abandon is in mempool
"""
account = self.get_account_or_default(account_id)
if txid is not None and nout is not None:
claims = await account.get_claims(**{'txo.txid': txid, 'txo.position': nout})
elif claim_id is not None:
claims = await account.get_claims(claim_id=claim_id)
else:
raise Exception('Must specify claim_id, or txid and nout')
if not claims:
raise Exception('No claim found for the specified claim_id or txid:nout')
tx = await Transaction.create(
[Input.spend(txo) for txo in claims], [], [account], account
)
if not preview:
await account.ledger.broadcast(tx)
await self.analytics_manager.send_claim_action('abandon')
if blocking:
await account.ledger.wait(tx)
else:
await account.ledger.release_tx(tx)
return tx
@requires(WALLET_COMPONENT)
def jsonrpc_stream_list(self, account_id=None, page=None, page_size=None):
"""
List my stream claims.

Usage:
stream_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]

Options:
--account_id=<account_id> : (str) id of the account to query
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
account.get_streams,
account.get_stream_count,
page, page_size
)
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
DHT_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
def jsonrpc_stream_cost_estimate(self, uri):
"""
Get estimated cost for a lbry stream

Usage:
stream_cost_estimate (<uri> | --uri=<uri>)

Options:
--uri=<uri> : (str) uri to use

Returns:
(float) Estimated cost in lbry credits, returns None if uri is not
resolvable
"""
return self.get_est_cost_from_uri(uri)
SUPPORT_DOC = """
Create, list and abandon all types of supports.
"""
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_support_create(self, claim_id, amount, tip=False, account_id=None, preview=False):
"""
Create a support or a tip for a name claim.
Usage:
support_create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
[--tip] [--account_id=<account_id>] [--preview]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to support
--amount=<amount> : (decimal) amount of support
--tip : (bool) send support to claim owner, default: false.
--account_id=<account_id> : (str) id of the account to use
--preview : (bool) do not broadcast the transaction
"""
account = self.get_account_or_default(account_id)
amount = self.get_dewies_or_error("amount", amount)
claim = await account.ledger.get_claim_by_claim_id(claim_id)
claim_name = claim['name']
claim_address = claim['address']
if not tip:
claim_address = await account.receiving.get_or_create_usable_address()
tx = await Transaction.support(
claim_name, claim_id, amount, claim_address, [account], account
)
if not preview:
await tx.sign([account])
await account.ledger.broadcast(tx)
await self.storage.save_supports(claim_id, [{
'txid': tx.id,
'nout': tx.position,
'address': claim_address,
'claim_id': claim_id,
'amount': dewies_to_lbc(amount)
}])
await self.analytics_manager.send_claim_action('new_support')
else:
await account.ledger.release_tx(tx)
return tx
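
# Illustrative sketch (assumption): the only difference between a support and a
# tip in the code above is the destination address — a tip pays the claim owner's
# address, while a plain support pays back into one of your own receiving
# addresses so it stays under your control.
#
#   import requests
#
#   requests.post('http://localhost:5279', json={
#       'method': 'support_create',
#       'params': {'claim_id': '<claim id>', 'amount': '1.0', 'tip': True}
#   })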
@requires(WALLET_COMPONENT)
def jsonrpc_support_list(self, account_id=None, page=None, page_size=None):
"""
List supports and tips in my control.

Usage:
support_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]

Options:
--account_id=<account_id> : (str) id of the account to query
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
account.get_supports,
account.get_support_count,
page, page_size
)
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_support_abandon(
self, claim_id=None, txid=None, nout=None, keep=None,
account_id=None, preview=False, blocking=True):
"""
Abandon supports, including tips, of a specific claim, optionally
keeping some amount as supports.
Usage:
support_abandon [--claim_id=<claim_id>] [(--txid=<txid> --nout=<nout>)] [--keep=<keep>]
[--account_id=<account_id>] [--preview] [--blocking]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
--txid=<txid> : (str) txid of the claim to abandon
--nout=<nout> : (int) nout of the claim to abandon
--keep=<keep> : (decimal) amount of lbc to keep as support
--account_id=<account_id> : (str) id of the account to use
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until abandon is in mempool
"""
account = self.get_account_or_default(account_id)
if txid is not None and nout is not None:
supports = await account.get_supports(**{'txo.txid': txid, 'txo.position': nout})
elif claim_id is not None:
supports = await account.get_supports(claim_id=claim_id)
else:
raise Exception('Must specify claim_id, or txid and nout')
if not supports:
raise Exception('No supports found for the specified claim_id or txid:nout')
if keep is not None:
keep = self.get_dewies_or_error('keep', keep)
else:
keep = 0
outputs = []
if keep > 0:
outputs = [
Output.pay_support_pubkey_hash(
keep, supports[0].claim_name, supports[0].claim_id, supports[0].pubkey_hash
)
]
tx = await Transaction.create(
[Input.spend(txo) for txo in supports], outputs, [account], account
)
if not preview:
await account.ledger.broadcast(tx)
await self.analytics_manager.send_claim_action('abandon')
if blocking:
await account.ledger.wait(tx)
else:
await account.ledger.release_tx(tx)
return tx
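
# Illustrative sketch (assumption): abandoning supports on a claim while keeping
# 0.5 LBC staked. Per the logic above, the kept amount is re-created as a single
# support output using the first matching support's name and claim id.
#
#   import requests
#
#   requests.post('http://localhost:5279', json={
#       'method': 'support_abandon',
#       'params': {'claim_id': '<claim id>', 'keep': '0.5'}
#   })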
TRANSACTION_DOC = """
Transaction management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_list(self, account_id=None, page=None, page_size=None):
"""
List transactions belonging to the wallet

Usage:
transaction_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]

Options:
--account_id=<account_id> : (str) id of the account to query
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination

Returns:
(list) List of transactions
{
"claim_info": (list) claim info if in txn [{
"address": (str) address of claim,
"balance_delta": (float) bid amount,
"amount": (float) claim amount,
"claim_id": (str) claim id,
"claim_name": (str) claim name,
"nout": (int) nout
}],
"abandon_info": (list) abandon info if in txn [{
"address": (str) address of abandoned claim,
"balance_delta": (float) returned amount,
"amount": (float) claim amount,
"claim_id": (str) claim id,
"claim_name": (str) claim name,
"nout": (int) nout
}],
"confirmations": (int) number of confirmations for the txn,
"date": (str) date and time of txn,
"fee": (float) txn fee,
"support_info": (list) support info if in txn [{
"address": (str) address of support,
"balance_delta": (float) support amount,
"amount": (float) support amount,
"claim_id": (str) claim id,
"claim_name": (str) claim name,
"is_tip": (bool),
"nout": (int) nout
}],
"timestamp": (int) timestamp,
"txid": (str) txn id,
"update_info": (list) update info if in txn [{
"address": (str) address of claim,
"balance_delta": (float) credited/debited,
"amount": (float) absolute amount,
"claim_id": (str) claim id,
"claim_name": (str) claim name,
"nout": (int) nout
}],
"value": (float) value of txn
}
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
self.wallet_manager.get_history,
self.ledger.db.get_transaction_count,
page, page_size, account=account
)
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_show(self, txid):
"""
Get a decoded transaction from a txid

Usage:
transaction_show (<txid> | --txid=<txid>)

Options:
--txid=<txid> : (str) txid of the transaction

Returns:
(dict) JSON formatted transaction
"""
return self.wallet_manager.get_transaction(txid)
UTXO_DOC = """
Unspent transaction management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_utxo_list(self, account_id=None, page=None, page_size=None):
"""
List unspent transaction outputs
Usage:
utxo_list [<account_id> | --account_id=<account_id>]
[--page=<page>] [--page_size=<page_size>]
Options:
--account_id=<account_id> : (str) id of the account to query
--page=<page> : (int) page to return during pagination
--page_size=<page_size> : (int) number of items on page during pagination
Returns:
(list) List of unspent transaction outputs (UTXOs)
[
{
"address": (str) the output address
"amount": (float) unspent amount
"height": (int) block height
"is_claim": (bool) is the tx a claim
"is_coinbase": (bool) is the tx a coinbase tx
"is_support": (bool) is the tx a support
"is_update": (bool) is the tx an update
"nout": (int) nout of the output
"txid": (str) txid of the output
},
...
]
"""
account = self.get_account_or_default(account_id)
return maybe_paginate(
account.get_utxos,
account.get_utxo_count,
page, page_size
)
@requires(WALLET_COMPONENT)
def jsonrpc_utxo_release(self, account_id=None):
"""
When spending a UTXO it is locally locked to prevent double spends;
occasionally this can result in a UTXO being locked which ultimately
did not get spent (failed to broadcast, spend transaction was not
accepted by blockchain node, etc). This command releases the lock
on all UTXOs in your account.
Usage:
utxo_release [<account_id> | --account_id=<account_id>]
Options:
--account_id=<account_id> : (str) id of the account to query
Returns:
None
"""
return self.get_account_or_default(account_id).release_all_outputs()
@requires(WALLET_COMPONENT)
def jsonrpc_block_show(self, blockhash=None, height=None):
"""
Get contents of a block

Usage:
block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>)

Options:
--blockhash=<blockhash> : (str) hash of the block to look up
--height=<height> : (int) height of the block to look up

Returns:
(dict) Requested block
"""
return self.wallet_manager.get_block(blockhash, height)
BLOB_DOC = """
Blob management.
"""
@requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_blob_get(self, blob_hash, timeout=None, read=False):
"""
Download and return a blob
Usage:
blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read]
Options:
--blob_hash=<blob_hash> : (str) blob hash of the blob to get
--timeout=<timeout> : (int) timeout in number of seconds
--read : (bool) return the blob file contents instead of a status message
Returns:
(str) Success/Fail message or (dict) decoded data
"""
blob = await download_blob(asyncio.get_event_loop(), self.conf, self.blob_manager, self.dht_node, blob_hash)
if read:
with open(blob.file_path, 'rb') as handle:
return handle.read().decode()
else:
return "Downloaded blob %s" % blob_hash
@requires(BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_blob_delete(self, blob_hash):
"""
Delete a blob
Usage:
blob_delete (<blob_hash> | --blob_hash=<blob_hash>)
Options:
--blob_hash=<blob_hash> : (str) blob hash of the blob to delete
Returns:
(str) Success/fail message
"""
if not blob_hash or not is_valid_blobhash(blob_hash):
return f"Invalid blob hash to delete '{blob_hash}'"
streams = self.stream_manager.get_filtered_streams(sd_hash=blob_hash)
if streams:
await self.stream_manager.delete_stream(streams[0])
else:
await self.blob_manager.delete_blobs([blob_hash])
return "Deleted %s" % blob_hash
PEER_DOC = """
DHT / Blob Exchange peer commands.
"""
@requires(DHT_COMPONENT)
async def jsonrpc_peer_list(self, blob_hash, search_bottom_out_limit=None):
"""
Get peers for blob hash
Usage:
peer_list (<blob_hash> | --blob_hash=<blob_hash>)
[<search_bottom_out_limit> | --search_bottom_out_limit=<search_bottom_out_limit>]
Options:
--blob_hash=<blob_hash> : (str) find available peers for this blob hash
--search_bottom_out_limit=<search_bottom_out_limit> : (int) the number of search probes in a row
that don't find any new peers
before giving up and returning
Returns:
(list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>,
'node_id': <peer node id>}
"""
if not is_valid_blobhash(blob_hash):
raise Exception("invalid blob hash")
if search_bottom_out_limit is not None:
search_bottom_out_limit = int(search_bottom_out_limit)
if search_bottom_out_limit <= 0:
raise Exception("invalid bottom out limit")
else:
search_bottom_out_limit = 4
peers = []
async for new_peers in self.dht_node.get_iterative_value_finder(unhexlify(blob_hash.encode()), max_results=1,
bottom_out_limit=search_bottom_out_limit):
peers.extend(new_peers)
results = [
{
"node_id": hexlify(peer.node_id).decode(),
"address": peer.address,
"udp_port": peer.udp_port,
"tcp_port": peer.tcp_port,
}
for peer in peers
]
return results
@requires(DATABASE_COMPONENT)
async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
"""
Announce blobs to the DHT
Usage:
blob_announce (<blob_hash> | --blob_hash=<blob_hash>
| --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)
Options:
--blob_hash=<blob_hash> : (str) announce a blob, specified by blob_hash
--stream_hash=<stream_hash> : (str) announce all blobs associated with
stream_hash
--sd_hash=<sd_hash> : (str) announce all blobs associated with
sd_hash and the sd_hash itself
Returns:
(bool) true if successful
"""
blob_hashes = []
if blob_hash:
blob_hashes.append(blob_hash)
elif stream_hash or sd_hash:
if sd_hash and stream_hash:
raise Exception("either the sd hash or the stream hash should be provided, not both")
if sd_hash:
stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
blob_hashes.extend(blob.blob_hash for blob in blobs if blob.blob_hash is not None)
else:
raise Exception('single argument must be specified')
await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
return True
@requires(BLOB_COMPONENT, WALLET_COMPONENT)
async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
finished=None, page_size=None, page=None):
"""
Returns blob hashes. If no filters are given, returns all blobs known by the blob manager
Usage:
blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
[<stream_hash> | --stream_hash=<stream_hash>]
[<sd_hash> | --sd_hash=<sd_hash>]
[<page_size> | --page_size=<page_size>]
[<page> | --page=<page>]
Options:
--needed : (bool) only return needed blobs
--finished : (bool) only return finished blobs
--uri=<uri> : (str) filter blobs by stream in a uri
--stream_hash=<stream_hash> : (str) filter blobs by stream hash
--sd_hash=<sd_hash> : (str) filter blobs by sd hash
--page_size=<page_size> : (int) results page size
--page=<page> : (int) page of results to return
Returns:
(list) List of blob hashes
"""
if uri or stream_hash or sd_hash:
if uri:
metadata = (await self.resolve(uri))[uri]
sd_hash = utils.get_sd_hash(metadata)
stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
elif stream_hash:
sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
elif sd_hash:
stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
if sd_hash:
blobs = [sd_hash]
else:
blobs = []
if stream_hash:
blobs.extend([b.blob_hash for b in (await self.storage.get_blobs_for_stream(stream_hash))[:-1]])
else:
blobs = list(self.blob_manager.completed_blob_hashes)
if needed:
blobs = [blob_hash for blob_hash in blobs if not self.blob_manager.get_blob(blob_hash).get_is_verified()]
if finished:
blobs = [blob_hash for blob_hash in blobs if self.blob_manager.get_blob(blob_hash).get_is_verified()]
page_size = page_size or len(blobs)
page = page or 0
start_index = page * page_size
stop_index = start_index + page_size
return blobs[start_index:stop_index]
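
# The slice above implements zero-based pagination (page defaults to 0), unlike
# the 1-based pages used by claim_search. A standalone check of the arithmetic:
#
#   blobs = list(range(25))
#   page, page_size = 2, 10
#   assert blobs[page * page_size:(page + 1) * page_size] == [20, 21, 22, 23, 24]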
@requires(BLOB_COMPONENT)
async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None):
"""
Reflects specified blobs
Usage:
blob_reflect (<blob_hashes>...) [--reflector_server=<reflector_server>]
Options:
--reflector_server=<reflector_server> : (str) reflector address
Returns:
(list) reflected blob hashes
"""
raise NotImplementedError()
@requires(BLOB_COMPONENT)
async def jsonrpc_blob_reflect_all(self):
"""
Reflects all saved blobs
Usage:
blob_reflect_all
Options:
None
Returns:
(bool) true if successful
"""
raise NotImplementedError()
@requires(STREAM_MANAGER_COMPONENT)
async def jsonrpc_file_reflect(self, **kwargs):
"""
Reflect all the blobs in a file matching the filter criteria
Usage:
file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
[--stream_hash=<stream_hash>] [--rowid=<rowid>]
[--reflector=<reflector>]
Options:
--sd_hash=<sd_hash> : (str) get file with matching sd hash
--file_name=<file_name> : (str) get file with matching file name in the
downloads folder
--stream_hash=<stream_hash> : (str) get file with matching stream hash
--rowid=<rowid> : (int) get file with matching row id
--reflector=<reflector> : (str) reflector server, ip address or url
by default choose a server from the config
Returns:
(list) list of blobs reflected
"""
        # parse the documented --reflector option as "host" or "host:port",
        # falling back to a random server from the config; pop() keeps the
        # option out of the stream filter kwargs below. The 5566 fallback is
        # an assumption based on the conventional reflector port.
        reflector = kwargs.pop('reflector', None)
        if reflector:
            server, _, port = reflector.partition(':')
            port = int(port) if port else 5566
        else:
            server, port = random.choice(self.conf.reflector_servers)
reflected = await asyncio.gather(*[
stream.upload_to_reflector(server, port)
for stream in self.stream_manager.get_filtered_streams(**kwargs)
])
total = []
for reflected_for_stream in reflected:
total.extend(reflected_for_stream)
return total
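    # Usage sketch (the sd hash is hypothetical): reflecting the blobs of a
    # single stream through this method.
    #
    #   reflected = await daemon.jsonrpc_file_reflect(sd_hash="<hex sd hash>")
    #   # -> list of blob hashes that were uploaded to the reflector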
@requires(DHT_COMPONENT)
async def jsonrpc_peer_ping(self, node_id, address, port):
"""
        Send a kademlia ping to the specified peer. The peer is pinged directly at the
        given address and port; an error is returned if it does not respond in time.
Usage:
peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>)
Options:
None
Returns:
(str) pong, or {'error': <error message>} if an error is encountered
"""
        if not (node_id and address and port):
            return {'error': 'peer not found'}
        peer = self.component_manager.peer_manager.get_kademlia_peer(
            unhexlify(node_id), address, udp_port=int(port)
        )
        try:
            return await self.dht_node.protocol.get_rpc_peer(peer).ping()
        except asyncio.TimeoutError:
            return {'error': 'timeout'}
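    # Usage sketch (node id, address, and port are hypothetical): pinging a
    # known DHT peer directly.
    #
    #   pong = await daemon.jsonrpc_peer_ping(
    #       node_id="<hex node id>", address="10.0.0.2", port=4444
    #   )
    #   # -> the peer's pong response, or {'error': 'timeout'}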
@requires(DHT_COMPONENT)
def jsonrpc_routing_table_get(self):
"""
Get DHT routing information
Usage:
routing_table_get
Options:
None
Returns:
(dict) dictionary containing routing and peer information
{
"buckets": {
<bucket index>: [
{
"address": (str) peer address,
"udp_port": (int) peer udp port,
"tcp_port": (int) peer tcp port,
"node_id": (str) peer node id,
}
]
},
"node_id": (str) the local dht node id
}
"""
        result = {'buckets': {}}

        for i, bucket in enumerate(self.dht_node.protocol.routing_table.buckets):
            result['buckets'][i] = []
            for peer in bucket.peers:
                result['buckets'][i].append({
                    "address": peer.address,
                    "udp_port": peer.udp_port,
                    "tcp_port": peer.tcp_port,
                    "node_id": hexlify(peer.node_id).decode(),
                })

        result['node_id'] = hexlify(self.dht_node.protocol.node_id).decode()
        return result
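    # Sketch of consuming the result above: counting known peers across all
    # buckets of the routing table.
    #
    #   table = daemon.jsonrpc_routing_table_get()
    #   peer_count = sum(len(peers) for peers in table['buckets'].values())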
    def valid_address_or_error(self, address):
        try:
            # use an explicit check rather than `assert`, which is skipped
            # when python runs with -O
            if not self.ledger.is_valid_address(address):
                raise ValueError()
        except Exception:
            raise Exception(f"'{address}' is not a valid address")
@staticmethod
def valid_stream_name_or_error(name: str):
try:
if not name:
raise Exception(
"Stream name cannot be blank."
)
parsed = parse_lbry_uri(name)
if parsed.is_channel:
raise Exception(
"Stream names cannot start with '@' symbol. This is reserved for channels claims."
)
if parsed.name != name:
raise Exception(
"Stream name has invalid characters."
)
except (TypeError, URIParseError):
raise Exception("Invalid stream name.")
@staticmethod
def valid_channel_name_or_error(name: str):
try:
if not name:
raise Exception(
"Channel name cannot be blank."
)
parsed = parse_lbry_uri(name)
if not parsed.is_channel:
raise Exception("Channel names must start with '@' symbol.")
if parsed.name != name:
raise Exception("Channel name has invalid character")
except (TypeError, URIParseError):
raise Exception("Invalid channel name.")
def get_fee_address(self, kwargs: dict, claim_address: str) -> str:
if 'fee_address' in kwargs:
self.valid_address_or_error(kwargs['fee_address'])
return kwargs['fee_address']
return claim_address
async def get_receiving_address(self, address: str, account: LBCAccount) -> str:
if address is None:
return await account.receiving.get_or_create_usable_address()
self.valid_address_or_error(address)
return address
    async def get_channel_or_none(self, account_ids: List[str], channel_id: str = None, channel_name: str = None,
                                  for_signing: bool = False) -> Optional[Output]:
        if channel_id is not None or channel_name is not None:
            return await self.get_channel_or_error(account_ids, channel_id, channel_name, for_signing)
async def get_channel_or_error(self, account_ids: List[str], channel_id: str = None, channel_name: str = None,
for_signing: bool = False) -> Output:
if channel_id:
key, value = 'id', channel_id
elif channel_name:
key, value = 'name', channel_name
else:
raise ValueError("Couldn't find channel because a channel_id or channel_name was not provided.")
for account in self.get_accounts_or_all(account_ids):
            # fetch up to two matches so the ambiguity check below can trigger
            channels = await account.get_channels(**{f'claim_{key}': value}, limit=2)
if len(channels) == 1:
if for_signing and channels[0].private_key is None:
raise Exception(f"Couldn't find private key for {key} '{value}'. ")
return channels[0]
elif len(channels) > 1:
raise ValueError(
f"Multiple channels found with channel_{key} '{value}', "
f"pass a channel_id to narrow it down."
)
raise ValueError(f"Couldn't find channel with channel_{key} '{value}'.")
def get_account_or_default(self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
if account_id is None:
return self.default_account
return self.get_account_or_error(account_id, argument_name, lbc_only)
def get_accounts_or_all(self, account_ids: List[str]) -> List[LBCAccount]:
return [
self.get_account_or_error(account_id)
for account_id in account_ids
] if account_ids else self.default_wallet.accounts
    def get_account_or_error(
            self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
for account in self.default_wallet.accounts:
if account.id == account_id:
if lbc_only and not isinstance(account, LBCAccount):
raise ValueError(
"Found '{}', but it's an {} ledger account. "
"'{}' requires specifying an LBC ledger account."
.format(account_id, account.ledger.symbol, argument_name)
)
return account
raise ValueError(f"Couldn't find account: {account_id}.")
@staticmethod
def get_dewies_or_error(argument: str, lbc: str, positive_value=False):
try:
dewies = lbc_to_dewies(lbc)
if positive_value and dewies <= 0:
raise ValueError(f"'{argument}' value must be greater than 0.0")
return dewies
except ValueError as e:
raise ValueError(f"Invalid value for '{argument}': {e.args[0]}")
async def resolve(self, *uris, **kwargs):
page = kwargs.get('page', 0)
page_size = kwargs.get('page_size', 10)
ledger: MainNetLedger = self.default_account.ledger
results = await ledger.resolve(page, page_size, *uris)
if 'error' not in results:
await self.storage.save_claims_for_resolve([
value for value in results.values() if 'error' not in value
])
return results
async def get_claims_for_name(self, name: str):
response = await self.ledger.network.get_claims_for_name(name)
resolutions = await self.resolve(*(f"{claim['name']}#{claim['claim_id']}" for claim in response['claims']))
response['claims'] = [value.get('claim', value.get('certificate')) for value in resolutions.values()]
return response
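    # Usage sketch (the URI is hypothetical): resolving a single URI and
    # reading the claim from the result, mirroring how jsonrpc_blob_list
    # uses resolve above.
    #
    #   results = await self.resolve("lbry://some-stream")
    #   metadata = results["lbry://some-stream"]
    #   claim = metadata.get("claim")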
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
return {
"claim_id": txo.claim_id,
"name": name,
"amount": bid,
"address": address,
"txid": tx.id,
"nout": txo.position,
"value": claim_dict,
"height": -1,
"claim_sequence": -1,
}
def loggly_time_string(dt):
formatted_dt = dt.strftime("%Y-%m-%dT%H:%M:%S")
    # append zero-padded milliseconds as a fractional-seconds suffix
    milliseconds = f".{dt.microsecond // 1000:03d}"
return quote(formatted_dt + milliseconds + "Z")
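# Example of the resulting format, assuming a datetime of
# 2017-01-02 20:52:24.123456 (the ":" characters are percent-encoded
# by quote):
#
#   loggly_time_string(dt)  # -> "2017-01-02T20%3A52%3A24.123Z"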
def get_loggly_query_string(installation_id):
base_loggly_search_url = "https://lbry.loggly.com/search#"
now = utils.now()
yesterday = now - utils.timedelta(days=1)
params = {
'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
'from': loggly_time_string(yesterday),
'to': loggly_time_string(now)
}
data = urlencode(params)
return base_loggly_search_url + data
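# Usage sketch (the installation id is hypothetical). The id is truncated to
# SHORT_ID_LEN characters and used as a wildcard search term; urlencode
# percent-encodes the ':' and '*' characters in the value:
#
#   get_loggly_query_string("abcdef1234567890")
#   # -> "https://lbry.loggly.com/search#terms=json.installation_id%3A..."
#   #    with 'from' and 'to' covering the last 24 hours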