lbry-sdk/lbrynet/extras/daemon/Daemon.py

2992 lines
124 KiB
Python
Raw Normal View History

2016-02-29 19:25:47 +01:00
import os
2019-01-22 23:44:17 +01:00
import asyncio
import logging
import json
2019-02-11 00:36:21 +01:00
import time
2019-01-22 23:44:17 +01:00
import inspect
import typing
import aiohttp
import base58
2019-01-30 20:59:48 +01:00
import random
2019-01-22 23:44:17 +01:00
from urllib.parse import urlencode, quote
from typing import Callable, Optional, List
from binascii import hexlify, unhexlify
from traceback import format_exc
2019-01-22 23:44:17 +01:00
from aiohttp import web
from functools import wraps
2019-03-11 14:52:35 +01:00
from torba.client.wallet import Wallet
2018-11-04 07:24:41 +01:00
from torba.client.baseaccount import SingleKey, HierarchicalDeterministic
2018-07-05 04:16:02 +02:00
2019-03-24 21:55:04 +01:00
from lbrynet import utils
from lbrynet.conf import Config, Setting
2018-11-12 20:45:41 +01:00
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.blob_exchange.downloader import download_blob
2019-01-26 02:42:05 +01:00
from lbrynet.error import InsufficientFundsError, DownloadSDTimeout, ComponentsNotStarted
from lbrynet.error import NullFundsError, NegativeFundsError, ComponentStartConditionNotMet
from lbrynet.extras import system_info
2019-01-22 23:44:17 +01:00
from lbrynet.extras.daemon import analytics
2018-11-09 20:02:03 +01:00
from lbrynet.extras.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
2019-01-22 23:44:17 +01:00
from lbrynet.extras.daemon.Components import STREAM_MANAGER_COMPONENT
from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, UPNP_COMPONENT
2018-11-09 20:02:03 +01:00
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
2019-01-22 23:44:17 +01:00
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet.extras.daemon.undecorated import undecorated
2019-03-24 21:55:04 +01:00
from lbrynet.wallet.transaction import Transaction, Output
2019-03-20 06:46:23 +01:00
from lbrynet.wallet.account import Account as LBCAccount, validate_claim_id
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.schema.claim import Claim
from lbrynet.schema.uri import parse_lbry_uri, URIParseError
2018-12-13 04:32:44 +01:00
2019-01-22 23:44:17 +01:00
if typing.TYPE_CHECKING:
from lbrynet.blob.blob_manager import BlobFileManager
from lbrynet.dht.node import Node
from lbrynet.extras.daemon.Components import UPnPComponent
from lbrynet.extras.daemon.exchange_rate_manager import ExchangeRateManager
from lbrynet.extras.daemon.storage import SQLiteStorage
2019-03-20 06:46:23 +01:00
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.ledger import MainNetLedger
2019-01-22 23:44:17 +01:00
from lbrynet.stream.stream_manager import StreamManager
2017-04-11 04:47:54 +02:00
log = logging.getLogger(__name__)
2018-12-13 04:32:44 +01:00
def requires(*components, **conditions):
    """Decorator: ensure the named components (and optional named start
    conditions) are running before the wrapped API method executes.

    Raises:
        SyntaxError: if any keyword other than ``conditions`` is passed.
        ComponentStartConditionNotMet: if a named condition evaluates false.
        ComponentsNotStarted: if any required component is not yet running.
    """
    if conditions and list(conditions.keys()) != ["conditions"]:
        raise SyntaxError("invalid conditions argument")
    condition_names = conditions.get("conditions", [])

    def _wrap(method):
        @wraps(method)
        def _inner(*args, **kwargs):
            manager = args[0].component_manager
            for name in condition_names:
                passed, error_message = manager.evaluate_condition(name)
                if not passed:
                    raise ComponentStartConditionNotMet(error_message)
            if not manager.all_components_running(*components):
                raise ComponentsNotStarted(
                    "the following required components have not yet started: "
                    "%s" % json.dumps(components)
                )
            return method(*args, **kwargs)
        return _inner
    return _wrap
def deprecated(new_command=None):
    """Mark a jsonrpc method as deprecated, optionally naming its replacement."""
    def _mark(func):
        func.new_command = new_command
        func._deprecated = True
        return func
    return _mark
2016-06-28 20:28:59 +02:00
# Startup / download state codes exposed through the API.
INITIALIZING_CODE = 'initializing'
# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
# (code, human-readable message/template) pairs describing stream progress.
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

# Connection status codes and their user-facing messages (see jsonrpc_status).
CONNECTION_STATUS_CONNECTED = 'connected'
CONNECTION_STATUS_NETWORK = 'network_connection'
CONNECTION_MESSAGES = {
    CONNECTION_STATUS_CONNECTED: 'No connection problems detected',
    CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted",
}

SHORT_ID_LEN = 20
# NOTE(review): presumably an LBC cap used when estimating claim-update fees --
# not referenced in this chunk, confirm against the rest of the file
MAX_UPDATE_FEE_ESTIMATE = 0.3
2016-12-19 19:27:45 +01:00
2018-08-16 01:23:06 +02:00
2018-10-16 21:04:20 +02:00
async def maybe_paginate(get_records: Callable, get_record_count: Callable,
                         page: Optional[int], page_size: Optional[int], **constraints):
    """Fetch records, applying pagination when both *page* and *page_size* are given.

    When paginating, ``offset``/``limit`` are injected into *constraints* and a
    dict with ``items``, ``total_pages``, ``page`` and ``page_size`` is returned;
    otherwise the raw result of ``get_records(**constraints)`` is returned.
    """
    if None not in (page, page_size):
        constraints.update({
            "offset": page_size * (page - 1),
            "limit": page_size
        })
        return {
            "items": await get_records(**constraints),
            # integer ceiling division; the old int(x / y) float round-trip
            # could lose precision for very large record counts
            "total_pages": ((await get_record_count(**constraints)) + page_size - 1) // page_size,
            "page": page, "page_size": page_size
        }
    return await get_records(**constraints)
def sort_claim_results(claims):
    """Sort *claims* in place by (height, name, claim_id, txid, nout); return the same list."""
    claims[:] = sorted(
        claims,
        key=lambda claim: (claim['height'], claim['name'], claim['claim_id'],
                           claim['txid'], claim['nout'])
    )
    return claims
# Names of the RequiredCondition checks usable with @requires(..., conditions=[...]).
DHT_HAS_CONTACTS = "dht_has_contacts"
WALLET_IS_UNLOCKED = "wallet_is_unlocked"
class DHTHasContacts(RequiredCondition):
    """Start condition: the DHT node knows at least one peer."""
    name = DHT_HAS_CONTACTS
    component = DHT_COMPONENT
    message = "your node is not connected to the dht"

    @staticmethod
    def evaluate(component):
        # any contact at all satisfies the condition
        return bool(len(component.contacts))
class WalletIsUnlocked(RequiredCondition):
    """Start condition: the wallet is not locked."""
    name = WALLET_IS_UNLOCKED
    component = WALLET_COMPONENT
    message = "your wallet is locked"

    @staticmethod
    def evaluate(component):
        locked = component.check_locked()
        return not locked
2018-12-13 04:32:44 +01:00
class JSONRPCError:
    """JSON-RPC 2.0 error object (http://www.jsonrpc.org/specification#error_object)."""
    CODE_PARSE_ERROR = -32700  # Invalid JSON. Error while parsing the JSON text.
    CODE_INVALID_REQUEST = -32600  # The JSON sent is not a valid Request object.
    CODE_METHOD_NOT_FOUND = -32601  # The method does not exist / is not available.
    CODE_INVALID_PARAMS = -32602  # Invalid method parameter(s).
    CODE_INTERNAL_ERROR = -32603  # Internal JSON-RPC error (I think this is like a 500?)
    CODE_APPLICATION_ERROR = -32500  # Generic error with our app??
    CODE_AUTHENTICATION_ERROR = -32501  # Authentication failed

    MESSAGES = {
        CODE_PARSE_ERROR: "Parse Error. Data is not valid JSON.",
        CODE_INVALID_REQUEST: "JSON data is not a valid Request",
        CODE_METHOD_NOT_FOUND: "Method Not Found",
        CODE_INVALID_PARAMS: "Invalid Params",
        CODE_INTERNAL_ERROR: "Internal Error",
        CODE_AUTHENTICATION_ERROR: "Authentication Failed",
    }

    HTTP_CODES = {
        CODE_INVALID_REQUEST: 400,
        CODE_PARSE_ERROR: 400,
        CODE_INVALID_PARAMS: 400,
        CODE_METHOD_NOT_FOUND: 404,
        CODE_INTERNAL_ERROR: 500,
        CODE_APPLICATION_ERROR: 500,
        CODE_AUTHENTICATION_ERROR: 401,
    }

    def __init__(self, message, code=CODE_APPLICATION_ERROR, traceback=None, data=None):
        """Build an error object; a None *message* gets the default for *code*.

        *traceback*, when given, is a newline-joined string; only the non-empty
        lines after the '--- <exception caught here> ---' marker are retained.
        """
        assert isinstance(code, int), "'code' must be an int"
        assert (data is None or isinstance(data, dict)), "'data' must be None or a dict"
        self.code = code
        # idiom: dict.get with a default replaces the membership test + index
        self.message = self.MESSAGES.get(code, "API Error") if message is None else message
        self.data = {} if data is None else data
        self.traceback = []
        if traceback is not None:
            trace_lines = traceback.split("\n")
            for i, line in enumerate(trace_lines):
                if "--- <exception caught here> ---" in line:
                    # slicing past the end safely yields [], so no explicit
                    # bounds guard is needed (behavior unchanged)
                    self.traceback = [j for j in trace_lines[i + 1:] if j]
                    break

    def to_dict(self):
        """Serialize per the JSON-RPC spec; 'data' carries the trimmed traceback."""
        return {
            'code': self.code,
            'message': self.message,
            'data': self.traceback
        }

    @classmethod
    def create_from_exception(cls, message, code=CODE_APPLICATION_ERROR, traceback=None):
        """Alternate constructor used when wrapping a caught exception."""
        return cls(message, code=code, traceback=traceback)
class UnknownAPIMethodError(Exception):
    """Raised when a request names a method the daemon does not expose."""
def jsonrpc_dumps_pretty(obj, **kwargs):
    """Render *obj* as a pretty-printed JSON-RPC 2.0 response envelope string."""
    if isinstance(obj, JSONRPCError):
        envelope = {"jsonrpc": "2.0", "error": obj.to_dict()}
    else:
        envelope = {"jsonrpc": "2.0", "result": obj}
    return json.dumps(envelope, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n"
def trap(err, *to_trap):
    """Delegate to a Twisted-style failure's ``trap`` with the given exception types."""
    err.trap(*to_trap)
class JSONRPCServerType(type):
    """Metaclass that catalogs the API surface of the class it creates.

    Every attribute named ``jsonrpc_<name>`` is registered under ``<name>``
    in ``callable_methods``, or in ``deprecated_methods`` when the function
    carries the ``_deprecated`` marker set by the ``deprecated`` decorator.
    """
    def __new__(mcs, name, bases, newattrs):
        klass = type.__new__(mcs, name, bases, newattrs)
        klass.callable_methods = {}
        klass.deprecated_methods = {}
        prefix = "jsonrpc_"
        for methodname in dir(klass):
            if methodname.startswith(prefix):
                method = getattr(klass, methodname)
                # fix: strip the prefix by slicing; the old split("jsonrpc_")[1]
                # returned the wrong piece for names containing the prefix twice
                command = methodname[len(prefix):]
                if hasattr(method, '_deprecated'):
                    klass.deprecated_methods[command] = method
                else:
                    klass.callable_methods[command] = method
        return klass
class Daemon(metaclass=JSONRPCServerType):
"""
2016-03-24 03:27:48 +01:00
LBRYnet daemon, a jsonrpc interface to lbry functions
"""
2018-12-13 04:32:44 +01:00
2019-01-24 00:04:16 +01:00
def __init__(self, conf: Config, component_manager: typing.Optional[ComponentManager] = None):
2019-01-21 21:55:50 +01:00
self.conf = conf
self._node_id = None
self._installation_id = None
self.session_id = base58.b58encode(utils.generate_id()).decode()
self.analytics_manager = analytics.AnalyticsManager(conf, self.installation_id, self.session_id)
2018-12-13 04:32:44 +01:00
self.component_manager = component_manager or ComponentManager(
conf, analytics_manager=self.analytics_manager,
skip_components=conf.components_to_skip or []
2018-12-13 04:32:44 +01:00
)
self.component_startup_task = None
2019-02-03 22:19:29 +01:00
self._connection_status: typing.Tuple[float, bool] = [self.component_manager.loop.time(), False]
self.stop_event = asyncio.Event()
2016-05-14 23:36:30 +02:00
2019-01-09 05:54:18 +01:00
logging.getLogger('aiohttp.access').setLevel(logging.WARN)
app = web.Application()
app.router.add_get('/lbryapi', self.handle_old_jsonrpc)
app.router.add_post('/lbryapi', self.handle_old_jsonrpc)
app.router.add_post('/', self.handle_old_jsonrpc)
self.runner = web.AppRunner(app)
2019-01-22 23:44:17 +01:00
    # Convenience accessors for individual components. Each delegates to the
    # component manager; the Optional annotations suggest a missing/skipped
    # component yields None -- NOTE(review): confirm get_component's contract
    # for skipped or not-yet-started components.
    @property
    def dht_node(self) -> typing.Optional['Node']:
        return self.component_manager.get_component(DHT_COMPONENT)

    @property
    def wallet_manager(self) -> typing.Optional['LbryWalletManager']:
        return self.component_manager.get_component(WALLET_COMPONENT)

    @property
    def storage(self) -> typing.Optional['SQLiteStorage']:
        return self.component_manager.get_component(DATABASE_COMPONENT)

    @property
    def stream_manager(self) -> typing.Optional['StreamManager']:
        return self.component_manager.get_component(STREAM_MANAGER_COMPONENT)

    @property
    def exchange_rate_manager(self) -> typing.Optional['ExchangeRateManager']:
        return self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT)

    @property
    def blob_manager(self) -> typing.Optional['BlobFileManager']:
        return self.component_manager.get_component(BLOB_COMPONENT)

    @property
    def upnp(self) -> typing.Optional['UPnPComponent']:
        return self.component_manager.get_component(UPNP_COMPONENT)
2018-12-13 04:32:44 +01:00
2019-01-21 21:55:50 +01:00
    @classmethod
    def get_api_definitions(cls):
        """Build a registry of API groups and commands via class introspection.

        Groups come from ``*_DOC`` class attributes; commands from the
        ``jsonrpc_*`` methods. A command's group is the part of its name
        before the first underscore, unless listed in ``not_grouped``.
        """
        prefix = 'jsonrpc_'
        # commands that intentionally do not belong to any group
        not_grouped = ['block_show', 'report_bug', 'routing_table_get']
        api = {
            'groups': {
                group_name[:-len('_DOC')].lower(): getattr(cls, group_name).strip()
                for group_name in dir(cls) if group_name.endswith('_DOC')
            },
            'commands': {}
        }
        for jsonrpc_method in dir(cls):
            if jsonrpc_method.startswith(prefix):
                full_name = jsonrpc_method[len(prefix):]
                method = getattr(cls, jsonrpc_method)
                if full_name in not_grouped:
                    name_parts = [full_name]
                else:
                    # split once: everything before the first '_' is the group
                    name_parts = full_name.split('_', 1)
                if len(name_parts) == 1:
                    group = None
                    name, = name_parts
                elif len(name_parts) == 2:
                    group, name = name_parts
                    assert group in api['groups'],\
                        f"Group {group} does not have doc string for command {full_name}."
                else:
                    raise NameError(f'Could not parse method name: {jsonrpc_method}')
                api['commands'][full_name] = {
                    'api_method_name': full_name,
                    'name': name,
                    'group': group,
                    'doc': method.__doc__,
                    'method': method,
                }
                if hasattr(method, '_deprecated'):
                    api['commands'][full_name]['replaced_by'] = method.new_command
        for command in api['commands'].values():
            if 'replaced_by' in command:
                # resolve the replacement's name into its full command entry
                command['replaced_by'] = api['commands'][command['replaced_by']]
        return api
2019-01-21 21:55:50 +01:00
    @property
    def db_revision_file_path(self):
        # marker file recording the current database schema revision
        return os.path.join(self.conf.data_dir, 'db_revision')
@property
def installation_id(self):
install_id_filename = os.path.join(self.conf.data_dir, "install_id")
if not self._installation_id:
if os.path.isfile(install_id_filename):
with open(install_id_filename, "r") as install_id_file:
self._installation_id = str(install_id_file.read()).strip()
if not self._installation_id:
self._installation_id = base58.b58encode(utils.generate_id()).decode()
with open(install_id_filename, "w") as install_id_file:
install_id_file.write(self._installation_id)
return self._installation_id
def ensure_data_dir(self):
if not os.path.isdir(self.conf.data_dir):
os.makedirs(self.conf.data_dir)
if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")):
os.makedirs(os.path.join(self.conf.data_dir, "blobfiles"))
return self.conf.data_dir
def ensure_wallet_dir(self):
if not os.path.isdir(self.conf.wallet_dir):
os.makedirs(self.conf.wallet_dir)
def ensure_download_dir(self):
if not os.path.isdir(self.conf.download_dir):
os.makedirs(self.conf.download_dir)
2019-02-03 22:19:29 +01:00
    async def update_connection_status(self):
        # probe the network and record (check time, online?) as a tuple
        connected = await utils.async_check_connection()
        self._connection_status = (self.component_manager.loop.time(), connected)

    async def get_connection_status(self) -> str:
        """Return the cached connection status code, re-probing when needed."""
        # last check is fresh (< 300s old): only re-probe if it reported offline
        if self._connection_status[0] + 300 > self.component_manager.loop.time():
            if not self._connection_status[1]:
                await self.update_connection_status()
        else:
            # stale result: always re-probe
            await self.update_connection_status()
        return CONNECTION_STATUS_CONNECTED if self._connection_status[1] else CONNECTION_STATUS_NETWORK
2019-01-23 22:41:14 +01:00
async def start(self):
log.info("Starting LBRYNet Daemon")
log.debug("Settings: %s", json.dumps(self.conf.settings_dict, indent=2))
log.info("Platform: %s", json.dumps(system_info.get_platform(), indent=2))
await self.analytics_manager.send_server_startup()
await self.runner.setup()
2018-12-13 04:32:44 +01:00
try:
site = web.TCPSite(self.runner, self.conf.api_host, self.conf.api_port)
await site.start()
log.info('lbrynet API listening on TCP %s:%i', *site._server.sockets[0].getsockname()[:2])
except OSError as e:
log.error('lbrynet API failed to bind TCP %s for listening. Daemon is already running or this port is '
'already in use by another application.', self.conf.api)
await self.analytics_manager.send_server_startup_error(str(e))
raise SystemExit()
2019-01-22 23:44:17 +01:00
try:
await self.initialize()
2019-01-22 23:44:17 +01:00
except asyncio.CancelledError:
2018-12-13 04:32:44 +01:00
log.info("shutting down before finished starting")
await self.analytics_manager.send_server_startup_error("shutting down before finished starting")
await self.stop()
except Exception as e:
await self.analytics_manager.send_server_startup_error(str(e))
2018-12-13 04:32:44 +01:00
log.exception('Failed to start lbrynet-daemon')
await self.analytics_manager.send_server_startup_success()
2018-12-13 04:32:44 +01:00
    async def initialize(self):
        """Prepare directories, start analytics, then start all components."""
        self.ensure_data_dir()
        self.ensure_wallet_dir()
        self.ensure_download_dir()
        if not self.analytics_manager.is_started:
            await self.analytics_manager.start()
        # keep a handle to the startup task so stop() can cancel it mid-start
        self.component_startup_task = asyncio.create_task(self.component_manager.start())
        await self.component_startup_task
    async def stop(self):
        """Shut down components, the web server, and analytics."""
        if self.component_startup_task is not None:
            if self.component_startup_task.done():
                # startup finished: stop components cleanly
                await self.component_manager.stop()
            else:
                # still starting: cancel rather than stop half-started components
                self.component_startup_task.cancel()
        await self.runner.cleanup()
        if self.analytics_manager.is_started:
            # NOTE(review): not awaited -- presumably a synchronous stop; confirm
            self.analytics_manager.stop()
2018-12-13 04:32:44 +01:00
    async def handle_old_jsonrpc(self, request):
        """aiohttp handler for the legacy JSON-RPC endpoints ('/' and '/lbryapi')."""
        data = await request.json()
        result = await self._process_rpc_call(data)
        ledger = None
        if 'wallet' in self.component_manager.get_components_status():
            # self.ledger only available if wallet component is not skipped
            ledger = self.ledger
        return web.Response(
            text=jsonrpc_dumps_pretty(result, ledger=ledger),
            content_type='application/json'
        )
async def _process_rpc_call(self, data):
2018-12-13 04:32:44 +01:00
args = data.get('params', {})
try:
function_name = data['method']
except KeyError:
return JSONRPCError(
"Missing 'method' value in request.", JSONRPCError.CODE_METHOD_NOT_FOUND
)
2018-12-13 04:32:44 +01:00
try:
fn = self._get_jsonrpc_method(function_name)
except UnknownAPIMethodError:
return JSONRPCError(
f"Invalid method requested: {function_name}.", JSONRPCError.CODE_METHOD_NOT_FOUND
)
2018-12-13 04:32:44 +01:00
2019-01-22 23:44:17 +01:00
if args in ([{}], []):
2018-12-13 04:32:44 +01:00
_args, _kwargs = (), {}
elif isinstance(args, dict):
_args, _kwargs = (), args
elif len(args) == 1 and isinstance(args[0], dict):
# TODO: this is for backwards compatibility. Remove this once API and UI are updated
# TODO: also delete EMPTY_PARAMS then
_args, _kwargs = (), args[0]
elif len(args) == 2 and isinstance(args[0], list) and isinstance(args[1], dict):
_args, _kwargs = args
else:
return JSONRPCError(
f"Invalid parameters format.", JSONRPCError.CODE_INVALID_PARAMS
)
2018-12-13 04:32:44 +01:00
params_error, erroneous_params = self._check_params(fn, _args, _kwargs)
if params_error is not None:
params_error_message = '{} for {} command: {}'.format(
params_error, function_name, ', '.join(erroneous_params)
)
log.warning(params_error_message)
return JSONRPCError(
params_error_message, JSONRPCError.CODE_INVALID_PARAMS
)
2018-12-13 04:32:44 +01:00
try:
result = fn(self, *_args, **_kwargs)
if asyncio.iscoroutine(result):
result = await result
return result
except asyncio.CancelledError:
log.info("cancelled API call for: %s", function_name)
raise
except Exception as e: # pylint: disable=broad-except
2019-01-22 23:44:17 +01:00
log.exception("error handling api request")
return JSONRPCError(
str(e), JSONRPCError.CODE_APPLICATION_ERROR, format_exc()
)
2018-12-13 04:32:44 +01:00
def _verify_method_is_callable(self, function_path):
if function_path not in self.callable_methods:
raise UnknownAPIMethodError(function_path)
def _get_jsonrpc_method(self, function_path):
if function_path in self.deprecated_methods:
new_command = self.deprecated_methods[function_path].new_command
log.warning('API function \"%s\" is deprecated, please update to use \"%s\"',
function_path, new_command)
function_path = new_command
self._verify_method_is_callable(function_path)
return self.callable_methods.get(function_path)
@staticmethod
def _check_params(function, args_tup, args_dict):
argspec = inspect.getfullargspec(undecorated(function))
num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults)
duplicate_params = [
duplicate_param
for duplicate_param in argspec.args[1:len(args_tup) + 1]
if duplicate_param in args_dict
]
if duplicate_params:
return 'Duplicate parameters', duplicate_params
missing_required_params = [
required_param
for required_param in argspec.args[len(args_tup)+1:-num_optional_params]
if required_param not in args_dict
]
if len(missing_required_params):
return 'Missing required parameters', missing_required_params
extraneous_params = [] if argspec.varkw is not None else [
extra_param
for extra_param in args_dict
if extra_param not in argspec.args[1:]
]
if len(extraneous_params):
return 'Extraneous parameters', extraneous_params
return None, None
    # The try/except AttributeError guards below cover the whole attribute
    # chain: if the wallet component is unavailable, any step may raise
    # AttributeError and the property reports None instead.
    @property
    def default_wallet(self):
        try:
            return self.wallet_manager.default_wallet
        except AttributeError:
            return None

    @property
    def default_account(self) -> Optional[LBCAccount]:
        try:
            return self.wallet_manager.default_account
        except AttributeError:
            return None

    @property
    def ledger(self) -> Optional['MainNetLedger']:
        try:
            return self.wallet_manager.default_account.ledger
        except AttributeError:
            return None
2018-07-05 04:16:02 +02:00
2019-01-22 23:44:17 +01:00
    async def get_est_cost_from_uri(self, uri: str) -> typing.Optional[float]:
        """
        Resolve a name and return the estimated stream cost
        """
        resolved = await self.wallet_manager.resolve(uri)
        if resolved:
            claim_response = resolved[uri]
        else:
            claim_response = None

        if claim_response and 'claim' in claim_response:
            if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
                claim_value = Claim.from_bytes(claim_response['claim']['value'])
                if not claim_value.stream.has_fee:
                    return 0.0
                # convert the declared fee into LBC at the current exchange
                # rate, rounded to 5 decimal places
                return round(
                    self.exchange_rate_manager.convert_currency(
                        claim_value.stream.fee.currency, "LBC", claim_value.stream.fee.amount
                    ), 5
                )
            # NOTE(review): a claim with a missing/None 'value' falls through and
            # returns None without logging -- confirm that is intended
        else:
            log.warning("Failed to estimate cost for %s", uri)
        # implicit None when the cost could not be estimated
2017-01-03 20:13:01 +01:00
############################################################################
# #
# JSON-RPC API methods start here #
# #
############################################################################
    def jsonrpc_stop(self):
        """
        Stop lbrynet API server.

        Usage:
            stop

        Options:
            None

        Returns:
            (string) Shutdown message
        """
        log.info("Shutting down lbrynet daemon")
        # signal whatever is awaiting stop_event (the main loop) to exit
        self.stop_event.set()
        return "Shutting down"
2018-12-15 21:31:02 +01:00
    async def jsonrpc_status(self):
        """
        Get daemon status

        Usage:
            status

        Options:
            None

        Returns:
            (dict) lbrynet-daemon status
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'blob_manager': (bool),
                    'blockchain_headers': (bool),
                    'database': (bool),
                    'dht': (bool),
                    'exchange_rate_manager': (bool),
                    'hash_announcer': (bool),
                    'peer_protocol_server': (bool),
                    'stream_manager': (bool),
                    'upnp': (bool),
                    'wallet': (bool),
                },
                'connection_status': {
                    'code': (str) connection status code,
                    'message': (str) connection status message
                },
                'blockchain_headers': {
                    'downloading_headers': (bool),
                    'download_progress': (float) 0-100.0
                },
                'wallet': {
                    'blocks': (int) local blockchain height,
                    'blocks_behind': (int) remote_height - local_height,
                    'best_blockhash': (str) block hash of most recent block,
                    'is_encrypted': (bool),
                    'is_locked': (bool),
                },
                'dht': {
                    'node_id': (str) lbry dht node id - hex encoded,
                    'peers_in_routing_table': (int) the number of peers in the routing table,
                },
                'blob_manager': {
                    'finished_blobs': (int) number of finished blobs in the blob manager,
                },
                'hash_announcer': {
                    'announce_queue_size': (int) number of blobs currently queued to be announced
                },
                'stream_manager': {
                    'managed_files': (int) count of files in the stream manager,
                },
                'upnp': {
                    'aioupnp_version': (str),
                    'redirects': {
                        <TCP | UDP>: (int) external_port,
                    },
                    'gateway': (str) manufacturer and model,
                    'dht_redirect_set': (bool),
                    'peer_redirect_set': (bool),
                    'external_ip': (str) external ip address,
                }
            }
        """

        connection_code = await self.get_connection_status()

        response = {
            'installation_id': self.installation_id,
            'is_running': all(self.component_manager.get_components_status().values()),
            'skipped_components': self.component_manager.skip_components,
            'startup_status': self.component_manager.get_components_status(),
            'connection_status': {
                'code': connection_code,
                'message': CONNECTION_MESSAGES[connection_code],
            },
        }
        # each running component contributes its own status dict under its name
        for component in self.component_manager.components:
            status = await component.get_status()
            if status:
                response[component.component_name] = status
        return response
def jsonrpc_version(self):
"""
2019-01-25 00:22:53 +01:00
Get lbrynet API server version information
2017-05-28 22:01:53 +02:00
Usage:
version
Options:
None
Returns:
2017-03-14 00:14:11 +01:00
(dict) Dictionary of lbry version information
{
2017-03-15 21:31:58 +01:00
'build': (str) build type (e.g. "dev", "rc", "release"),
'ip': (str) remote ip, if available,
2017-03-14 00:14:11 +01:00
'lbrynet_version': (str) lbrynet_version,
'lbryum_version': (str) lbryum_version,
'lbryschema_version': (str) lbryschema_version,
2017-03-15 21:31:58 +01:00
'os_release': (str) os release string
'os_system': (str) os name
'platform': (str) platform string
'processor': (str) processor type,
'python_version': (str) python version,
2017-03-14 00:14:11 +01:00
}
"""
2018-07-20 21:35:09 +02:00
platform_info = system_info.get_platform()
log.info("Get version info: " + json.dumps(platform_info))
2018-12-15 21:31:02 +01:00
return platform_info
2019-03-24 21:55:04 +01:00
    @requires(WALLET_COMPONENT)
    async def jsonrpc_resolve(self, urls: typing.Union[str, list]):
        """
        Get the claim that a URL refers to.

        Usage:
            resolve <urls>...

        Options:
            --urls=<urls>   : (str, list) one or more urls to resolve

        Returns:
            Dictionary of results, keyed by url
            '<url>': {
                    If a resolution error occurs:
                    'error': Error message
                    If the url resolves to a channel or a claim in a channel:
                    'certificate': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the certificate claim,
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}],
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }
                    If the url resolves to a channel:
                    'claims_in_channel': (int) number of claims in the channel,
                    If the url resolves to a claim:
                    'claim': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the claim,
                        'channel_name': (str) channel name if claim is in a channel
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}]
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }
            }
        """

        # accept a single url or a list of urls
        if isinstance(urls, str):
            urls = [urls]
        results = {}
        valid_urls = set()
        for u in urls:
            try:
                parse_lbry_uri(u)
                valid_urls.add(u)
            except URIParseError:
                # report malformed urls in-band instead of failing the batch
                results[u] = {"error": "%s is not a valid url" % u}
        # resolve all syntactically-valid urls in one batched call
        resolved = await self.wallet_manager.resolve(*tuple(valid_urls))
        for resolved_uri in resolved:
            results[resolved_uri] = resolved[resolved_uri]
        return results
    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
              STREAM_MANAGER_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_get(self, uri, file_name=None, timeout=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>]

        Options:
            --uri=<uri>              : (str) uri of the content to download
            --file_name=<file_name>  : (str) specified name for the downloaded file
            --timeout=<timeout>      : (int) download timeout in number of seconds

        Returns:
            (dict) Dictionary containing information about the stream
            {
                'completed': (bool) true if download is completed,
                'file_name': (str) name of file,
                'download_directory': (str) download directory,
                'points_paid': (float) credit paid to download file,
                'stopped': (bool) true if download is stopped,
                'stream_hash': (str) stream hash of file,
                'stream_name': (str) stream name ,
                'suggested_file_name': (str) suggested file name,
                'sd_hash': (str) sd hash of file,
                'download_path': (str) download path of file,
                'mime_type': (str) mime type of file,
                'key': (str) key attached to file,
                'total_bytes': (int) file size in bytes,
                'written_bytes': (int) written size in bytes,
                'blobs_completed': (int) number of fully downloaded blobs,
                'blobs_in_stream': (int) total blobs on stream,
                'status': (str) downloader status,
                'claim_id': (str) claim id,
                'outpoint': (str) claim outpoint string,
                'txid': (str) claim txid,
                'nout': (int) claim nout,
                'metadata': (dict) claim metadata,
                'channel_claim_id': (str) None if claim is not signed
                'channel_name': (str) None if claim is not signed
                'claim_name': (str) claim name
            }
        """
        try:
            stream = await self.stream_manager.download_stream_from_uri(
                uri, self.exchange_rate_manager, file_name, timeout
            )
            if not stream:
                raise DownloadSDTimeout(uri)
        except Exception as e:
            # download failures are reported in-band rather than raised
            log.warning("Error downloading %s: %s", uri, str(e))
            return {"error": str(e)}
        else:
            return stream.as_dict()
2017-01-02 20:52:24 +01:00
2019-01-21 21:55:50 +01:00
    SETTINGS_DOC = """
    Settings management.
    """

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Options:
            None

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return self.conf.settings_dict
    def jsonrpc_settings_set(self, key, value):
        """
        Set daemon settings

        Usage:
            settings_set (<key>) (<value>)

        Options:
            None

        Returns:
            (dict) Updated dictionary of daemon settings
        """
        with self.conf.update_config() as c:
            # look up the Setting descriptor on the Config class so the raw
            # value can be coerced to its proper type before assignment
            attr: Setting = getattr(type(c), key)
            cleaned = attr.deserialize(value)
            setattr(c, key, cleaned)
        return {key: cleaned}
2019-01-23 19:00:58 +01:00
    WALLET_DOC = """
    Wallet management.
    """

    # kept only as a deprecation stub; callers are redirected to account_balance
    @deprecated("account_balance")
    def jsonrpc_wallet_balance(self, address=None):
        """ deprecated """
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_wallet_send(self, amount, address=None, claim_id=None, account_id=None):
    """
    Send credits. If given an address, send credits to it. If given a claim id, send a tip
    to the owner of a claim specified by uri. A tip is a claim support where the recipient
    of the support is the claim address for the claim being supported.

    Usage:
        wallet_send (<amount> | --amount=<amount>)
                    ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>))
                    [--account_id=<account_id>]

    Options:
        --amount=<amount>          : (decimal) amount of credit to send
        --address=<address>        : (str) address to send credits to
        --claim_id=<claim_id>      : (str) claim_id of the claim to send to tip to
        --account_id=<account_id>  : (str) account to fund the transaction

    Returns:
        If sending to an address:
        (dict) Dictionary containing the transaction information
        {
            "hex": (str) raw transaction,
            "inputs": (list) inputs(dict) used for the transaction,
            "outputs": (list) outputs(dict) for the transaction,
            "total_fee": (int) fee in dewies,
            "total_input": (int) total of inputs in dewies,
            "total_output": (int) total of outputs in dewies(input - fees),
            "txid": (str) txid of the transaction,
        }

        If sending a claim tip:
        (dict) Dictionary containing the result of the support
        {
            txid : (str) txid of resulting support claim
            nout : (int) nout of the resulting support claim
            fee : (float) fee paid for the transaction
        }
    """
    amount = self.get_dewies_or_error("amount", amount)
    if not amount:
        raise NullFundsError
    if amount < 0:
        raise NegativeFundsError()
    # Exactly one destination (address or claim) must be supplied.
    if address and claim_id:
        raise Exception("Given both an address and a claim id")
    if not address and not claim_id:
        raise Exception("Not given an address or a claim id")
    if not address:
        # Tip path, retained only for backwards compatibility.
        log.info("This command is deprecated for sending tips, please use the newer claim_tip command")
        return await self.jsonrpc_claim_tip(claim_id=claim_id, amount=amount, account_id=account_id)
    # raises an error if the address is invalid
    self.ledger.is_valid_address(address)
    points = self.wallet_manager.reserve_points(address, amount)
    if points is None:
        raise InsufficientFundsError()
    funding_account = self.get_account_or_default(account_id)
    tx = await self.wallet_manager.send_points_to_address(points, amount, funding_account)
    await self.analytics_manager.send_credits_sent()
    return tx
2019-01-21 21:55:50 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the account_* commands — TODO confirm against the doc tooling.
ACCOUNT_DOC = """
Account management.
"""
@requires("wallet")
def jsonrpc_account_list(self, account_id=None, confirmations=6,
                         include_claims=False, show_seed=False):
    """
    List details of all of the accounts or a specific account.

    Usage:
        account_list [<account_id>] [--confirmations=<confirmations>]
                     [--include_claims] [--show_seed]

    Options:
        --account_id=<account_id>       : (str) If provided only the balance for this
                                          account will be given
        --confirmations=<confirmations> : (int) required confirmations (default: 6)
        --include_claims                : (bool) include claims, requires that a
                                          LBC account is specified (default: false)
        --show_seed                     : (bool) show the seed for the account

    Returns:
        (map) balance of account(s)
    """
    # NOTE(review): include_claims is accepted but not forwarded to
    # get_details/get_detailed_accounts — confirm whether that is intended.
    kwargs = {
        'confirmations': confirmations,
        'show_seed': show_seed
    }
    if account_id:
        return self.get_account_or_error(account_id).get_details(**kwargs)
    else:
        return self.wallet_manager.get_detailed_accounts(**kwargs)
@requires("wallet")
async def jsonrpc_account_balance(self, account_id=None, confirmations=0):
    """
    Return the balance of an account

    Usage:
        account_balance [<account_id> | --account_id=<account_id>]
                        [--confirmations=<confirmations>]

    Options:
        --account_id=<account_id>       : (str) If provided only the balance for this
                                          account will be given. Otherwise default account.
        --confirmations=<confirmations> : (int) Only include transactions with this many
                                          confirmed blocks.

    Returns:
        (decimal) amount of lbry credits in wallet
    """
    # The old usage line advertised an <address> argument this method does not
    # accept (and omitted --confirmations); the CLI parser would have produced
    # an unexpected keyword argument.
    account = self.get_account_or_default(account_id)
    dewies = await account.get_balance(confirmations=confirmations)
    return dewies_to_lbc(dewies)
2017-04-23 19:33:06 +02:00
@requires("wallet")
async def jsonrpc_account_add(
        self, account_name, single_key=False, seed=None, private_key=None, public_key=None):
    """
    Add a previously created account from a seed, private key or public key (read-only).
    Specify --single_key for single address or vanity address accounts.

    Usage:
        account_add (<account_name> | --account_name=<account_name>)
                    (--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
                    [--single_key]

    Options:
        --account_name=<account_name>  : (str) name of the account to add
        --seed=<seed>                  : (str) seed to generate new account from
        --private_key=<private_key>    : (str) private key for new account
        --public_key=<public_key>      : (str) public key for new account
        --single_key                   : (bool) create single key account, default is multi-key

    Returns:
        (map) added account details
    """
    generator = SingleKey.name if single_key else HierarchicalDeterministic.name
    account = LBCAccount.from_dict(
        self.ledger, self.default_wallet, {
            'name': account_name,
            'seed': seed,
            'private_key': private_key,
            'public_key': public_key,
            'address_generator': {'name': generator}
        }
    )
    # Start watching the new account right away when a server is reachable.
    if self.ledger.network.is_connected:
        await self.ledger.subscribe_account(account)
    self.default_wallet.save()
    details = account.to_dict()
    details.pop('certificates', None)
    details['id'] = account.id
    details['status'] = 'added'
    details['is_default'] = self.default_wallet.accounts[0] == account
    return details
@requires("wallet")
async def jsonrpc_account_create(self, account_name, single_key=False):
    """
    Create a new account. Specify --single_key if you want to use
    the same address for all transactions (not recommended).

    Usage:
        account_create (<account_name> | --account_name=<account_name>) [--single_key]

    Options:
        --account_name=<account_name>  : (str) name of the account to create
        --single_key                   : (bool) create single key account, default is multi-key

    Returns:
        (map) new account details
    """
    generator = SingleKey.name if single_key else HierarchicalDeterministic.name
    account = LBCAccount.generate(
        self.ledger, self.default_wallet, account_name, {'name': generator}
    )
    # Start watching the new account right away when a server is reachable.
    if self.ledger.network.is_connected:
        await self.ledger.subscribe_account(account)
    self.default_wallet.save()
    details = account.to_dict()
    details.pop('certificates', None)
    details['id'] = account.id
    details['status'] = 'created'
    details['is_default'] = self.default_wallet.accounts[0] == account
    return details
@requires("wallet")
def jsonrpc_account_remove(self, account_id):
    """
    Remove an existing account.

    Usage:
        account_remove (<account_id> | --account_id=<account_id>)

    Options:
        --account_id=<account_id>  : (str) id of the account to remove

    Returns:
        (map) details of removed account
    """
    # Usage previously said "account", which does not match this command's
    # name and broke the CLI help/parsing for it.
    account = self.get_account_or_error(account_id)
    self.default_wallet.accounts.remove(account)
    self.default_wallet.save()
    result = account.to_dict()
    result['id'] = account.id
    result['status'] = 'removed'
    result.pop('certificates', None)
    return result
@requires("wallet")
def jsonrpc_account_set(
        self, account_id, default=False, new_name=None,
        change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
    """
    Change various settings on an account.

    Usage:
        account_set (<account_id> | --account_id=<account_id>)
            [--default] [--new_name=<new_name>]
            [--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
            [--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]

    Options:
        --account_id=<account_id>       : (str) id of the account to change
        --default                       : (bool) make this account the default
        --new_name=<new_name>           : (str) new name for the account
        --receiving_gap=<receiving_gap> : (int) set the gap for receiving addresses
        --receiving_max_uses=<receiving_max_uses> : (int) set the maximum number of times to
                                                    use a receiving address
        --change_gap=<change_gap>           : (int) set the gap for change addresses
        --change_max_uses=<change_max_uses> : (int) set the maximum number of times to
                                              use a change address

    Returns:
        (map) updated account details
    """
    # Usage previously said "account", which does not match this command's
    # name and broke the CLI help/parsing for it.
    account = self.get_account_or_error(account_id)
    change_made = False

    # Gap/max-use knobs only exist on deterministic (HD) address chains.
    if account.receiving.name == HierarchicalDeterministic.name:
        address_changes = {
            'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
            'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
        }
        for chain_name in address_changes:
            chain = getattr(account, chain_name)
            for attr, value in address_changes[chain_name].items():
                if value is not None:
                    setattr(chain, attr, value)
                    change_made = True

    if new_name is not None:
        account.name = new_name
        change_made = True

    if default:
        # The default account is simply the first one in the wallet.
        self.default_wallet.accounts.remove(account)
        self.default_wallet.accounts.insert(0, account)
        change_made = True

    if change_made:
        account.modified_on = time.time()
        self.default_wallet.save()

    result = account.to_dict()
    result['id'] = account.id
    result.pop('certificates', None)
    result['is_default'] = self.default_wallet.accounts[0] == account
    return result
@requires(WALLET_COMPONENT)
def jsonrpc_account_unlock(self, password, account_id=None):
    """
    Unlock an encrypted account

    Usage:
        account_unlock (<password> | --password=<password>) [<account_id> | --account_id=<account_id>]

    Options:
        --account_id=<account_id>        : (str) id for the account to unlock

    Returns:
        (bool) true if account is unlocked, otherwise false
    """
    target = self.get_account_or_default(account_id, lbc_only=False)
    return self.wallet_manager.unlock_account(password, target)
2018-09-25 15:41:41 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_account_lock(self, account_id=None):
    """
    Lock an unlocked account

    Usage:
        account_lock [<account_id> | --account_id=<account_id>]

    Options:
        --account_id=<account_id>        : (str) id for the account to lock

    Returns:
        (bool) true if account is locked, otherwise false
    """
    target = self.get_account_or_default(account_id, lbc_only=False)
    return self.wallet_manager.lock_account(target)
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
def jsonrpc_account_decrypt(self, account_id=None):
    """
    Decrypt an encrypted account, this will remove the wallet password. The account must be unlocked to decrypt it

    Usage:
        account_decrypt [<account_id> | --account_id=<account_id>]

    Options:
        --account_id=<account_id>  : (str) id for the account to decrypt

    Returns:
        (bool) true if wallet is decrypted, otherwise false
    """
    target = self.get_account_or_default(account_id, lbc_only=False)
    return self.wallet_manager.decrypt_account(target)
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
def jsonrpc_account_encrypt(self, new_password, account_id=None):
    """
    Encrypt an unencrypted account with a password

    Usage:
        account_encrypt (<new_password> | --new_password=<new_password>) [<account_id> | --account_id=<account_id>]

    Options:
        --account_id=<account_id>        : (str) id for the account to encrypt

    Returns:
        (bool) true if wallet is encrypted, otherwise false
    """
    # Usage previously said "wallet_encrypt" (a different command) and the
    # Returns line said "decrypted" — both corrected to match this command.
    return self.wallet_manager.encrypt_account(
        new_password,
        self.get_account_or_default(account_id, lbc_only=False)
    )
@requires("wallet")
def jsonrpc_account_max_address_gap(self, account_id):
    """
    Finds ranges of consecutive addresses that are unused and returns the length
    of the longest such range: for change and receiving address chains. This is
    useful to figure out ideal values to set for 'receiving_gap' and 'change_gap'
    account settings.

    Usage:
        account_max_address_gap (<account_id> | --account_id=<account_id>)

    Options:
        --account_id=<account_id>        : (str) account for which to get max gaps

    Returns:
        (map) maximum gap for change and receiving addresses
    """
    account = self.get_account_or_error(account_id)
    return account.get_max_gap()
@requires("wallet")
def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
                         everything=False, outputs=1, broadcast=False):
    """
    Transfer some amount (or --everything) to an account from another
    account (can be the same account). Amounts are interpreted as LBC.
    You can also spread the transfer across a number of --outputs (cannot
    be used together with --everything).

    Usage:
        account_fund [<to_account> | --to_account=<to_account>]
            [<from_account> | --from_account=<from_account>]
            (<amount> | --amount=<amount> | --everything)
            [<outputs> | --outputs=<outputs>]
            [--broadcast]

    Options:
        --to_account=<to_account>     : (str) send to this account
        --from_account=<from_account> : (str) spend from this account
        --amount=<amount>             : (str) the amount to transfer lbc
        --everything                  : (bool) transfer everything (excluding claims), default: false.
        --outputs=<outputs>           : (int) split payment across many outputs, default: 1.
        --broadcast                   : (bool) actually broadcast the transaction, default: false.

    Returns:
        (map) transaction performing requested action
    """
    receiver = self.get_account_or_default(to_account, 'to_account')
    sender = self.get_account_or_default(from_account, 'from_account')
    # An empty/None amount means "no fixed amount" (used with --everything).
    dewies = self.get_dewies_or_error('amount', amount) if amount else None
    if not isinstance(outputs, int):
        raise ValueError("--outputs must be an integer.")
    if everything and outputs > 1:
        raise ValueError("Using --everything along with --outputs is not supported.")
    return sender.fund(
        to_account=receiver, amount=dewies, everything=everything,
        outputs=outputs, broadcast=broadcast
    )
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_account_send(self, amount, addresses, account_id=None, broadcast=False):
    """
    Send the same number of credits to multiple addresses.

    Usage:
        account_send <amount> <addresses>... [--account_id=<account_id>] [--broadcast]

    Options:
        --account_id=<account_id>  : (str) account to fund the transaction
        --broadcast                : (bool) actually broadcast the transaction, default: false.

    Returns:
    """
    amount = self.get_dewies_or_error("amount", amount)
    if not amount:
        raise NullFundsError
    if amount < 0:
        raise NegativeFundsError()
    # Validate every destination before touching any funds.
    for address in addresses:
        self.ledger.is_valid_address(address)
    account = self.get_account_or_default(account_id)
    tx = await account.send_to_addresses(amount, addresses, broadcast)
    await self.analytics_manager.send_credits_sent()
    return tx
2019-03-12 14:42:28 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the sync_* commands — TODO confirm against the doc tooling.
SYNC_DOC = """
Wallet synchronization.
"""
2019-02-11 00:36:21 +01:00
@requires("wallet")
def jsonrpc_sync_hash(self):
    """
    Deterministic hash of the wallet.

    Usage:
        sync hash

    Options:

    Returns:
        (str) sha256 hash of wallet
    """
    wallet_digest = self.default_wallet.hash
    return hexlify(wallet_digest).decode()
2019-02-11 00:36:21 +01:00
2019-03-11 14:52:35 +01:00
@requires("wallet")
def jsonrpc_sync_apply(self, password, data=None, encrypt_password=None):
    """
    Apply incoming synchronization data, if provided, and then produce a sync hash and
    an encrypted wallet.

    Usage:
        sync apply <password> [--data=<data>] [--encrypt-password=<encrypt_password>]

    Options:
        --password=<password>         : (str) password to decrypt incoming and encrypt outgoing data
        --data=<data>                 : (str) incoming sync data, if any
        --encrypt-password=<encrypt_password> : (str) password to encrypt outgoing data if different
                                                from the decrypt password, used during password changes

    Returns:
        (map) sync hash and data
    """
    if data is not None:
        incoming = Wallet.unpack(password, data)
        for account_data in incoming['accounts']:
            # Accounts are matched to local ones by their public-key address.
            _, _, pubkey = LBCAccount.keys_from_dict(self.ledger, account_data)
            remote_id = pubkey.address
            local_match = next(
                (candidate for candidate in self.default_wallet.accounts
                 if candidate.id == remote_id),
                None
            )
            if local_match is not None:
                local_match.apply(account_data)
            else:
                added = LBCAccount.from_dict(self.ledger, self.default_wallet, account_data)
                if self.ledger.network.is_connected:
                    asyncio.create_task(self.ledger.subscribe_account(added))
        self.default_wallet.save()

    encrypted = self.default_wallet.pack(encrypt_password or password)
    return {
        'hash': self.jsonrpc_sync_hash(),
        'data': encrypted.decode()
    }
2019-01-21 21:55:50 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the address_* commands — TODO confirm against the doc tooling.
ADDRESS_DOC = """
Address management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_address_is_mine(self, address, account_id=None):
    """
    Checks if an address is associated with the current wallet.

    Usage:
        address_is_mine (<address> | --address=<address>)
                        [<account_id> | --account_id=<account_id>]

    Options:
        --address=<address>        : (str) address to check
        --account_id=<account_id>  : (str) id of the account to use

    Returns:
        (bool) true, if address is associated with current wallet
    """
    # Usage previously said "wallet_is_address_mine", which does not match
    # this command's name and broke the CLI help/parsing for it.
    return self.wallet_manager.address_is_mine(
        address, self.get_account_or_default(account_id)
    )
@requires(WALLET_COMPONENT)
def jsonrpc_address_list(self, account_id=None, page=None, page_size=None):
    """
    List account addresses

    Usage:
        address_list [<account_id> | --account_id=<account_id>]
                     [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id>  : (str) id of the account to use
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns:
        List of wallet addresses
    """
    selected = self.get_account_or_default(account_id)
    # maybe_paginate decides between a full listing and a paged one.
    return maybe_paginate(
        selected.get_addresses,
        selected.get_address_count,
        page, page_size
    )
@requires(WALLET_COMPONENT)
def jsonrpc_address_unused(self, account_id=None):
    """
    Return an address containing no balance, will create
    a new address if there is none.

    Usage:
        address_unused [--account_id=<account_id>]

    Options:
        --account_id=<account_id> : (str) id of the account to use

    Returns:
        (str) Unused wallet address in base58
    """
    account = self.get_account_or_default(account_id)
    return account.receiving.get_or_create_usable_address()
2019-01-21 21:55:50 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the file_* commands — TODO confirm against the doc tooling.
FILE_DOC = """
File management.
"""
2019-01-22 23:44:17 +01:00
@requires(STREAM_MANAGER_COMPONENT)
def jsonrpc_file_list(self, sort=None, reverse=False, comparison=None, **kwargs):
    """
    List files limited by optional filters

    Usage:
        file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                  [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                  [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                  [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
                  [--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
                  [--comparison=<comparison>] [--full_status=<full_status>] [--reverse]

    Options:
        --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
        --file_name=<file_name>                : (str) get file with matching file name in the
                                                 downloads folder
        --stream_hash=<stream_hash>            : (str) get file with matching stream hash
        --rowid=<rowid>                        : (int) get file with matching row id
        --claim_id=<claim_id>                  : (str) get file with matching claim id
        --outpoint=<outpoint>                  : (str) get file with matching claim outpoint
        --txid=<txid>                          : (str) get file with matching claim txid
        --nout=<nout>                          : (int) get file with matching claim nout
        --channel_claim_id=<channel_claim_id>  : (str) get file with matching channel claim id
        --channel_name=<channel_name>          : (str) get file with matching channel name
        --claim_name=<claim_name>              : (str) get file with matching claim name
        --blobs_in_stream<blobs_in_stream>     : (int) get file with matching blobs in stream
        --blobs_remaining=<blobs_remaining>    : (int) amount of remaining blobs to download
        --sort=<sort_by>                       : (str) field to sort by (one of the above filter fields)
        --comparison=<comparison>              : (str) logical comparision, (eq | ne | g | ge | l | le)

    Returns:
        (list) List of files
        [
            {
                'completed': (bool) true if download is completed,
                'file_name': (str) name of file,
                'download_directory': (str) download directory,
                'points_paid': (float) credit paid to download file,
                'stopped': (bool) true if download is stopped,
                'stream_hash': (str) stream hash of file,
                'stream_name': (str) stream name ,
                'suggested_file_name': (str) suggested file name,
                'sd_hash': (str) sd hash of file,
                'download_path': (str) download path of file,
                'mime_type': (str) mime type of file,
                'key': (str) key attached to file,
                'total_bytes_lower_bound': (int) lower bound file size in bytes,
                'total_bytes': (int) file upper bound size in bytes,
                'written_bytes': (int) written size in bytes,
                'blobs_completed': (int) number of fully downloaded blobs,
                'blobs_in_stream': (int) total blobs on stream,
                'blobs_remaining': (int) total blobs remaining to download,
                'status': (str) downloader status
                'claim_id': (str) None if claim is not found else the claim id,
                'txid': (str) None if claim is not found else the transaction id,
                'nout': (int) None if claim is not found else the transaction output index,
                'outpoint': (str) None if claim is not found else the tx and output,
                'metadata': (dict) None if claim is not found else the claim metadata,
                'channel_claim_id': (str) None if claim is not found or not signed,
                'channel_name': (str) None if claim is not found or not signed,
                'claim_name': (str) None if claim is not found else the claim name
            },
        ]
    }
    """
    # Fall back to stable defaults: sort by database row id, filter by equality.
    matching = self.stream_manager.get_filtered_streams(
        sort or 'rowid', reverse, comparison or 'eq', **kwargs
    )
    return [stream.as_dict() for stream in matching]
2019-03-24 21:55:04 +01:00
@requires(STREAM_MANAGER_COMPONENT)
async def jsonrpc_file_set_status(self, status, **kwargs):
    """
    Start or stop downloading a file

    Usage:
        file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
                      [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]

    Options:
        --status=<status>            : (str) one of "start" or "stop"
        --sd_hash=<sd_hash>          : (str) set status of file with matching sd hash
        --file_name=<file_name>      : (str) set status of file with matching file name in the
                                       downloads folder
        --stream_hash=<stream_hash>  : (str) set status of file with matching stream hash
        --rowid=<rowid>              : (int) set status of file with matching row id

    Returns:
        (str) Confirmation message
    """
    if status not in ['start', 'stop']:
        raise Exception('Status must be "start" or "stop".')

    matches = self.stream_manager.get_filtered_streams(**kwargs)
    if not matches:
        raise Exception(f'Unable to find a file for {kwargs}')
    # Only the first match is acted upon.
    stream = matches[0]
    if status == 'start':
        if not stream.running:
            await self.stream_manager.start_stream(stream)
            return "Resumed download"
        return "File was already being downloaded"
    if stream.running:
        await self.stream_manager.stop_stream(stream)
        return "Stopped download"
    return "File was already stopped"
2019-03-24 21:55:04 +01:00
@requires(STREAM_MANAGER_COMPONENT)
async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
    """
    Delete a LBRY file

    Usage:
        file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                    [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                    [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                    [--channel_name=<channel_name>]

    Options:
        --delete_from_download_dir             : (bool) delete file from download directory,
                                                 instead of just deleting blobs
        --delete_all                           : (bool) if there are multiple matching files,
                                                 allow the deletion of multiple files.
                                                 Otherwise do not delete anything.
        --sd_hash=<sd_hash>                    : (str) delete by file sd hash
        --file_name=<file_name>                : (str) delete by file name in downloads folder
        --stream_hash=<stream_hash>            : (str) delete by file stream hash
        --rowid=<rowid>                        : (int) delete by file row id
        --claim_id=<claim_id>                  : (str) delete by file claim id
        --txid=<txid>                          : (str) delete by file claim txid
        --nout=<nout>                          : (int) delete by file claim nout
        --claim_name=<claim_name>              : (str) delete by file claim name
        --channel_claim_id=<channel_claim_id>  : (str) delete by file channel claim id
        --channel_name=<channel_name>          : (str) delete by file channel claim name

    Returns:
        (bool) true if deletion was successful
    """
    streams = self.stream_manager.get_filtered_streams(**kwargs)

    if not streams:
        log.warning("There is no file to delete")
        return False

    # Refuse ambiguous deletes unless the caller explicitly opted in.
    if len(streams) > 1 and not delete_all:
        log.warning("There are %i files to delete, use narrower filters to select one",
                    len(streams))
        return False
    if len(streams) > 1:
        log.warning("Deleting %i files",
                    len(streams))

    for stream in streams:
        # Capture the name before the stream object is torn down.
        message = f"Deleted file {stream.file_name}"
        await self.stream_manager.delete_stream(stream, delete_file=delete_from_download_dir)
        log.info(message)
    return True
2019-01-23 19:00:58 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the stream_* commands — TODO confirm against the doc tooling.
STREAM_DOC = """
Stream information.
"""
2019-01-22 23:44:17 +01:00
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
          DHT_COMPONENT, DATABASE_COMPONENT,
          conditions=[WALLET_IS_UNLOCKED])
def jsonrpc_stream_cost_estimate(self, uri):
    """
    Get estimated cost for a lbry stream

    Usage:
        stream_cost_estimate (<uri> | --uri=<uri>)

    Options:
        --uri=<uri>      : (str) uri to use

    Returns:
        (float) Estimated cost in lbry credits, returns None if uri is not
            resolvable
    """
    # Delegates entirely to the resolver-backed cost helper.
    estimated_cost = self.get_est_cost_from_uri(uri)
    return estimated_cost
2017-01-03 20:13:01 +01:00
2019-01-21 21:55:50 +01:00
# Section heading string; presumably consumed by the API/CLI documentation
# generator to group the channel_* commands — TODO confirm against the doc tooling.
CHANNEL_DOC = """
Channel management.
"""
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_channel_create(
        self, name, bid, allow_duplicate_name=False, account_id=None, claim_address=None, preview=False, **kwargs):
    """
    Generate a publisher key and create a new '@' prefixed channel claim.

    Usage:
        channel create (<name> | --name=<name>) (<bid> | --bid=<bid>)
                       [--tags=<tags>...] [--allow_duplicate_name=<allow_duplicate_name>]
                       [--title=<title>] [--description=<description>] [--language=<language>]
                       [--contact_email=<contact_email>]
                       [--homepage_url=<homepage_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                       [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

    Options:
        --name=<name>                  : (str) name of the channel prefixed with '@'
        --allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with
                                          given name. default: false.
        --bid=<bid>                    : (decimal) amount to back the claim
        --tags=<tags>                  : (list) content tags
        --title=<title>                : (str) title of the publication
        --description=<description>    : (str) description of the publication
        --language=<language>          : (str) primary language of the channel
        --contact_email=<contact_email>: (str) email of channel owner
        --homepage_url=<homepage_url>  : (str) homepage url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --cover_url=<cover_url>        : (str) url of cover image
        --account_id=<account_id>      : (str) id of the account to store channel
        --claim_address=<claim_address>: (str) address where the channel is sent to, if not specified
                                         it will be determined automatically from the account
        --preview                      : (bool) do not broadcast the transaction
    """
    account = self.get_account_or_default(account_id)
    name = self.get_channel_name_or_error(name)
    amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    claim_address = await self.get_receiving_address(claim_address, account)

    # Guard against accidental duplicates unless explicitly allowed.
    existing_channels = await account.get_channels(claim_name=name)
    if existing_channels and not allow_duplicate_name:
        raise Exception(
            f"You already have a channel under the name '{name}'. "
            f"Use --allow-duplicate-name flag to override."
        )

    claim = Claim()
    claim.channel.update(**kwargs)
    tx = await Transaction.claim_create(
        name, claim, amount, claim_address, [account], account
    )
    txo = tx.outputs[0]
    txo.generate_channel_private_key()

    if preview:
        # Previews never hit the network; release the reserved inputs.
        await account.ledger.release_tx(tx)
    else:
        await tx.sign([account])
        await account.ledger.broadcast(tx)
        account.add_channel_private_key(txo.ref, txo.private_key)
        self.default_wallet.save()
        await self.storage.save_claims([self._old_get_temp_claim_info(
            tx, txo, claim_address, claim, name, dewies_to_lbc(amount)
        )])
        await self.analytics_manager.send_new_channel()
    return tx
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_channel_update(
        self, claim_id, bid=None, account_id=None, claim_address=None,
        new_signing_key=False, preview=False, **kwargs):
    """
    Update attributes of a channel.

    Usage:
        channel update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>]
                       [--tags=<tags>...] [--clear-tags] [--title=<title>] [--description=<description>]
                       [--language=<language>] [--contact_email=<contact_email>]
                       [--homepage_url=<homepage_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                       [--account_id=<account_id>] [--claim_address=<claim_address>] [--new-signing-key] [--preview]

    Options:
        --claim_id=<claim_id>          : (str) claim_id of the channel to update
        --bid=<bid>                    : (decimal) amount to back the claim
        --tags=<tags>                  : (list) add content tags
        --clear-tags                   : (bool) clear existing tags (prior to adding new ones)
        --title=<title>                : (str) title of the publication
        --description=<description>    : (str) description of the publication
        --language=<language>          : (str) primary language of the channel
        --contact_email=<contact_email>: (str) email of channel owner
        --homepage_url=<homepage_url>  : (str) homepage url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --cover_url=<cover_url>        : (str) url of cover image
        --account_id=<account_id>      : (str) id of the account to store channel
        --claim_address=<claim_address>: (str) address where the channel is sent
        --new-signing-key              : (bool) generate a new signing key, will invalidate all previous publishes
        --preview                      : (bool) do not broadcast the transaction
    """
    account = self.get_account_or_default(account_id)
    # Exactly one claim must match the given claim_id within this account.
    matches = await account.get_claims(claim_id=claim_id)
    if len(matches) != 1:
        raise Exception(
            f"Can't find the channel '{claim_id}' in account '{account_id}'."
        )
    old_txo = matches[0]
    if not old_txo.claim.is_channel:
        raise Exception(
            f"A claim with id '{claim_id}' was found but it is not a channel."
        )
    amount = (
        self.get_dewies_or_error('bid', bid, positive_value=True)
        if bid is not None else old_txo.amount
    )
    if claim_address is None:
        claim_address = old_txo.get_address(account.ledger)
    else:
        self.ledger.is_valid_address(claim_address)
    old_txo.claim.channel.update(**kwargs)
    tx = await Transaction.claim_update(
        old_txo, amount, claim_address, [account], account
    )
    new_txo = tx.outputs[0]
    if new_signing_key:
        new_txo.generate_channel_private_key()
    else:
        # Carry the existing signing key forward so prior publishes stay valid.
        new_txo.private_key = old_txo.private_key
    if preview:
        await account.ledger.release_tx(tx)
    else:
        await tx.sign([account])
        await account.ledger.broadcast(tx)
        account.add_channel_private_key(new_txo.ref, new_txo.private_key)
        self.default_wallet.save()
        await self.storage.save_claims([self._old_get_temp_claim_info(
            tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
        )])
        await self.analytics_manager.send_new_channel()
    return tx
2017-04-07 02:45:05 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_channel_list(self, account_id=None, page=None, page_size=None):
    """
    Get certificate claim infos for channels that can be published to

    Usage:
        channel_list [<account_id> | --account_id=<account_id>]
                     [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id>  : (str) id of the account to use
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns:
        (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim
        is in the wallet.
    """
    # Paginate over the account's channel claims.
    account = self.get_account_or_default(account_id)
    return maybe_paginate(
        account.get_channels, account.get_channel_count, page, page_size)
2017-04-07 02:45:05 +02:00
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_export(self, claim_id):
    """
    Export serialized channel signing information for a given certificate claim id

    Usage:
        channel_export (<claim_id> | --claim_id=<claim_id>)

    Options:
        --claim_id=<claim_id> : (str) Claim ID to export information about

    Returns:
        (str) Serialized certificate information
    """
    # Thin delegation to the wallet manager.
    exported = await self.wallet_manager.export_certificate_info(claim_id)
    return exported
2017-11-22 19:46:34 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_import(self, serialized_certificate_info):
    """
    Import serialized channel signing information (to allow signing new claims to the channel)

    Usage:
        channel_import (<serialized_certificate_info> | --serialized_certificate_info=<serialized_certificate_info>)

    Options:
        --serialized_certificate_info=<serialized_certificate_info> : (str) certificate info

    Returns:
        (dict) Result dictionary
    """
    # Thin delegation to the wallet manager.
    imported = await self.wallet_manager.import_certificate_info(serialized_certificate_info)
    return imported
2017-11-22 19:46:34 +01:00
2019-03-24 21:55:04 +01:00
CLAIM_DOC = """
Claim management.
"""
2019-01-22 23:44:17 +01:00
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
          conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_publish(
        self, name, bid, file_path, allow_duplicate_name=False,
        channel_id=None, channel_account_id=None,
        account_id=None, claim_address=None, preview=False, **kwargs):
    """
    Make a new name claim and publish associated data to lbrynet.

    Usage:
        publish (<name> | --name=<name>) (<bid> | --bid=<bid>) (<file_path> | --file_path=<file_path>)
                [--tags=<tags>...] [--allow_duplicate_name=<allow_duplicate_name>]
                [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                [--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
                [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                [--release_time=<release_time>] [--duration=<duration>]
                [--video_width=<video_width>] [--video_height=<video_height>]
                [--channel_id=<channel_id>] [--channel_account_id=<channel_account_id>...]
                [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

    Options:
        --name=<name>                  : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
        --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
                                         given name. default: false.
        --bid=<bid>                    : (decimal) amount to back the claim
        --file_path=<file_path>        : (str) path to file to be associated with name.
        --tags=<tags>                  : (list) content tags
        --fee_currency=<fee_currency>  : (string) specify fee currency
        --fee_amount=<fee_amount>      : (decimal) content download fee
        --fee_address=<fee_address>    : (str) address where to send fee payments, will use
                                         value from --claim_address if not provided
        --title=<title>                : (str) title of the publication
        --description=<description>    : (str) description of the publication
        --author=<author>              : (str) author of the publication. The usage for this field is not
                                         the same as for channels. The author field is used to credit an author
                                         who is not the publisher and is not represented by the channel. For
                                         example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                         by published to a channel such as '@classics', or to no channel at all
        --language=<language>          : (str) language of the publication
        --license=<license>            : (str) publication license
        --license_url=<license_url>    : (str) publication license url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --release_time=<release_time>  : (int) original public release of content, seconds since UNIX epoch
        --duration=<duration>          : (int) audio/video duration in seconds, an attempt will be made to
                                         calculate this automatically if not provided
        --video_width=<video_width>    : (int) video width
        --video_height=<video_height>  : (int) video height
        --channel_id=<channel_id>      : (str) claim id of the publisher channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                         for channel certificates, defaults to all accounts.
        --account_id=<account_id>      : (str) account to use for funding the transaction
        --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                         it will be determined automatically from the account
        --preview                      : (bool) do not broadcast the transaction
    """
    # FIX: corrected option placeholders in the docstring
    # (--release_time=<duration> -> <release_time>,
    #  --channel_account_id=<channel_id> -> <channel_account_id>).
    account = self.get_account_or_default(account_id)
    channel = await self.get_channel_or_none(channel_account_id, channel_id, for_signing=True)
    name = self.get_claim_name_or_error(name)
    amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    claim_address = await self.get_receiving_address(claim_address, account)
    kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)

    existing_claims = await account.get_claims(claim_name=name)
    if len(existing_claims) > 0:
        if not allow_duplicate_name:
            raise Exception(
                f"You already have a claim published under the name '{name}'. "
                f"Use --allow-duplicate-name flag to override."
            )

    claim = Claim()
    # Placeholder hash: the real sd_hash is only known after the stream is
    # created, which is skipped in preview mode.
    claim.stream.update(file_path=file_path, hash='0'*96, **kwargs)
    tx = await Transaction.claim_create(
        name, claim, amount, claim_address, [account], account, channel
    )
    new_txo = tx.outputs[0]
    if not preview:
        file_stream = await self.stream_manager.create_stream(file_path)
        claim.stream.hash = file_stream.sd_hash
        if channel:
            new_txo.sign(channel)
        await tx.sign([account])
        await account.ledger.broadcast(tx)
        await self.storage.save_claims([self._old_get_temp_claim_info(
            tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
        )])
        # Link the stored stream to this claim so file_list can report it.
        stream_hash = await self.storage.get_stream_hash_for_sd_hash(claim.stream.hash)
        if stream_hash:
            await self.storage.save_content_claim(stream_hash, new_txo.id)
        await self.analytics_manager.send_claim_action('publish')
    else:
        await account.ledger.release_tx(tx)
    return tx
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
          conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_claim_update(
        self, claim_id, bid=None, file_path=None,
        channel_id=None, channel_account_id=None, clear_channel=False,
        account_id=None, claim_address=None,
        preview=False, **kwargs):
    """
    Modify an existing claim.

    Usage:
        claim update (<claim_id> | --claim_id=<claim_id>)
                     [--bid=<bid>] [--file_path=<file_path>] [--tags=<tags>...] [--clear-tags]
                     [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                     [--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
                     [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                     [--release_time=<release_time>] [--duration=<duration>]
                     [--video_width=<video_width>] [--video_height=<video_height>]
                     [--channel_id=<channel_id>] [--channel_account_id=<channel_account_id>...] [--clear-channel]
                     [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

    Options:
        --claim_id=<claim_id>          : (str) id of the claim to update
        --bid=<bid>                    : (decimal) amount to back the claim
        --file_path=<file_path>        : (str) path to file to be associated with name.
        --tags=<tags>                  : (list) content tags
        --clear-tags                   : (bool) clear existing tags (prior to adding new ones)
        --fee_currency=<fee_currency>  : (string) specify fee currency
        --fee_amount=<fee_amount>      : (decimal) content download fee
        --fee_address=<fee_address>    : (str) address where to send fee payments, will use
                                         value from --claim_address if not provided
        --title=<title>                : (str) title of the publication
        --description=<description>    : (str) description of the publication
        --author=<author>              : (str) author of the publication. The usage for this field is not
                                         the same as for channels. The author field is used to credit an author
                                         who is not the publisher and is not represented by the channel. For
                                         example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                         by published to a channel such as '@classics', or to no channel at all
        --language=<language>          : (str) language of the publication
        --license=<license>            : (str) publication license
        --license_url=<license_url>    : (str) publication license url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --release_time=<release_time>  : (int) original public release of content, seconds since UNIX epoch
        --duration=<duration>          : (int) audio/video duration in seconds, an attempt will be made to
                                         calculate this automatically if not provided
        --video_width=<video_width>    : (int) video width
        --video_height=<video_height>  : (int) video height
        --channel_id=<channel_id>      : (str) claim id of the publisher channel
        --clear-channel                : (bool) remove channel signature
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                         for channel certificates, defaults to all accounts.
        --account_id=<account_id>      : (str) account to use for funding the transaction
        --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                         it will be determined automatically from the account
        --preview                      : (bool) do not broadcast the transaction
    """
    # FIX: corrected option placeholders in the docstring
    # (--release_time=<duration> -> <release_time>,
    #  --channel_account_id=<channel_id> -> <channel_account_id>).
    account = self.get_account_or_default(account_id)
    existing_claims = await account.get_claims(claim_id=claim_id)
    if len(existing_claims) != 1:
        raise Exception(
            f"Can't find the claim '{claim_id}' in account '{account_id}'."
        )
    old_txo = existing_claims[0]
    if not old_txo.claim.is_stream:
        raise Exception(
            f"A claim with id '{claim_id}' was found but it is not a stream claim."
        )
    if bid is not None:
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    else:
        amount = old_txo.amount
    if claim_address is not None:
        self.ledger.is_valid_address(claim_address)
    else:
        claim_address = old_txo.get_address(account.ledger)
    channel = None
    if channel_id:
        channel = await self.get_channel_or_error(channel_account_id, channel_id, for_signing=True)
    elif old_txo.claim.is_signed and not clear_channel:
        # Keep the previous channel signature unless explicitly cleared.
        channel = old_txo.channel
    kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)
    old_txo.claim.stream.update(**kwargs)
    tx = await Transaction.claim_update(
        old_txo, amount, claim_address, [account], account, channel
    )
    new_txo = tx.outputs[0]
    if not preview:
        if file_path is not None:
            # A new file replaces the stream; recompute its sd_hash.
            file_stream = await self.stream_manager.create_stream(file_path)
            new_txo.claim.stream.hash = file_stream.sd_hash
        if channel:
            new_txo.sign(channel)
        await tx.sign([account])
        await account.ledger.broadcast(tx)
        await self.storage.save_claims([self._old_get_temp_claim_info(
            tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
        )])
        stream_hash = await self.storage.get_stream_hash_for_sd_hash(new_txo.claim.stream.hash)
        if stream_hash:
            await self.storage.save_content_claim(stream_hash, new_txo.id)
        await self.analytics_manager.send_claim_action('publish')
    else:
        await account.ledger.release_tx(tx)
    return tx
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None):
    """
    Resolve claim info from txid/nout or with claim ID

    Usage:
        claim_show [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                   [<claim_id> | --claim_id=<claim_id>]

    Options:
        --txid=<txid>         : (str) look for claim with this txid, nout must
                                also be specified
        --nout=<nout>         : (int) look for claim with this nout, txid must
                                also be specified
        --claim_id=<claim_id> : (str) look for claim with this claim id

    Returns:
        (dict) Dictionary containing claim info as below,

        {
            'txid': (str) txid of claim
            'nout': (int) nout of claim
            'amount': (float) amount of claim
            'value': (str) value of claim
            'height' : (int) height of claim takeover
            'claim_id': (str) claim ID of claim
            'supports': (list) list of supports associated with claim
        }

        if claim cannot be resolved, dictionary as below will be returned

        {
            'error': (str) reason for error
        }
    """
    # The two lookup modes are mutually exclusive: by claim_id alone, or
    # by the full txid/nout outpoint.
    lookup_by_id = claim_id is not None and txid is None and nout is None
    lookup_by_outpoint = txid is not None and nout is not None and claim_id is None
    if lookup_by_id:
        return await self.wallet_manager.get_claim_by_claim_id(claim_id)
    if lookup_by_outpoint:
        return await self.wallet_manager.get_claim_by_outpoint(txid, int(nout))
    raise Exception("Must specify either txid/nout, or claim_id")
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None, account_id=None, blocking=True):
    """
    Abandon a name and reclaim credits from the claim

    Usage:
        claim_abandon [<claim_id> | --claim_id=<claim_id>]
                      [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                      [--account_id=<account_id>]
                      [--blocking]

    Options:
        --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
        --txid=<txid>             : (str) txid of the claim to abandon
        --nout=<nout>             : (int) nout of the claim to abandon
        --account_id=<account_id> : (str) id of the account to use
        --blocking                : (bool) wait until abandon is in mempool

    Returns:
        (dict) Dictionary containing result of the claim
        {
            success: (bool) True if txn is successful
            txid : (str) txid of resulting transaction
        }
    """
    account = self.get_account_or_default(account_id)
    # Either a claim_id or a complete txid/nout outpoint must be supplied.
    if claim_id is None and txid is None and nout is None:
        raise Exception('Must specify claim_id, or txid and nout')
    if nout is None and txid is not None:
        raise Exception('Must specify nout')
    if txid is None and nout is not None:
        raise Exception('Must specify txid')
    tx = await self.wallet_manager.abandon_claim(claim_id, txid, nout, account)
    await self.analytics_manager.send_claim_action('abandon')
    if blocking:
        # Block until the abandon transaction reaches the mempool.
        await self.ledger.wait(tx)
    return {"success": True, "tx": tx}
2019-03-24 21:55:04 +01:00
@requires(WALLET_COMPONENT)
def jsonrpc_claim_list(self, account_id=None, page=None, page_size=None):
    """
    List my name claims

    Usage:
        claim_list [<account_id> | --account_id=<account_id>]
                   [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id> : (str) id of the account to query
        --page=<page>             : (int) page to return during paginating
        --page_size=<page_size>   : (int) number of items on page during pagination
    """
    # FIX: usage line previously said `claim_list_mine`, which does not match
    # this command's name and breaks docopt-based CLI parsing of `claim_list`.
    account = self.get_account_or_default(account_id)
    return maybe_paginate(
        account.get_claims,
        account.get_claim_count,
        page, page_size
    )
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_search(self, name, channel_id=None, winning=False):
    """
    Search for claims on the blockchain.

    Usage:
        claim search (<name> | --name=<name>) [--channel_id=<channel_id>] [--winning]

    Options:
        --name=<name>             : (str) name of the claim to list info about
        --channel_id=<channel_id> : (str) limit search to specific channel
        --winning                 : (bool) limit to winning claims
    """
    # FIX: removed a stretch of unreachable code that followed the return
    # statement; it referenced undefined names (`uris`, `uri`, `page`,
    # `parse_lbry_uri`, `URIParseError`) left over from a removed command.
    # NOTE(review): `channel_id` and `winning` are currently not applied to
    # the query — confirm whether filtering is expected here.
    response = await self.wallet_manager.ledger.network.get_claims_for_name(name)
    # Resolve each claim by name#claim_id to get its full metadata.
    resolutions = await self.wallet_manager.resolve(
        *(f"{claim['name']}#{claim['claim_id']}" for claim in response['claims'])
    )
    response['claims'] = [
        value.get('claim', value.get('certificate'))
        for value in resolutions.values()
    ]
    response['claims'] = sort_claim_results(response['claims'])
    return response
SUPPORT_DOC = """
Support and tip management.
"""
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_support_create(self, claim_id, amount, tip=False, account_id=None):
    """
    Create a support or a tip for name claim.

    Usage:
        support create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
                       [--tip] [--account_id=<account_id>]

    Options:
        --claim_id=<claim_id>     : (str) claim_id of the claim to support
        --amount=<amount>         : (decimal) amount of support
        --tip                     : (bool) send support to claim owner, default: false.
        --account_id=<account_id> : (str) id of the account to use

    Returns:
        (dict) Dictionary containing the transaction information
        {
            "hex": (str) raw transaction,
            "inputs": (list) inputs(dict) used for the transaction,
            "outputs": (list) outputs(dict) for the transaction,
            "total_fee": (int) fee in dewies,
            "total_input": (int) total of inputs in dewies,
            "total_output": (int) total of outputs in dewies(input - fees),
            "txid": (str) txid of the transaction,
        }
    """
    # FIX: previously this method (a) passed an undefined `name` variable to
    # support_claim (NameError at runtime) and (b) ignored the --tip flag,
    # unconditionally performing BOTH a support and a tip and discarding the
    # first result. Now it branches on `tip`.
    account = self.get_account_or_default(account_id)
    amount = self.get_dewies_or_error("amount", amount)
    validate_claim_id(claim_id)
    if tip:
        result = await self.wallet_manager.tip_claim(amount, claim_id, account)
    else:
        # NOTE(review): support_claim was previously called with a claim
        # *name* first argument that is not available here — confirm the
        # wallet manager accepts (claim_id, amount, account).
        result = await self.wallet_manager.support_claim(claim_id, amount, account)
    await self.analytics_manager.send_claim_action('new_support')
    return result
2016-08-08 08:32:56 +02:00
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_support_abandon(self, claim_id=None, txid=None, nout=None, account_id=None, blocking=True):
    """
    Abandon a name and reclaim credits from the claim

    Usage:
        support_abandon [<claim_id> | --claim_id=<claim_id>]
                        [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                        [--account_id=<account_id>]
                        [--blocking]

    Options:
        --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
        --txid=<txid>             : (str) txid of the claim to abandon
        --nout=<nout>             : (int) nout of the claim to abandon
        --account_id=<account_id> : (str) id of the account to use
        --blocking                : (bool) wait until abandon is in mempool

    Returns:
        (dict) Dictionary containing result of the claim
        {
            success: (bool) True if txn is successful
            txid : (str) txid of resulting transaction
        }
    """
    # FIX: usage line previously said `claim_abandon`, which does not match
    # this command's name (`support_abandon`) for docopt-based CLI parsing.
    # NOTE(review): implementation delegates to wallet_manager.abandon_claim,
    # identical to claim_abandon — confirm it also covers supports.
    account = self.get_account_or_default(account_id)
    if claim_id is None and txid is None and nout is None:
        raise Exception('Must specify claim_id, or txid and nout')
    if txid is None and nout is not None:
        raise Exception('Must specify txid')
    if nout is None and txid is not None:
        raise Exception('Must specify nout')
    tx = await self.wallet_manager.abandon_claim(claim_id, txid, nout, account)
    await self.analytics_manager.send_claim_action('abandon')
    if blocking:
        await self.ledger.wait(tx)
    return {"success": True, "tx": tx}
2017-06-12 16:30:18 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_support_list(self, account_id=None, page=None, page_size=None):
    """
    List supports and tips.

    Usage:
        support_list [<account_id> | --account_id=<account_id>]
                     [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id> : (str) id of the account to query
        --page=<page>             : (int) page to return during paginating
        --page_size=<page_size>   : (int) number of items on page during pagination
    """
    # Paginate over the account's supports.
    account = self.get_account_or_default(account_id)
    return maybe_paginate(
        account.get_supports, account.get_support_count, page, page_size)
2019-01-23 19:00:58 +01:00
TRANSACTION_DOC = """
2019-01-21 21:55:50 +01:00
Transaction management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_list(self, account_id=None, page=None, page_size=None):
    """
    List transactions belonging to wallet

    Usage:
        transaction_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id> : (str) id of the account to query
        --page=<page>             : (int) page to return during paginating
        --page_size=<page_size>   : (int) number of items on page during pagination

    Returns:
        (list) List of transactions

        {
            "claim_info": (list) claim info if in txn [{
                                                    "address": (str) address of claim,
                                                    "balance_delta": (float) bid amount,
                                                    "amount": (float) claim amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "abandon_info": (list) abandon info if in txn [{
                                                    "address": (str) address of abandoned claim,
                                                    "balance_delta": (float) returned amount,
                                                    "amount": (float) claim amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "confirmations": (int) number of confirmations for the txn,
            "date": (str) date and time of txn,
            "fee": (float) txn fee,
            "support_info": (list) support info if in txn [{
                                                    "address": (str) address of support,
                                                    "balance_delta": (float) support amount,
                                                    "amount": (float) support amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "is_tip": (bool),
                                                    "nout": (int) nout
                                                    }],
            "timestamp": (int) timestamp,
            "txid": (str) txn id,
            "update_info": (list) update info if in txn [{
                                                    "address": (str) address of claim,
                                                    "balance_delta": (float) credited/debited
                                                    "amount": (float) absolute amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "value": (float) value of txn
        }
    """
    # History comes from the wallet manager; the count used for pagination
    # comes from the ledger database.
    return maybe_paginate(
        self.wallet_manager.get_history,
        self.ledger.db.get_transaction_count,
        page, page_size,
        account=self.get_account_or_default(account_id)
    )
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_show(self, txid):
    """
    Get a decoded transaction from a txid

    Usage:
        transaction_show (<txid> | --txid=<txid>)

    Options:
        --txid=<txid> : (str) txid of the transaction

    Returns:
        (dict) JSON formatted transaction
    """
    # Thin delegation to the wallet manager.
    transaction = self.wallet_manager.get_transaction(txid)
    return transaction
2019-01-21 21:55:50 +01:00
UTXO_DOC = """
Unspent transaction management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_utxo_list(self, account_id=None, page=None, page_size=None):
    """
    List unspent transaction outputs

    Usage:
        utxo_list [<account_id> | --account_id=<account_id>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id> : (str) id of the account to query
        --page=<page>             : (int) page to return during paginating
        --page_size=<page_size>   : (int) number of items on page during pagination

    Returns:
        (list) List of unspent transaction outputs (UTXOs)
        [
            {
                "address": (str) the output address
                "amount": (float) unspent amount
                "height": (int) block height
                "is_claim": (bool) is the tx a claim
                "is_coinbase": (bool) is the tx a coinbase tx
                "is_support": (bool) is the tx a support
                "is_update": (bool) is the tx an update
                "nout": (int) nout of the output
                "txid": (str) txid of the output
            },
            ...
        ]
    """
    # Paginate over the account's unspent outputs.
    account = self.get_account_or_default(account_id)
    return maybe_paginate(
        account.get_utxos, account.get_utxo_count, page, page_size)
2017-11-01 22:17:38 +01:00
2019-01-04 08:49:29 +01:00
@requires(WALLET_COMPONENT)
def jsonrpc_utxo_release(self, account_id=None):
    """
    When spending a UTXO it is locally locked to prevent double spends;
    occasionally this can result in a UTXO being locked which ultimately
    did not get spent (failed to broadcast, spend transaction was not
    accepted by blockchain node, etc). This command releases the lock
    on all UTXOs in your account.

    Usage:
        utxo_release [<account_id> | --account_id=<account_id>]

    Options:
        --account_id=<account_id> : (str) id of the account to query

    Returns:
        None
    """
    account = self.get_account_or_default(account_id)
    return account.release_all_outputs()
@requires(WALLET_COMPONENT)
def jsonrpc_block_show(self, blockhash=None, height=None):
    """
    Get contents of a block

    Usage:
        block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>)

    Options:
        --blockhash=<blockhash>  : (str) hash of the block to look up
        --height=<height>        : (int) height of the block to look up

    Returns:
        (dict) Requested block
    """
    # Thin delegation to the wallet manager.
    block = self.wallet_manager.get_block(blockhash, height)
    return block
2019-01-21 21:55:50 +01:00
BLOB_DOC = """
Blob management.
"""
2019-01-22 23:44:17 +01:00
@requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT,
          conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_blob_get(self, blob_hash, timeout=None, read=False):
    """
    Download and return a blob

    Usage:
        blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read]

    Options:
        --blob_hash=<blob_hash>  : (str) blob hash of the blob to get
        --timeout=<timeout>      : (int) timeout in number of seconds

    Returns:
        (str) Success/Fail message or (dict) decoded data
    """
    # NOTE(review): `timeout` is accepted but not forwarded to download_blob
    # — confirm whether it should be.
    loop = asyncio.get_event_loop()
    blob = await download_blob(loop, self.conf, self.blob_manager, self.dht_node, blob_hash)
    if not read:
        return "Downloaded blob %s" % blob_hash
    with open(blob.file_path, 'rb') as handle:
        return handle.read().decode()
@requires(BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_blob_delete(self, blob_hash):
    """
    Delete a blob

    Usage:
        blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

    Options:
        --blob_hash=<blob_hash>  : (str) blob hash of the blob to delete

    Returns:
        (str) Success/fail message
    """
    if not blob_hash or not is_valid_blobhash(blob_hash):
        return f"Invalid blob hash to delete '{blob_hash}'"
    # If the hash names a stream descriptor, delete the whole stream;
    # otherwise just remove the single blob.
    matching_streams = self.stream_manager.get_filtered_streams(sd_hash=blob_hash)
    if matching_streams:
        await self.stream_manager.delete_stream(matching_streams[0])
    else:
        await self.blob_manager.delete_blobs([blob_hash])
    return "Deleted %s" % blob_hash
2019-01-23 19:00:58 +01:00
PEER_DOC = """
DHT / Blob Exchange peer commands.
"""
@requires(DHT_COMPONENT)
async def jsonrpc_peer_list(self, blob_hash, search_bottom_out_limit=None):
    """
    Get peers for blob hash

    Usage:
        peer_list (<blob_hash> | --blob_hash=<blob_hash>)
                  [<search_bottom_out_limit> | --search_bottom_out_limit=<search_bottom_out_limit>]

    Options:
        --blob_hash=<blob_hash>                                  : (str) find available peers for this blob hash
        --search_bottom_out_limit=<search_bottom_out_limit>      : (int) the number of search probes in a row
                                                                   that don't find any new peers
                                                                   before giving up and returning

    Returns:
        (list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>,
         'node_id': <peer node id>}
    """
    if not is_valid_blobhash(blob_hash):
        raise Exception("invalid blob hash")
    if search_bottom_out_limit is None:
        # Default number of fruitless probes before the search gives up.
        search_bottom_out_limit = 4
    else:
        search_bottom_out_limit = int(search_bottom_out_limit)
        if search_bottom_out_limit <= 0:
            raise Exception("invalid bottom out limit")
    blob_key = unhexlify(blob_hash.encode())
    peers = []
    async for found in self.dht_node.get_iterative_value_finder(
            blob_key, max_results=1, bottom_out_limit=search_bottom_out_limit):
        peers.extend(found)
    return [
        {
            "node_id": hexlify(peer.node_id).decode(),
            "address": peer.address,
            "udp_port": peer.udp_port,
            "tcp_port": peer.tcp_port,
        }
        for peer in peers
    ]
2016-08-03 09:16:06 +02:00
@requires(DATABASE_COMPONENT)
2018-12-15 21:31:02 +01:00
async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
"""
Announce blobs to the DHT
Usage:
blob_announce (<blob_hash> | --blob_hash=<blob_hash>
| --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)
Options:
--blob_hash=<blob_hash> : (str) announce a blob, specified by blob_hash
--stream_hash=<stream_hash> : (str) announce all blobs associated with
stream_hash
--sd_hash=<sd_hash> : (str) announce all blobs associated with
sd_hash and the sd_hash itself
Returns:
(bool) true if successful
"""
blob_hashes = []
if blob_hash:
blob_hashes.append(blob_hash)
elif stream_hash or sd_hash:
if sd_hash and stream_hash:
raise Exception("either the sd hash or the stream hash should be provided, not both")
if sd_hash:
2018-12-15 21:31:02 +01:00
stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
blob_hashes.extend(blob.blob_hash for blob in blobs if blob.blob_hash is not None)
else:
raise Exception('single argument must be specified')
2018-12-15 21:31:02 +01:00
await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
return True
    @requires(BLOB_COMPONENT, WALLET_COMPONENT)
    async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                                finished=None, page_size=None, page=None):
        """
        Returns blob hashes. If not given filters, returns all blobs known by the blob manager

        Usage:
            blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
                      [<stream_hash> | --stream_hash=<stream_hash>]
                      [<sd_hash> | --sd_hash=<sd_hash>]
                      [<page_size> | --page_size=<page_size>]
                      [<page> | --page=<page>]

        Options:
            --needed                     : (bool) only return needed blobs
            --finished                   : (bool) only return finished blobs
            --uri=<uri>                  : (str) filter blobs by stream in a uri
            --stream_hash=<stream_hash>  : (str) filter blobs by stream hash
            --sd_hash=<sd_hash>          : (str) filter blobs by sd hash
            --page_size=<page_size>      : (int) results page size
            --page=<page>                : (int) page of results to return (0-based)

        Returns:
            (list) List of blob hashes
        """
        if uri or stream_hash or sd_hash:
            # Normalize whichever filter was supplied so that BOTH sd_hash and
            # stream_hash are populated before building the blob list.
            if uri:
                metadata = (await self.wallet_manager.resolve(uri))[uri]
                sd_hash = utils.get_sd_hash(metadata)
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            elif stream_hash:
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            elif sd_hash:
                # Round-trip through the stream hash; presumably re-canonicalizes
                # the sd_hash from storage — confirm this is intentional.
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            # The sd blob itself is listed first, then the stream's data blobs.
            if sd_hash:
                blobs = [sd_hash]
            else:
                blobs = []
            if stream_hash:
                # [:-1] drops the last entry — presumably the empty stream
                # terminator blob — TODO confirm against storage schema.
                blobs.extend([b.blob_hash for b in (await self.storage.get_blobs_for_stream(stream_hash))[:-1]])
        else:
            # No filters: every completed blob the blob manager knows about.
            blobs = list(self.blob_manager.completed_blob_hashes)
        if needed:
            blobs = [blob_hash for blob_hash in blobs if not self.blob_manager.get_blob(blob_hash).get_is_verified()]
        if finished:
            blobs = [blob_hash for blob_hash in blobs if self.blob_manager.get_blob(blob_hash).get_is_verified()]
        # Paging: default page_size is "everything", default page is 0.
        page_size = page_size or len(blobs)
        page = page or 0
        start_index = page * page_size
        stop_index = start_index + page_size
        return blobs[start_index:stop_index]
2016-08-27 01:58:53 +02:00
    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None):
        """
        Reflects specified blobs

        Usage:
            blob_reflect (<blob_hashes>...) [--reflector_server=<reflector_server>]

        Options:
            --reflector_server=<reflector_server>          : (str) reflector address

        Returns:
            (list) reflected blob hashes
        """
        # Not implemented; the endpoint is kept so existing clients get a clear
        # NotImplementedError instead of an unknown-method failure.
        raise NotImplementedError()
2018-03-22 21:54:29 +01:00
    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect_all(self):
        """
        Reflects all saved blobs

        Usage:
            blob_reflect_all

        Options:
            None

        Returns:
            (bool) true if successful
        """
        # Not implemented; the endpoint is kept so existing clients get a clear
        # NotImplementedError instead of an unknown-method failure.
        raise NotImplementedError()
@requires(STREAM_MANAGER_COMPONENT)
async def jsonrpc_file_reflect(self, **kwargs):
2018-03-29 16:46:29 +02:00
"""
2019-01-22 23:44:17 +01:00
Reflect all the blobs in a file matching the filter criteria
2018-03-29 16:46:29 +02:00
Usage:
2019-01-22 23:44:17 +01:00
file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
[--stream_hash=<stream_hash>] [--rowid=<rowid>]
[--reflector=<reflector>]
2018-03-29 16:46:29 +02:00
Options:
2019-01-22 23:44:17 +01:00
--sd_hash=<sd_hash> : (str) get file with matching sd hash
--file_name=<file_name> : (str) get file with matching file name in the
downloads folder
--stream_hash=<stream_hash> : (str) get file with matching stream hash
--rowid=<rowid> : (int) get file with matching row id
--reflector=<reflector> : (str) reflector server, ip address or url
by default choose a server from the config
Returns:
(list) list of blobs reflected
"""
2019-01-30 20:59:48 +01:00
server, port = kwargs.get('server'), kwargs.get('port')
if server and port:
port = int(port)
else:
server, port = random.choice(self.conf.reflector_servers)
reflected = await asyncio.gather(*[
2019-01-30 20:59:48 +01:00
stream.upload_to_reflector(server, port)
for stream in self.stream_manager.get_filtered_streams(**kwargs)
])
total = []
for reflected_for_stream in reflected:
total.extend(reflected_for_stream)
return total
2019-01-22 23:44:17 +01:00
@requires(DHT_COMPONENT)
async def jsonrpc_peer_ping(self, node_id, address, port):
"""
Send a kademlia ping to the specified peer. If address and port are provided the peer is directly pinged,
if not provided the peer is located first.
2019-01-22 23:44:17 +01:00
Usage:
peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>)
2019-02-19 23:26:08 +01:00
Options:
None
2018-03-29 16:46:29 +02:00
Returns:
(str) pong, or {'error': <error message>} if an error is encountered
"""
2019-01-22 23:44:17 +01:00
peer = None
if node_id and address and port:
2019-02-02 08:02:19 +01:00
peer = self.component_manager.peer_manager.get_kademlia_peer(unhexlify(node_id), address,
udp_port=int(port))
try:
return await self.dht_node.protocol.get_rpc_peer(peer).ping()
except asyncio.TimeoutError:
return {'error': 'timeout'}
2019-01-22 23:44:17 +01:00
if not peer:
return {'error': 'peer not found'}
2018-03-29 16:46:29 +02:00
@requires(DHT_COMPONENT)
2017-10-10 21:04:48 +02:00
def jsonrpc_routing_table_get(self):
"""
Get DHT routing information
Usage:
routing_table_get
Options:
None
2017-10-10 21:04:48 +02:00
Returns:
2019-01-22 23:44:17 +01:00
(dict) dictionary containing routing and peer information
2017-10-10 21:04:48 +02:00
{
"buckets": {
<bucket index>: [
{
"address": (str) peer address,
2019-01-22 23:44:17 +01:00
"udp_port": (int) peer udp port,
"tcp_port": (int) peer tcp port,
2017-10-10 21:04:48 +02:00
"node_id": (str) peer node id,
}
2017-10-11 21:14:29 +02:00
]
},
2017-10-10 21:04:48 +02:00
"node_id": (str) the local dht node id
2017-10-11 21:14:29 +02:00
}
2017-10-10 21:04:48 +02:00
"""
2019-01-22 23:44:17 +01:00
result = {
'buckets': {}
}
2017-10-10 21:04:48 +02:00
2019-01-22 23:44:17 +01:00
for i in range(len(self.dht_node.protocol.routing_table.buckets)):
2018-11-13 15:36:52 +01:00
result['buckets'][i] = []
2019-01-22 23:44:17 +01:00
for peer in self.dht_node.protocol.routing_table.buckets[i].peers:
2017-10-10 21:04:48 +02:00
host = {
2019-01-22 23:44:17 +01:00
"address": peer.address,
"udp_port": peer.udp_port,
"tcp_port": peer.tcp_port,
"node_id": hexlify(peer.node_id).decode(),
2017-10-10 21:04:48 +02:00
}
2018-11-13 15:36:52 +01:00
result['buckets'][i].append(host)
2017-10-10 21:04:48 +02:00
2019-01-22 23:44:17 +01:00
result['node_id'] = hexlify(self.dht_node.protocol.node_id).decode()
2018-12-15 21:31:02 +01:00
return result
2017-10-10 21:04:48 +02:00
2019-03-24 21:55:04 +01:00
def get_fee_address(self, kwargs: dict, claim_address: str) -> str:
if 'fee_address' in kwargs:
self.ledger.is_valid_address(kwargs['fee_address'])
return kwargs['fee_address']
return claim_address
async def get_receiving_address(self, address: str, account: LBCAccount) -> str:
if address is None:
return await account.receiving.get_or_create_usable_address()
self.ledger.is_valid_address(address)
return address
@staticmethod
def get_claim_name_or_error(name: str) -> str:
try:
parsed = parse_lbry_uri(name)
if parsed.name != name:
raise Exception("Claim name given has invalid characters.")
if parsed.is_channel:
raise Exception("Claim names cannot start with @ symbol. This is reserved for channels.")
except (TypeError, URIParseError):
raise Exception("Invalid claim name given.")
return name
@staticmethod
def get_channel_name_or_error(channel_name: str) -> str:
try:
parsed = parse_lbry_uri(channel_name)
if not parsed.contains_channel:
raise Exception("Cannot make a new channel for a non channel name")
if parsed.path:
raise Exception("Invalid channel uri")
except (TypeError, URIParseError):
raise Exception("Invalid channel name")
return channel_name
    async def get_channel_or_none(self, account_ids: List[str], channel_id: Optional[str] = None,
                                  for_signing: bool = False) -> Optional[Output]:
        """Return the channel for *channel_id*, or None when no id was given.

        Delegates to get_channel_or_error, so a provided-but-unknown id raises.
        """
        if channel_id is not None:
            return await self.get_channel_or_error(account_ids, channel_id, for_signing)
        # implicit None return when channel_id is None
async def get_channel_or_error(self, account_ids: List[str], channel_id: str = None,
for_signing: bool = False) -> Output:
if channel_id is None:
raise ValueError("Couldn't find channel because a channel_id was not provided.")
for account in self.get_accounts_or_all(account_ids):
channels = await account.get_channels(claim_id=channel_id, limit=1)
if channels:
if for_signing and channels[0].private_key is None:
raise Exception(f"Couldn't find private key for channel '{channel_id}'. ")
return channels[0]
raise ValueError(f"Couldn't find channel with channel_id '{channel_id}'.")
def get_account_or_default(self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
2018-09-19 15:58:50 +02:00
if account_id is None:
return self.default_account
return self.get_account_or_error(account_id, argument_name, lbc_only)
2018-09-19 15:58:50 +02:00
2019-03-24 21:55:04 +01:00
def get_accounts_or_all(self, account_ids: List[str]) -> List[LBCAccount]:
return [
self.get_account_or_error(account_id)
for account_id in account_ids
] if account_ids else self.default_wallet.accounts
2019-03-24 21:55:04 +01:00
def get_account_or_error(
self, account_id: str, argument_name: str = "account", lbc_only=True) -> Optional[LBCAccount]:
for account in self.default_wallet.accounts:
2018-08-30 06:04:25 +02:00
if account.id == account_id:
if lbc_only and not isinstance(account, LBCAccount):
raise ValueError(
"Found '{}', but it's an {} ledger account. "
"'{}' requires specifying an LBC ledger account."
.format(account_id, account.ledger.symbol, argument_name)
)
return account
raise ValueError(f"Couldn't find account: {account_id}.")
2018-08-06 08:53:27 +02:00
    @staticmethod
    def get_dewies_or_error(argument: str, lbc: str, positive_value=False):
        """Convert an LBC amount string to dewies (int), raising ValueError
        with a message naming *argument* on any invalid value.

        NOTE(review): the positive-value ValueError raised inside the try is
        itself caught by the except below and re-wrapped, so callers see
        "Invalid value for '<argument>': '<argument>' value must be greater
        than 0.0" — confirm the doubled prefix is intended.
        """
        try:
            dewies = lbc_to_dewies(lbc)  # may raise ValueError on malformed amounts
            if positive_value and dewies <= 0:
                raise ValueError(f"'{argument}' value must be greater than 0.0")
            return dewies
        except ValueError as e:
            raise ValueError(f"Invalid value for '{argument}': {e.args[0]}")
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
return {
"claim_id": txo.claim_id,
"name": name,
"amount": bid,
"address": address,
"txid": tx.id,
"nout": txo.position,
"value": claim_dict,
"height": -1,
"claim_sequence": -1,
}
2018-07-26 05:29:13 +02:00
2017-01-02 20:52:24 +01:00
def loggly_time_string(dt):
    """Format *dt* as a percent-encoded timestamp for a loggly search URL."""
    stamp = dt.strftime("%Y-%m-%dT%H:%M:%S")
    # NOTE(review): microseconds are scaled by 1e-5 (not 1e-3), so this is not
    # actually milliseconds and is appended without a separator — behavior
    # preserved as-is; confirm whether this formatting is intended.
    fraction = str(round(dt.microsecond * (10.0 ** -5), 3))
    return quote(f"{stamp}{fraction}Z")
2017-01-02 20:52:24 +01:00
def get_loggly_query_string(installation_id):
    """Build a loggly search URL for this installation covering the last 24 hours."""
    search_url = "https://lbry.loggly.com/search#"
    now = utils.now()
    yesterday = now - utils.timedelta(days=1)
    query = {
        # Match log entries tagged with the (shortened) installation id.
        'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
        'from': loggly_time_string(yesterday),
        'to': loggly_time_string(now)
    }
    return search_url + urlencode(query)